diff --git a/.eslintrc.json b/.eslintrc.json index 5198b99ae923..622290c697c2 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -62,7 +62,61 @@ "args": "none" } ], - "@typescript-eslint/no-use-before-define": 0 + "@typescript-eslint/no-use-before-define": 0, + "@typescript-eslint/naming-convention": [ + "error", + { + "selector": [ + "classProperty", + "typeProperty", + "parameterProperty", + "classMethod", + "typeMethod", + "accessor" + ], + "modifiers": ["private"], + "leadingUnderscore": "require", + "format": ["camelCase"], + "filter": { + "regex": "^(test_| )", + "match": false + } + }, + { + "selector": [ + "classProperty", + "typeProperty", + "parameterProperty", + "classMethod", + "typeMethod", + "accessor" + ], + "modifiers": ["protected"], + "leadingUnderscore": "allow", + "format": ["camelCase"], + "filter": { + "regex": "^(test_| )", + "match": false + } + }, + { + "selector": [ + "classProperty", + "typeProperty", + "parameterProperty", + "classMethod", + "typeMethod", + "accessor" + ], + "modifiers": ["public"], + "leadingUnderscore": "forbid", + "format": ["camelCase"], + "filter": { + "regex": "^(test_| )", + "match": false + } + } + ] }, "overrides": [ { diff --git a/.github/workflows/validation.yml b/.github/workflows/validation.yml index d5ba8ee70da5..1e2f21ff3a6d 100644 --- a/.github/workflows/validation.yml +++ b/.github/workflows/validation.yml @@ -1,7 +1,7 @@ name: 'Validation' env: - NODE_VERSION: '12' # Shipped with VS Code. + NODE_VERSION: '14' # Shipped with VS Code. 
on: push: @@ -14,7 +14,7 @@ on: jobs: typecheck: if: github.repository == 'microsoft/pyright' - runs-on: ubuntu-18.04 + runs-on: ubuntu-latest name: Typecheck steps: @@ -41,7 +41,7 @@ jobs: style: if: github.repository == 'microsoft/pyright' - runs-on: ubuntu-18.04 + runs-on: ubuntu-latest name: Style steps: @@ -73,7 +73,7 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-10.15, windows-2019, ubuntu-18.04] + os: [macos-latest, windows-2019, ubuntu-latest] name: Test ${{ matrix.os }} runs-on: ${{ matrix.os }} @@ -108,7 +108,7 @@ jobs: working-directory: packages/pyright-internal build: - runs-on: ubuntu-18.04 + runs-on: ubuntu-latest name: Build needs: typecheck @@ -139,7 +139,7 @@ jobs: working-directory: packages/vscode-pyright required: - runs-on: ubuntu-18.04 + runs-on: ubuntu-latest name: Required needs: - typecheck diff --git a/.vscode/launch.json b/.vscode/launch.json index ca77f9b3c04d..774b1ad1b0dc 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -76,6 +76,17 @@ "disableOptimisticBPs": true, "program": "${workspaceFolder}/packages/pyright-internal/node_modules/jest/bin/jest" }, + { + "name": "Pyright jest selected test", + "type": "node", + "request": "launch", + "args": ["${fileBasenameNoExtension}", "--runInBand", "-t", "${selectedText}"], + "cwd": "${workspaceRoot}/packages/pyright-internal", + "console": "integratedTerminal", + "internalConsoleOptions": "neverOpen", + "disableOptimisticBPs": true, + "program": "${workspaceFolder}/packages/pyright-internal/node_modules/jest/bin/jest" + }, { "name": "Pyright fourslash current file", "type": "node", diff --git a/README.md b/README.md index c918afe7308e..d21fbd85b7ee 100644 --- a/README.md +++ b/README.md @@ -25,10 +25,12 @@ Pyright supports [configuration files](/docs/configuration.md) that provide gran * [PEP 612](https://www.python.org/dev/peps/pep-0612/) parameter specification variables * [PEP 613](https://www.python.org/dev/peps/pep-0613/) explicit type aliases * [PEP 
635](https://www.python.org/dev/peps/pep-0635/) structural pattern matching -* [PEP 637](https://www.python.org/dev/peps/pep-0637/) indexing with keyword arguments * [PEP 646](https://www.python.org/dev/peps/pep-0646/) variadic generics * [PEP 647](https://www.python.org/dev/peps/pep-0647/) user-defined type guards * [PEP 655](https://www.python.org/dev/peps/pep-0655/) required typed dictionary items +* [PEP 673](https://www.python.org/dev/peps/pep-0673/) Self type +* [PEP 675](https://www.python.org/dev/peps/pep-0675/) arbitrary literal strings +* [PEP 681](https://www.python.org/dev/peps/pep-0681/) dataclass transform * Type inference for function return values, instance variables, class variables, and globals * Type guards that understand conditional code flow constructs like if/else statements @@ -61,22 +63,34 @@ For rich Python editing and debugging capabilities with Visual Studio Code, be s ## Installation ### VS Code Extension -For most VS Code users, we recommend using the Pylance extension rather than Pyright. Pylance incorporates the pyright type checker but features additional capabilities such as IntelliCode and semantic token highlighting. You can install the latest-published version of the Pylance VS Code extension directly from VS Code. Simply open the extensions panel and search for `pylance`. +For most VS Code users, we recommend using the Pylance extension rather than Pyright. Pylance incorporates the Pyright type checker but features additional capabilities such as IntelliCode and semantic token highlighting. You can install the latest-published version of the Pylance VS Code extension directly from VS Code. Simply open the extensions panel and search for “Pylance”. ### Vim -For vim/neovim users, you can install [coc-pyright](https://github.com/fannheyward/coc-pyright), the Pyright extension for coc.nvim. +Vim/neovim users can install [coc-pyright](https://github.com/fannheyward/coc-pyright), the Pyright extension for coc.nvim. 
Alternatively, [ALE](https://github.com/dense-analysis/ale) will automatically check your code with Pyright, without requiring any additional configuration. ### Sublime Text -For sublime text users, you can install the [LSP-pyright](https://github.com/sublimelsp/LSP-pyright) plugin from [package control](https://packagecontrol.io/packages/LSP-pyright). +Sublime text users can install the [LSP-pyright](https://github.com/sublimelsp/LSP-pyright) plugin from [package control](https://packagecontrol.io/packages/LSP-pyright). ### Emacs -For emacs users, you can install [lsp-mode](https://github.com/emacs-lsp/lsp-mode) that includes [lsp-pyright](https://github.com/emacs-lsp/lsp-pyright). +Emacs users can install [lsp-mode](https://github.com/emacs-lsp/lsp-mode) that includes [lsp-pyright](https://github.com/emacs-lsp/lsp-pyright). To activate the pyright extension follow the instructions in the [docs](https://emacs-lsp.github.io/lsp-pyright/). ### Command-line -The latest version of the command-line tool can be installed with npm, which is part of node. If you don't have a recent version of node on your system, install that first from [nodejs.org](https://nodejs.org). +A [community-maintained](https://github.com/RobertCraigie/pyright-python) Python package by the name of “pyright” is available on pypi and conda-forge. This package will automatically install node (which Pyright requires) and keep Pyright up to date. + +`pip install pyright` + +or + +`conda install pyright` + +Once installed, you can run the tool from the command line as follows: +`pyright ` + + +Alternatively, you can install the command-line version of Pyright directly from npm, which is part of node. If you don't have a recent version of node on your system, install that first from [nodejs.org](https://nodejs.org). 
To install pyright globally: `npm install -g pyright` @@ -84,17 +98,10 @@ To install pyright globally: On MacOS or Linux, sudo is required to install globally: `sudo npm install -g pyright` -Once installed, you can run the tool from the command line as follows: -`pyright ` - To update to the latest version: `sudo npm update -g pyright` -## Using Pyright with VS Code Python Extension -Pyright’s type-checking functionality and language features are now incorporated into a VS Code extension called [Pylance](https://github.com/microsoft/pylance-release), the officially supported Python Language Server from Microsoft. Pylance is designed to work with the Python extension for VS Code. In addition to Pyright’s functionality, Pylance adds compatibility with several advanced features including IntelliCode for AI-assisted completions. If you are a VS Code user, we recommend that you uninstall Pyright and instead install Pylance. You will get all the benefits of Pyright and more! - - ## Documentation * [Getting Started with Type Checking](/docs/getting-started.md) * [Type Concepts](/docs/type-concepts.md) @@ -105,6 +112,7 @@ Pyright’s type-checking functionality and language features are now incorporat * [Comments](/docs/comments.md) * [Type Inference](/docs/type-inference.md) * [Import Resolution](/docs/import-resolution.md) +* [Extending Builtins](/docs/builtins.md) * [Type Stubs](/docs/type-stubs.md) * [Types in Libraries](/docs/typed-libraries.md) * [Commands](/docs/commands.md) @@ -114,12 +122,14 @@ Pyright’s type-checking functionality and language features are now incorporat For additional information about Python static typing, refer to this community-maintained [Python Type School](https://github.com/python/typing/discussions). ## Limitations -Pyright provides support for Python 3.0 and newer. There is currently no plan to support older versions. +Pyright provides support for Python 3.0 and newer. There are no plans to support older versions. 
## Community Do you have questions about Pyright or Python type annotations in general? Post your questions in [the discussion section](https://github.com/microsoft/pyright/discussions). +If you would like to report a bug or request an enhancement, file a new issue in either the [pyright](https://github.com/microsoft/pyright/issues) or [pylance-release](https://github.com/microsoft/pylance-release/issues) issue tracker. In general, core type checking functionality is associated with Pyright while language service functionality is associated with Pylance, but the same contributors monitor both repos. For best results, provide the information requested in the issue template. + ## FAQ **Q:** What is the difference between Pyright and [Pylance](https://github.com/microsoft/pylance-release)? @@ -128,12 +138,12 @@ Do you have questions about Pyright or Python type annotations in general? Post **Q:** What is the long-term plan for Pyright? -**A:** Pyright is now an officially-supported Microsoft type checker for Python. It will continue to be developed and maintained as an open-source project under its original MIT license terms. The Pyright extension for VSCode is a reference implementation and is not guaranteed to be fully functional or maintained long-term. +**A:** Pyright is an officially-supported Microsoft type checker for Python. It will continue to be developed and maintained as an open-source project under its original MIT license terms. The Pyright extension for VS Code is a reference implementation and is not guaranteed to be fully functional or maintained long-term. ## Contributing -This project welcomes contributions and suggestions. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. +This project welcomes contributions and suggestions. 
For feature and complex bug fix contributions, it is recommended that you first discuss the proposed change with Pyright’s maintainers before submitting the pull request. Most contributions require you to agree to a Contributor License Agreement (CLA) declaring that you have the right to, and actually do, grant us the rights to use your contribution. For details, visit https://cla.microsoft.com. When you submit a pull request, a CLA-bot will automatically determine whether you need to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repos using our CLA. diff --git a/build/lib/updateDeps.js b/build/lib/updateDeps.js index 23346b663086..9e50e785adb4 100644 --- a/build/lib/updateDeps.js +++ b/build/lib/updateDeps.js @@ -47,7 +47,7 @@ async function updatePackage(packageFile, transitive, reject = undefined) { reject: reject, }); - if (!transitive && Object.keys(updateResult).length === 0) { + if (!transitive && Object.keys(/**@type {any}*/ (updateResult)).length === 0) { // If nothing changed and we aren't updating transitive deps, don't run npm install. return; } diff --git a/build/updateDeps.js b/build/updateDeps.js index 5689ab8ff7ef..62d636e6e636 100644 --- a/build/updateDeps.js +++ b/build/updateDeps.js @@ -14,6 +14,7 @@ async function main() { // These packages impact compatibility with VS Code and other users; // ensure they remained pinned exactly. '@types/vscode', + 'vsce', 'vscode-jsonrpc', 'vscode-languageclient', 'vscode-languageserver', diff --git a/docs/build-debug.md b/docs/build-debug.md index 508f7736838b..7a41ebb9a65c 100644 --- a/docs/build-debug.md +++ b/docs/build-debug.md @@ -24,6 +24,12 @@ The resulting package (pyright-X.Y.Z.vsix) can be found in the client directory. To install in VS Code, go to the extensions panel and choose “Install from VSIX...” from the menu, then select the package. +## Running Pyright tests + +1. 
cd to the `packages/pyright-internal` directory +2. Execute `npm run test` + + ## Debugging Pyright To debug pyright, open the root source directory within VS Code. Open the debug sub-panel and choose “Pyright CLI” from the debug target menu. Click on the green “run” icon or press F5 to build and launch the command-line version in the VS Code debugger. diff --git a/docs/builtins.md b/docs/builtins.md new file mode 100644 index 000000000000..3aadb27ea255 --- /dev/null +++ b/docs/builtins.md @@ -0,0 +1,8 @@ +# Extending Builtins + +The Python interpreter implicitly adds a set of symbols that are available within every module even though they are not explicitly imported. These so-called “built in” symbols include commonly-used types and functions such as “list”, “dict”, “int”, “float”, “min”, and “len”. + +Pyright gains knowledge of which types are included in “builtins” scope through the type stub file `builtins.pyi`. This stub file comes from the typeshed github repo and is bundled with pyright, along with type stubs that describe other stdlib modules. + +Some Python environments are customized to include additional builtins symbols. If you are using such an environment, you may want to tell Pyright about these additional symbols that are available at runtime. To do so, you can add a local type stub file called `__builtins__.pyi`. This file can be placed at the root of your project directory or at the root of the subdirectory specified in the `stubPath` setting (which is named `typings` by default). + diff --git a/docs/ci-integration.md b/docs/ci-integration.md index 5e19c3f98e95..d80ba8e0fdae 100644 --- a/docs/ci-integration.md +++ b/docs/ci-integration.md @@ -13,7 +13,8 @@ You can configure pyright to run as a git hook (e.g. 
prior to each check-in) by language: node pass_filenames: false types: [python] - additional_dependencies: ['pyright@1.1.99'] + # Replace the version below with the latest pyright version + additional_dependencies: ['pyright@1.1.XXX'] ``` ### Running Pyright from a CI script diff --git a/docs/command-line.md b/docs/command-line.md index 1bdff96815e4..a7f603868f57 100644 --- a/docs/command-line.md +++ b/docs/command-line.md @@ -15,12 +15,14 @@ Pyright can be run as either a VS Code extension or as a node-based command-line | -p, --project `` | Use the configuration file at this location | | --pythonplatform `` | Analyze for platform (Darwin, Linux, Windows) | | --pythonversion `` | Analyze for version (3.3, 3.4, etc.) | +| --skipunannotated | Skip type analysis of unannotated functions? | | --stats | Print detailed performance stats | | -t, --typeshed-path `` | Use typeshed type stubs at this location (2) | | -v, --venv-path `` | Directory that contains virtual environments (3) | | --verbose | Emit verbose diagnostics | | --verifytypes `` | Verify completeness of types in py.typed package | | --version | Print pyright version | +| --warnings | Use exit code of 1 if warnings are reported | | -w, --watch | Continue to run and watch for changes (4) | (1) If specific files are specified on the command line, the pyrightconfig.json file is ignored. @@ -61,7 +63,7 @@ If the “--outputjson” option is specified on the command line, diagnostics a } ``` -Each Diagnostic is formatted output in the following format: +Each Diagnostic is output in the following format: ```javascript { @@ -83,3 +85,5 @@ Each Diagnostic is formatted output in the following format: ``` Diagnostic line and character numbers are zero-based. + +Not all diagnostics have an associated diagnostic rule. Diagnostic rules are used only for diagnostics that can be disabled or enabled. If a rule is associated with the diagnostic, it is included in the output. 
If it’s not, the rule field is omitted from the JSON output. diff --git a/docs/comments.md b/docs/comments.md index fd602c106ac9..69c27d3a4e15 100644 --- a/docs/comments.md +++ b/docs/comments.md @@ -12,13 +12,19 @@ self._target = 3 # type: Union[int, str] ``` ## File-level Type Controls -Strict typing controls (where all supported type-checking switches generate errors) can be enabled for a file through the use of a special comment. Typically this comment is placed at or near the top of a code file on its own line. +Strict type checking, where most supported type-checking switches generate errors, can be enabled for a file through the use of a special comment. Typically this comment is placed at or near the top of a code file on its own line. ```python # pyright: strict ``` -Individual configuration settings can also be overridden on a per-file basis and combined with “strict” typing. For example, if you want to enable all type checks except for “reportPrivateUsage”, you could add the following comment: +Likewise, basic type checking can be enabled for a file. + +```python +# pyright: basic +``` + +Individual configuration settings can also be overridden on a per-file basis and optionally combined with “strict” or “basic” type checking. For example, if you want to enable all type checks except for “reportPrivateUsage”, you could add the following comment: ```python # pyright: strict, reportPrivateUsage=false diff --git a/docs/configuration.md b/docs/configuration.md index 58deddef1429..cf3409400c90 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -14,7 +14,7 @@ Relative paths specified within the config file are relative to the config file **ignore** [array of paths, optional]: Paths of directories or files whose diagnostic output (errors and warnings) should be suppressed even if they are an included file or within the transitive closure of an included file. 
Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). -**strict** [array of paths, optional]: Paths of directories or files that should use “strict” analysis if they are included. This is the same as manually adding a “# pyright: strict” comment. In strict mode, all type-checking rules are enabled. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). +**strict** [array of paths, optional]: Paths of directories or files that should use “strict” analysis if they are included. This is the same as manually adding a “# pyright: strict” comment. In strict mode, most type-checking rules are enabled. Refer to [this table](https://github.com/microsoft/pyright/blob/main/docs/configuration.md#diagnostic-rule-defaults) for details about which rules are enabled in strict mode. Paths may contain wildcard characters ** (a directory or multiple levels of directories), * (a sequence of zero or more characters), or ? (a single character). **typeshedPath** [path, optional]: Path to a directory that contains typeshed type stub files. Pyright ships with a bundled copy of typeshed type stubs. If you want to use a different version of typeshed stubs, you can clone the [typeshed github repo](https://github.com/python/typeshed) to a local directory and reference the location with this path. This option is useful if you’re actively contributing updates to typeshed. @@ -30,9 +30,9 @@ Relative paths specified within the config file are relative to the config file **pythonVersion** [string, optional]: Specifies the version of Python that will be used to execute the source code. The version should be specified as a string in the format "M.m" where M is the major version and m is the minor (e.g. `"3.0"` or `"3.6"`). 
If a version is provided, pyright will generate errors if the source code makes use of language features that are not supported in that version. It will also tailor its use of type stub files, which conditionalizes type definitions based on the version. If no version is specified, pyright will use the version of the current python interpreter, if one is present. -**pythonPlatform** [string, optional]: Specifies the target platform that will be used to execute the source code. Should be one of `"Windows"`, `"Darwin"` or `"Linux"`. If specified, pyright will tailor its use of type stub files, which conditionalize type definitions based on the platform. If no platform is specified, pyright will use the current platform. +**pythonPlatform** [string, optional]: Specifies the target platform that will be used to execute the source code. Should be one of `"Windows"`, `"Darwin"`, `"Linux"`, or `"All"`. If specified, pyright will tailor its use of type stub files, which conditionalize type definitions based on the platform. If no platform is specified, pyright will use the current platform. -**executionEnvironments** [array of objects, optional]: Specifies a list of execution environments (see below). Execution environments are searched from start to finish by comparing the path of a source file with the root path specified in the execution environment. +**executionEnvironments** [array of objects, optional]: Specifies a list of execution environments (see [below](https://github.com/microsoft/pyright/blob/main/docs/configuration.md#execution-environment-options)). Execution environments are searched from start to finish by comparing the path of a source file with the root path specified in the execution environment. **typeCheckingMode** ["off", "basic", "strict"]: Specifies the default rule set to use. Some rules can be overridden using additional configuration flags documented below. The default value for this setting is "basic". 
If set to "off", all type-checking rules are disabled, but Python syntax and semantic errors are still reported. @@ -48,17 +48,17 @@ The following settings control pyright’s diagnostic output (warnings or errors **strictSetInference** [boolean]: When inferring the type of a set, use strict type assumptions. For example, the expression `{1, 'a', 3.4}` could be inferred to be of type `Set[Any]` or `Set[Union[int, str, float]]`. If this setting is true, it will use the latter (stricter) type. The default value for this setting is 'false'. -**strictParameterNoneValue** [boolean]: PEP 484 indicates that when a function parameter is assigned a default value of None, its type should implicitly be Optional even if the explicit type is not. When enabled, this rule requires that parameter type annotations use Optional explicitly in this case. The default value for this setting is 'false'. +**strictParameterNoneValue** [boolean]: PEP 484 indicates that when a function parameter is assigned a default value of None, its type should implicitly be Optional even if the explicit type is not. When enabled, this rule requires that parameter type annotations use Optional explicitly in this case. The default value for this setting is 'true'. **enableTypeIgnoreComments** [boolean]: PEP 484 defines support for "# type: ignore" comments. This switch enables or disables support for these comments. The default value for this setting is 'true'. **reportGeneralTypeIssues** [boolean or string, optional]: Generate or suppress diagnostics for general type inconsistencies, unsupported operations, argument/parameter mismatches, etc. This covers all of the basic type-checking rules not covered by other rules. It does not include syntax errors. The default value for this setting is 'error'. 
-**reportPropertyTypeMismatch** [boolean or string, optional]: Generate or suppress diagnostics for properties where the type of the value passed to the setter is not assignable to the value returned by the getter. Such mismatches violate the intended use of properties, which are meant to act like variables. The default value for this setting is 'error'. +**reportPropertyTypeMismatch** [boolean or string, optional]: Generate or suppress diagnostics for properties where the type of the value passed to the setter is not assignable to the value returned by the getter. Such mismatches violate the intended use of properties, which are meant to act like variables. The default value for this setting is 'none'. **reportFunctionMemberAccess** [boolean or string, optional]: Generate or suppress diagnostics for non-standard member accesses for functions. The default value for this setting is 'none'. -**reportMissingImports** [boolean or string, optional]: Generate or suppress diagnostics for imports that have no corresponding imported python file or type stub file. The default value for this setting is 'none', although pyright can do a much better job of static type checking if type stub files are provided for all imports. +**reportMissingImports** [boolean or string, optional]: Generate or suppress diagnostics for imports that have no corresponding imported python file or type stub file. The default value for this setting is 'error'. **reportMissingModuleSource** [boolean or string, optional]: Generate or suppress diagnostics for imports that have no corresponding source file. This happens when a type stub is found, but the module source file was not found, indicating that the code may fail at runtime when using this execution environment. Type checking will be done using the type stub. The default value for this setting is 'warning'. 
@@ -72,7 +72,7 @@ The following settings control pyright’s diagnostic output (warnings or errors **reportUnusedFunction** [boolean or string, optional]: Generate or suppress diagnostics for a function or method with a private name (starting with an underscore) that is not accessed. The default value for this setting is 'none'. -**reportUnusedVariable** [boolean or string, optional]: Generate or suppress diagnostics for a variable that is not accessed. The default value for this setting is 'none'. +**reportUnusedVariable** [boolean or string, optional]: Generate or suppress diagnostics for a variable that is not accessed. The default value for this setting is 'none'. Variables whose names begin with an underscore are exempt from this check. **reportDuplicateImport** [boolean or string, optional]: Generate or suppress diagnostics for an imported symbol or module that is imported more than once. The default value for this setting is 'none'. @@ -110,8 +110,12 @@ The following settings control pyright’s diagnostic output (warnings or errors **reportIncompatibleVariableOverride** [boolean or string, optional]: Generate or suppress diagnostics for class variable declarations that override a symbol of the same name in a base class with a type that is incompatible with the base class symbol type. The default value for this setting is 'none'. +**reportInconsistentConstructor** [boolean or string, optional]: Generate or suppress diagnostics when an `__init__` method signature is inconsistent with a `__new__` signature. The default value for this setting is 'none'. + **reportOverlappingOverload** [boolean or string, optional]: Generate or suppress diagnostics for function overloads that overlap in signature and obscure each other or have incompatible return types. The default value for this setting is 'none'. 
+**reportMissingSuperCall** [boolean or string, optional]: Generate or suppress diagnostics for `__init__`, `__init_subclass__`, `__enter__` and `__exit__` methods in a subclass that fail to call through to the same-named method on a base class. The default value for this setting is 'none'. + **reportUninitializedInstanceVariable** [boolean or string, optional]: Generate or suppress diagnostics for instance variables within a class that are not initialized or declared within the class body or the `__init__` method. The default value for this setting is 'none'. **reportInvalidStringEscapeSequence** [boolean or string, optional]: Generate or suppress diagnostics for invalid escape sequences used within string literals. The Python specification indicates that such sequences will generate a syntax error in future versions. The default value for this setting is 'warning'. @@ -126,9 +130,11 @@ The following settings control pyright’s diagnostic output (warnings or errors **reportUnknownMemberType** [boolean or string, optional]: Generate or suppress diagnostics for class or instance variables that have an unknown type. The default value for this setting is 'none'. +**reportMissingParameterType** [boolean or string, optional]: Generate or suppress diagnostics for input parameters for functions or methods that are missing a type annotation. The 'self' and 'cls' parameters used within methods are exempt from this check. The default value for this setting is 'none'. + **reportMissingTypeArgument** [boolean or string, optional]: Generate or suppress diagnostics when a generic class is used without providing explicit or implicit type arguments. The default value for this setting is 'none'. -**reportInvalidTypeVarUse** [boolean or string, optional]: Generate or suppress diagnostics when a TypeVar is used inappropriately (e.g. if a TypeVar appears only once) within a generic function signature. The default value for this setting is 'none'. 
+**reportInvalidTypeVarUse** [boolean or string, optional]: Generate or suppress diagnostics when a TypeVar is used inappropriately (e.g. if a TypeVar appears only once) within a generic function signature. The default value for this setting is 'warning'. **reportCallInDefaultInitializer** [boolean or string, optional]: Generate or suppress diagnostics for function calls, list expressions, set expressions, or dictionary expressions within a default value initialization expression. Such calls can mask expensive operations that are performed at module initialization time. The default value for this setting is 'none'. @@ -158,6 +164,10 @@ The following settings control pyright’s diagnostic output (warnings or errors **reportUnusedCoroutine** [boolean or string, optional]: Generate or suppress diagnostics for call statements whose return value is not used in any way and is a Coroutine. This identifies a common error where an `await` keyword is mistakenly omitted. The default value for this setting is 'error'. +**reportUnnecessaryTypeIgnoreComment** [boolean or string, optional]: Generate or suppress diagnostics for a '# type: ignore' comment that would have no effect if removed. The default value for this setting is 'none'. + +**reportMatchNotExhaustive** [boolean or string, optional]: Generate or suppress diagnostics for a 'match' statement that does not provide cases that exhaustively match against all potential types of the target expression. The default value for this setting is 'none'. + ## Execution Environment Options Pyright allows multiple “execution environments” to be defined for different portions of your source tree. For example, a subtree may be designed to run with different import search paths or a different version of the python interpreter than the rest of the source base. 
@@ -269,10 +279,10 @@ The following table lists the default severity levels for each diagnostic rule w | strictListInference | false | false | true | | strictDictionaryInference | false | false | true | | strictSetInference | false | false | true | -| strictParameterNoneValue | false | false | true | +| strictParameterNoneValue | true | true | true | | enableTypeIgnoreComments | true | true | true | | reportGeneralTypeIssues | "none" | "error" | "error" | -| reportPropertyTypeMismatch | "none" | "error" | "error" | +| reportPropertyTypeMismatch | "none" | "none" | "none" | | reportFunctionMemberAccess | "none" | "none" | "error" | | reportMissingImports | "warning" | "error" | "error" | | reportMissingModuleSource | "warning" | "warning" | "warning" | @@ -300,7 +310,9 @@ The following table lists the default severity levels for each diagnostic rule w | reportConstantRedefinition | "none" | "none" | "error" | | reportIncompatibleMethodOverride | "none" | "none" | "error" | | reportIncompatibleVariableOverride | "none" | "none" | "error" | +| reportInconsistentConstructor | "none" | "none" | "error" | | reportOverlappingOverload | "none" | "none" | "error" | +| reportMissingSuperCall | "none" | "none" | "none" | | reportUninitializedInstanceVariable | "none" | "none" | "none" | | reportInvalidStringEscapeSequence | "none" | "warning" | "error" | | reportUnknownParameterType | "none" | "none" | "error" | @@ -308,6 +320,7 @@ The following table lists the default severity levels for each diagnostic rule w | reportUnknownLambdaType | "none" | "none" | "error" | | reportUnknownVariableType | "none" | "none" | "error" | | reportUnknownMemberType | "none" | "none" | "error" | +| reportMissingParameterType | "none" | "none" | "error" | | reportMissingTypeArgument | "none" | "none" | "error" | | reportInvalidTypeVarUse | "none" | "warning" | "error" | | reportCallInDefaultInitializer | "none" | "none" | "none" | @@ -324,15 +337,7 @@ The following table lists the default 
severity levels for each diagnostic rule w | reportUnsupportedDunderAll | "none" | "warning" | "error" | | reportUnusedCallResult | "none" | "none" | "none" | | reportUnusedCoroutine | "none" | "error" | "error" | - - - - - - - - - - +| reportUnnecessaryTypeIgnoreComment | "none" | "none" | "none" | +| reportMatchNotExhaustive | "none" | "none" | "error" | diff --git a/docs/import-resolution.md b/docs/import-resolution.md index 48a47546aa4d..699836e25fb3 100644 --- a/docs/import-resolution.md +++ b/docs/import-resolution.md @@ -5,11 +5,9 @@ If the import is relative (the module name starts with one or more dots), it res For absolute (non-relative) imports, Pyright employs the following resolution order: -1. Try to resolve using a **stdlib typeshed stub**. If the `typeshedPath` is configured, use this instead of the typeshed stubs that are packaged with Pyright. This allows for the use of a newer or a patched version of the typeshed stdlib stubs. +1. Try to resolve using the **stubPath** as defined in the `stubPath` config entry or the `python.analysis.stubPath` setting. -2. Try to resolve using the **stubPath** as defined in the `stubPath` config entry or the `python.analysis.stubPath` setting. - -3. Try to resolve using **code within the workspace**. +2. Try to resolve using **code within the workspace**. * Try to resolve relative to the **root directory** of the execution environment. If no execution environments are specified in the config file, use the root of the workspace. For more information about execution environments, refer to the [configuration documentation](https://github.com/microsoft/pyright/blob/main/docs/configuration.md#execution-environment-options). @@ -17,15 +15,16 @@ For absolute (non-relative) imports, Pyright employs the following resolution or * If no execution environment or extraPaths are configured, try to resolve using the **local directory `src`**. 
It is common for Python projects to place local source files within a directory of this name. -4. Try to resolve using **stubs or inlined types found within installed packages**. Pyright uses the configured Python environment to determine whether a package has been installed. For more details about how to configure your Python environment for Pyright, see below. If a Python environment is configured, Pyright looks in the `lib/site-packages`, `Lib/site-packages`, or `python*/site-packages` subdirectory. If no site-packages directory can be found, Pyright attempts to run the configured Python interpreter and ask it for its search paths. If no Python environment is configured, Pyright will use the default Python interpreter by invoking `python`. +3. Try to resolve using **stubs or inlined types found within installed packages**. Pyright uses the configured Python environment to determine whether a package has been installed. For more details about how to configure your Python environment for Pyright, see below. If a Python environment is configured, Pyright looks in the `lib/site-packages`, `Lib/site-packages`, or `python*/site-packages` subdirectory. If no site-packages directory can be found, Pyright attempts to run the configured Python interpreter and ask it for its search paths. If no Python environment is configured, Pyright will use the default Python interpreter by invoking `python`. * For a given package, try to resolve first using a **stub package**. Stub packages, as defined in [PEP 561](https://www.python.org/dev/peps/pep-0561/#type-checker-module-resolution-order), are named the same as the original package but with “-stubs” appended. * Try to resolve using an **inline stub**, a “.pyi” file that ships within the package. * If the package contains a “py.typed” file as described in [PEP 561](https://www.python.org/dev/peps/pep-0561/), use inlined type annotations provided in “.py” files within the package. 
+ * If the `python.analysis.useLibraryCodeForTypes` setting is set to true (or the `--lib` command-line argument was specified), try to resolve using the **library implementation** (“.py” file). Some “.py” files may contain partial or complete type annotations. Pyright will use type annotations that are provided and do its best to infer any missing type information. If you are using Pyright, `python.analysis.useLibraryCodeForTypes` is false by default. If you are using Pylance, it is true. -5. Try to resolve using a **third-party typeshed** stub. If the `typeshedPath` is configured, use this instead of the typeshed stubs that are packaged with Pyright. This allows for the use of a newer or a patched version of the typeshed third-party stubs. +4. Try to resolve using a **stdlib typeshed stub**. If the `typeshedPath` is configured, use this instead of the typeshed stubs that are packaged with Pyright. This allows for the use of a newer or a patched version of the typeshed stdlib stubs. -6. If the `python.analysis.useLibraryCodeForTypes` setting is set to true (or the `--lib` command-line argument was specified), try to resolve using the **library implementation** (“.py” file). Some “.py” files may contain partial or complete type annotations. Pyright will use type annotations that are provided and do its best to infer any missing type information. If you are using Pyright, `python.analysis.useLibraryCodeForTypes` is false by default. If you are using Pylance, it is true. +5. Try to resolve using a **third-party typeshed** stub. If the `typeshedPath` is configured, use this instead of the typeshed stubs that are packaged with Pyright. This allows for the use of a newer or a patched version of the typeshed third-party stubs. 
diff --git a/docs/type-concepts.md b/docs/type-concepts.md index 6cf6b0cc1268..9def6d7295a2 100644 --- a/docs/type-concepts.md +++ b/docs/type-concepts.md @@ -8,6 +8,26 @@ When you add a type annotation to a variable or a parameter in Python, you are _ If a variable or parameter has no type annotation, the type checker must assume that any value can be assigned to it. This eliminates the ability for a type checker to identify type incompatibilities. +### Debugging Inferred Types + +When you want to know the type that the type checker has inferred for an expression, you can use the special `reveal_type()` function: + +``` +x = 1 +reveal_type(x) # Type of "x" is "Literal[1]" +``` + +This function is always available and does not need to be imported. When you use Pyright within an IDE, you can also simply hover over an expression to see the inferred type. + +You can also see the inferred types of all local variables at once with the `reveal_locals()` function: + +``` +def f(x: int, y: str) -> None: + z = 1.0 + reveal_locals() # Type of "x" is "int". Type of "y" is "str". Type of "z" is "float". +``` + + ### Type Assignability When your code assigns a value to a symbol (in an assignment expression) or a parameter (in a call expression), the type checker first determines the type of the value being assigned. It then determines whether the target has a declared type. If so, it verifies that the type of the value is _assignable_ to the declared type. @@ -84,10 +104,9 @@ Switching from a mutable container type to a corresponding immutable container t ``` my_list_1: List[int] = [1, 2, 3] my_list_2: Sequence[Optional[int]] = my_list_1 # No longer an error -my_list_2.append(None) # Error ``` -The type error on the second line has now gone away, but a new error is reported on the third line because the `append` operation is not allowed on an immutable Sequence. +The type error on the second line has now gone away. 
For more details about generic types, type parameters, and invariance, refer to [PEP 483 — The Theory of Type Hints](https://www.python.org/dev/peps/pep-0483/). @@ -150,17 +169,21 @@ In addition to assignment-based type narrowing, Pyright supports the following t * `x is None` and `x is not None` * `x == None` and `x != None` * `type(x) is T` and `type(x) is not T` -* `x is E` and `x is not E` (where E is an enum value or True or False) +* `x is E` and `x is not E` (where E is a literal enum or bool) * `x == L` and `x != L` (where L is a literal expression) +* `x.y is E` and `x.y is not E` (where E is a literal enum or bool and x is a type that is distinguished by a field with a literal type) * `x.y == L` and `x.y != L` (where L is a literal expression and x is a type that is distinguished by a field with a literal type) * `x[K] == V` and `x[K] != V` (where K and V are literal expressions and x is a type that is distinguished by a TypedDict field with a literal type) * `x[I] == V` and `x[I] != V` (where I and V are literal expressions and x is a known-length tuple that is distinguished by the index indicated by I) -* `x in y` (where y is instance of list, set, frozenset, or deque) +* `x[I] is None` and `x[I] is not None` (where I is a literal expression and x is a known-length tuple that is distinguished by the index indicated by I) +* `len(x) == L` and `len(x) != L` (where x is tuple and L is a literal integer) +* `x in y` (where y is instance of list, set, frozenset, deque, or tuple) * `S in D` and `S not in D` (where S is a string literal and D is a TypedDict) * `isinstance(x, T)` (where T is a type or a tuple of types) * `issubclass(x, T)` (where T is a type or a tuple of types) * `callable(x)` * `f(x)` (where f is a user-defined type guard as defined in [PEP 647](https://www.python.org/dev/peps/pep-0647/)) +* `bool(x)` (where x is any expression that is statically verifiable to be truthy or falsy in all cases). 
* `x` (where x is any expression that is statically verifiable to be truthy or falsy in all cases) Expressions supported for type guards include simple names, member access chains (e.g. `a.b.c.d`), the unary `not` operator, the binary `and` and `or` operators, subscripts that are constant numbers (e.g. `a[2]`), and call expressions. Other operators (such as arithmetic operators or other subscripts) are not supported. @@ -186,6 +209,57 @@ def func2(val: Optional[int]): In the example of `func1`, the type was narrowed in both the positive and negative cases. In the example of `func2`, the type was narrowed only in the positive case because the type of `val` might be either `int` (specifically, a value of 0) or `None` in the negative case. +### Aliased Conditional Expression + +Pyright also supports a type guard expression `c`, where `c` is an identifier that refers to a local variable that is assigned one of the above supported type guard expression forms. These are called “aliased conditional expressions”. Examples include `c = a is not None` and `c = isinstance(a, str)`. When “c” is used within a conditional check, it can be used to narrow the type of expression `a`. + +This pattern is supported only in cases where `c` is a local variable within a module or function scope and is assigned a value only once. It is also limited to cases where expression `a` is a simple identifier (as opposed to a member access expression or subscript expression), is local to the function or module scope, and is assigned only once within the scope. Unary `not` operators are allowed for expression `a`, but binary `and` and `or` are not. 
+ +```python +def func1(x: str | None): + is_str = x is not None + + if is_str: + reveal_type(x) # str + else: + reveal_type(x) # None +``` + +```python +def func2(val: str | bytes): + is_str = not isinstance(val, bytes) + + if not is_str: + reveal_type(val) # bytes + else: + reveal_type(val) # str +``` + +```python +def func3(x: List[str | None]) -> str: + is_str = x[0] is not None + + if is_str: + # This technique doesn't work for subscript expressions, + # so x[0] is not narrowed in this case. + reveal_type(x[0]) # str | None +``` + +```python +def func4(x: str | None): + is_str = x is not None + + if is_str: + # This technique doesn't work in cases where the target + # expression is assigned elsewhere. Here `x` is assigned + # elsewhere in the function, so its type is not narrowed + # in this case. + reveal_type(x) # str | None + + x = "" +``` + + ### Narrowing for Implied Else When an “if” or “elif” clause is used without a corresponding “else”, Pyright will generally assume that the code can “fall through” without executing the “if” or “elif” block. However, there are cases where the analyzer can determine that a fall-through is not possible because the “if” or “elif” is guaranteed to be executed based on type analysis. @@ -232,7 +306,7 @@ This “narrowing for implied else” technique works for all narrowing expressi ### Narrowing Any -In general, the type `Any` is not narrowed. The only exceptions to this rule are the built-in `isinstance` and `issubclass` type guards plus user-defined type guards. In all other cases, `Any` is left as is, even for assignments. +In general, the type `Any` is not narrowed. The only exceptions to this rule are the built-in `isinstance` and `issubclass` type guards, class pattern matching in “match” statements, and user-defined type guards. In all other cases, `Any` is left as is, even for assignments. 
```python a: Any = 3 @@ -297,3 +371,47 @@ def add_one(value: _StrOrFloat) -> _StrOrFloat: ``` Notice that the type of variable `sum` is reported with asterisks (`*`). This indicates that internally the type checker is tracking the type as conditional. In this particular example, it indicates that `sum` is a `str` type if the parameter `value` is a `str` but is a `float` if `value` is a `float`. By tracking these conditional types, the type checker can verify that the return type is consistent with the return type `_StrOrFloat`. + + +### Inferred type of self and cls parameters + +When a type annotation for a method’s `self` or `cls` parameter is omitted, pyright will infer its type based on the class that contains the method. The inferred type is internally represented as a type variable that is bound to the class. + +The type of `self` is represented as `Self@ClassName` where `ClassName` is the class that contains the method. Likewise, the `cls` parameter in a class method will have the type `Type[Self@ClassName]`. + +```python +class Parent: + def method1(self): + reveal_type(self) # Self@Parent + return self + + @classmethod + def method2(cls): + reveal_type(cls) # Type[Self@Parent] + return cls + +class Child(Parent): + ... + +reveal_type(Child().method1()) # Child +reveal_type(Child.method2()) # Type[Child] +``` + +### Overloads + +Some functions or methods can return one of several different types. In cases where the return type depends on the types of the input parameters, it is useful to specify this using a series of `@overload` signatures. When Pyright evaluates a call expression, it determines which overload signature best matches the supplied arguments. + +[PEP 484](https://www.python.org/dev/peps/pep-0484/#function-method-overloading) introduced the `@overload` decorator and described how it can be used, but the PEP did not specify precisely how a type checker should choose the “best” overload. Pyright uses the following rules. + +1. 
Pyright first filters the list of overloads based on simple “arity” (number of arguments) and keyword argument matching. For example, if one overload requires two positional arguments but only one positional argument is supplied by the caller, that overload is eliminated from consideration. Likewise, if the call includes a keyword argument but no corresponding parameter is included in the overload, it is eliminated from consideration. + +2. Pyright next considers the types of the arguments and compares them to the declared types of the corresponding parameters. If the types do not match for a given overload, that overload is eliminated from consideration. Bidirectional type inference is used to determine the types of the argument expressions. + +3. If only one overload remains, it is the “winner”. + +4. If more than one overload remains, the “winner” is chosen based on the order in which the overloads are declared. In general, the first remaining overload is the “winner”. One exception to this rule is when a `*args` (unpacked) argument matches a `*args` parameter in one of the overload signatures. This situation overrides the normal order-based rule. + +5. If no overloads remain, Pyright considers whether any of the arguments are union types. If so, these union types are expanded into their constituent subtypes, and the entire process of overload matching is repeated with the expanded argument types. If two or more overloads match, the union of their respective return types forms the final return type for the call expression. + +6. If no overloads remain and all unions have been expanded, a diagnostic is generated indicating that the supplied arguments are incompatible with all overload signatures. 
+ diff --git a/docs/type-inference.md b/docs/type-inference.md index 140456f760e1..a2c65cc645e0 100644 --- a/docs/type-inference.md +++ b/docs/type-inference.md @@ -45,11 +45,13 @@ Note that once a symbol’s type is declared, it cannot be redeclared to a diffe ## Type Inference -Some languages require every symbol to be explicitly typed. Python allows a symbol to be bound to different values at runtime, so its type can change over time. A symbol’s type doesn’t need to declared statically. +Some languages require every symbol to be explicitly typed. Python allows a symbol to be bound to different values at runtime, so its type can change over time. A symbol’s type doesn’t need to be declared statically. When Pyright encounters a symbol with no type declaration, it attempts to _infer_ the type based on the values assigned to it. As we will see below, type inference cannot always determine the correct (intended) type, so type annotations are still required in some cases. Furthermore, type inference can require significant computation, so it is much less efficient than when type annotations are provided. -If a symbol’s type cannot be inferred, Pyright internally sets its type to “Unknown”, which is a special form of “Any”. The “Unknown” type allows Pyright to optionally warn when types are not declared and cannot be inferred, thus leaving potential “blind spots” in type checking. +## “Unknown” Type + +If a symbol’s type cannot be inferred, Pyright sets its type to “Unknown”, which is a special form of “Any”. The “Unknown” type allows Pyright to optionally warn when types are not declared and cannot be inferred, thus leaving potential “blind spots” in type checking. ### Single-Assignment Type Inference @@ -189,6 +191,40 @@ def func2(p_int: int, p_str: str, p_flt: float): var2 = func1(p_str, p_flt, p_int) ``` +### Parameter Type Inference + +Input parameters for functions and methods typically require type annotations. 
There are several cases where Pyright may be able to infer a parameter’s type if it is unannotated. + +For instance methods, the first parameter (named `self` by convention) is inferred to be type `Self`. + +For class methods, the first parameter (named `cls` by convention) is inferred to be type `type[Self]`. + +For other unannotated parameters within a method, Pyright looks for a method of the same name implemented in a base class. If the corresponding method in the base class has the same signature (the same number of parameters with the same names), no overloads, and annotated parameter types, the type annotation from this method is “inherited” for the corresponding parameter in the child class method. + +```python +class Parent: + def method1(self, a: int, b: str) -> float: + ... + + +class Child(Parent): + def method1(self, a, b): + return a + +reveal_type(Child.method1) # (self: Child, a: int, b: str) -> int +``` + +When parameter types are inherited from a base class method, the return type is not inherited. Instead, normal return type inference techniques are used. + +If the type of an unannotated parameter cannot be inferred using any of the above techniques and the parameter has a default argument expression associated with it, the parameter type is inferred from the default argument type. If the default argument is `None`, the inferred type is `Unknown | None`. + +```python +def func(a, b=0, c=None): + pass + +reveal_type(func) # (a: Unknown, b: int, c: Unknown | None) -> None +``` + ### Literals Python 3.8 introduced support for _literal types_. This allows a type checker like Pyright to track specific literal values of str, bytes, int, bool, and enum values. As with other types, literal types can be declared. 
diff --git a/docs/typed-libraries.md b/docs/typed-libraries.md index f85bbfea2433..9816c0a1fb37 100644 --- a/docs/typed-libraries.md +++ b/docs/typed-libraries.md @@ -13,13 +13,11 @@ These recommendations are intended to provide the following benefits: ## Inlined Type Annotations and Type Stubs [PEP 561](https://www.python.org/dev/peps/pep-0561/) documents several ways type information can be delivered for a library: inlined type annotations, type stub files included in the package, a separate companion type stub package, and type stubs in the typeshed repository. Some of these options fall short on delivering the benefits above. We therefore provide the following more specific guidance to library authors. -*All libraries should include inlined type annotations for the functions, classes, methods, and constants that comprise the public interface for the library.* +All libraries should include inlined type annotations for the functions, classes, methods, and constants that comprise the public interface for the library. Inlined type annotations should be included directly within the source code that ships with the package. Of the options listed in PEP 561, inlined type annotations offer the most benefits. They typically require the least effort to add and maintain, they are always consistent with the implementation, and docstrings and default parameter values are readily available, allowing language servers to enhance the development experience. -There are cases where inlined type annotations are not possible — most notably when a library’s exposed functionality is implemented in a language other than Python. - -*Libraries that expose symbols implemented in languages other than Python should include stub (“.pyi”) files that describe the types for those symbols. 
These stubs should also contain docstrings and default parameter values.* +There are cases where inlined type annotations are not possible — most notably when a library’s exposed functionality is implemented in a language other than Python. Libraries that expose symbols implemented in languages other than Python should include stub (“.pyi”) files that describe the types for those symbols. These stubs should also contain docstrings and default parameter values. In many existing type stubs (such as those found in typeshed), default parameter values are replaced with “...” and all docstrings are removed. We recommend that default values and docstrings remain within the type stub file so language servers can display this information to developers. @@ -32,7 +30,7 @@ If a “py.typed” module is present, a type checker will treat all modules wit Each module exposes a set of symbols. Some of these symbols are considered “private” — implementation details that are not part of the library’s interface. Type checkers like pyright use the following rules to determine which symbols are visible outside of the package. * Symbols whose names begin with an underscore (but are not dunder names) are considered private. -* Imported symbols are considered private by default. If they use the “import A as A” (a redundant module alias), “from X import A as A” (a redundant symbol alias), or “from . import A” forms, symbol “A” is not private unless the name begins with an underscore. +* Imported symbols are considered private by default. If they use the “import A as A” (a redundant module alias), “from X import A as A” (a redundant symbol alias), or “from . import A” forms, symbol “A” is not private unless the name begins with an underscore. 
If a file `__init__.py` uses the form “from .A import X”, symbol “A” is not private unless the name begins with an underscore (but “X” is still private). If a wildcard import (of the form “from X import *”) is used, all symbols referenced by the wildcard are not private. * A module can expose an `__all__` symbol at the module level that provides a list of names that are considered part of the interface. This overrides all other rules above, allowing imported symbols or symbols whose names begin with an underscore to be included in the interface. * Local variables within a function (including nested functions) are always considered private. @@ -81,14 +79,26 @@ Type annotations can be omitted in a few specific cases where the type is obviou * The return type for an `__init__` method does not need to be specified, since it is always `None`. * The following module-level symbols do not require type annotations: `__all__`,`__author__`, `__copyright__`, `__email__`, `__license__`, `__title__`, `__uri__`, `__version__`. * The following class-level symbols do not require type annotations: `__class__`, `__dict__`, `__doc__`, `__module__`, `__slots__`. +* A variable is assigned in only one location using a simple assignment expression and the right-hand side of the assignment is a literal value (e.g. `1`, `3.14`, `"hi"`, or `MyEnum.Value`) or an identifier that has a known type that doesn't depend on type narrowing logic. + + +### Ambiguous Types + +When a symbol is missing a type annotation, a type checker may be able to infer its type based on contextual information. However, type inference rules are not standardized and differ between type checkers. A symbol is said to have an “ambiguous type” if its type may be inferred differently between different Python type checkers. This can lead to a bad experience for consumers of the library. + +Ambiguous types can be avoided by providing explicit type annotations. 
+ -### Examples of known and unknown types +### Examples of known, ambiguous and unknown types ```python -# Variable with unknown type +# Variable with known type (unambiguous because it uses a literal assignment) +a = 3 + +# Variable with ambiguous type a = [3, 4, 5] -# Variable with known type +# Variable with known (declared) type a: List[int] = [3, 4, 5] # Type alias with partially unknown type (because type @@ -147,7 +157,7 @@ class MyClass: # Class with partially unknown type class MyClass: # Missing type annotation for class variable - height = 2.0 + height = None # Missing input parameter annotations def __init__(self, name, age): @@ -160,9 +170,9 @@ class MyClass: ... # Class with partially unknown type -class BaseClass: +class BaseClass1: # Missing type annotation - height = 2.0 + height: = 2.0 # Missing type annotation def get_stuff(self): @@ -170,12 +180,21 @@ class BaseClass: # Class with known type (because it overrides all symbols # exposed by BaseClass that have incomplete types) -class DerivedClass(BaseClass): +class DerivedClass1(BaseClass1): height: float def get_stuff(self) -> str: ... +# Class with known type +class BaseClass2: + height: float = 2.0 + +# Class with ambiguous type +class DerivedClass2(BaseClass2): + # Missing type annotation, could be inferred as float or int + height = 1 + # Class with partially unknown type because base class # (dict) is generic, and type arguments are not specified. class DictSubclass(dict): @@ -188,7 +207,7 @@ Pyright provides a feature that allows library authors to verify type completene `pyright --verifytypes ` -Pyright will analyze the library, identify all symbols that comprise the interface to the library and emit errors for any symbols whose types are unknown. It also produces a “type completeness score” which is the percentage of symbols with known types. 
+Pyright will analyze the library, identify all symbols that comprise the interface to the library and emit errors for any symbols whose types are ambiguous or unknown. It also produces a “type completeness score” which is the percentage of symbols with known types. To see additional details (including a full list of symbols in the library), append the `--verbose` option. @@ -325,11 +344,11 @@ LATEST_VERSION: Final[Tuple[int, int]] = (4, 5) ``` ### Typed Dictionaries, Data Classes, and Named Tuples -If your library runs only on newer versions of Python, you are encouraged to use some of the new type-friendly classes. +If a library runs only on newer versions of Python, it can use some of the new type-friendly classes. NamedTuple (described in [PEP 484](https://www.python.org/dev/peps/pep-0484/)) is preferred over namedtuple. -Data classes (described in [PEP 557](https://www.python.org/dev/peps/pep-0557/)) is preferred over untyped dictionaries. +Data classes (described in [PEP 557](https://www.python.org/dev/peps/pep-0557/)) are preferred over untyped dictionaries. TypedDict (described in [PEP 589](https://www.python.org/dev/peps/pep-0589/)) is preferred over untyped dictionaries. @@ -351,7 +370,7 @@ def get_config(self) -> "OrderedDict[str, str]": ### Type Comment Annotations Python 3.0 introduced syntax for parameter and return type annotations, as specified in [PEP 484](https://www.python.org/dev/peps/pep-0484/). Python 3.6 introduced support for variable type annotations, as specified in [PEP 526](https://www.python.org/dev/peps/pep-0526/). -If you need to support older versions of Python, type annotations can still be provided as “type comments”. These comments take the form # type: . +If you need to support older versions of Python, type annotations can still be provided as “type comments”. These comments take the form `# type: `. 
```python class Foo: @@ -390,7 +409,7 @@ Type annotations provide a way to annotate typical type behaviors, but some clas ## Docstrings -Docstrings should be provided for all classes, functions, and methods in the interface. They should be formatted according to [PEP 257](https://www.python.org/dev/peps/pep-0257/). +It is recommended that docstrings be provided for all classes, functions, and methods in the interface. They should be formatted according to [PEP 257](https://www.python.org/dev/peps/pep-0257/). There is currently no single agreed-upon standard for function and method docstrings, but several common variants have emerged. We recommend using one of these variants. diff --git a/lerna.json b/lerna.json index 062687638446..4380ecc536f3 100644 --- a/lerna.json +++ b/lerna.json @@ -2,7 +2,7 @@ "packages": [ "packages/*" ], - "version": "1.1.170", + "version": "1.1.225", "command": { "version": { "push": false, diff --git a/package-lock.json b/package-lock.json index ece5a7494175..2f6be73039e5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -4,31 +4,40 @@ "lockfileVersion": 1, "dependencies": { "@babel/code-frame": { - "version": "7.12.11", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", - "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", "dev": true, "requires": { - "@babel/highlight": "^7.10.4" + "@babel/highlight": "^7.16.7" } }, "@babel/helper-validator-identifier": { - "version": "7.14.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.9.tgz", - "integrity": "sha512-pQYxPY0UP6IHISRitNe8bsijHex4TWZXi2HwKVsjPiltzlhse2znVcm9Ace510VT1kxIHjGJCZZQBX2gJDbo0g==", + 
"version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", "dev": true }, "@babel/highlight": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", - "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.7.tgz", + "integrity": "sha512-aKpPMfLvGO3Q97V0qhw/V2SWNWlwfJknuwAunU7wZLSfrM4xTBvg7E5opUVi1kJTBKihE38CPg4nBiqX83PWYw==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.14.5", + "@babel/helper-validator-identifier": "^7.16.7", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, "dependencies": { + "ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "requires": { + "color-convert": "^1.9.0" + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -40,27 +49,57 @@ "supports-color": "^5.3.0" } }, + "color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "requires": { + "color-name": "1.1.3" + } + }, + "color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, "escape-string-regexp": { "version": "1.0.5", "resolved": 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", "dev": true + }, + "has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true + }, + "supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "requires": { + "has-flag": "^3.0.0" + } } } }, "@eslint/eslintrc": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", - "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.0.5.tgz", + "integrity": "sha512-BLxsnmK3KyPunz5wmCCpqy0YelEoxxGmH73Is+Z74oOTMtExcjkr3dDR6quwrjh1YspA8DH9gnX1o069KiS9AQ==", "dev": true, "requires": { "ajv": "^6.12.4", - "debug": "^4.1.1", - "espree": "^7.3.0", + "debug": "^4.3.2", + "espree": "^9.2.0", "globals": "^13.9.0", "ignore": "^4.0.6", "import-fresh": "^3.2.1", - "js-yaml": "^3.13.1", + "js-yaml": "^4.1.0", "minimatch": "^3.0.4", "strip-json-comments": "^3.1.1" }, @@ -80,20 +119,20 @@ "dev": true }, "@humanwhocodes/config-array": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", - "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.9.2.tgz", + "integrity": "sha512-UXOuFCGcwciWckOpmfKDq/GyhlTf9pN/BzG//x8p8zTOFEcGuA68ANXheFS0AGvy3qgZqLBUkMs7hqzqCKOVwA==", "dev": true, "requires": { - 
"@humanwhocodes/object-schema": "^1.2.0", + "@humanwhocodes/object-schema": "^1.2.1", "debug": "^4.1.1", "minimatch": "^3.0.4" } }, "@humanwhocodes/object-schema": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.0.tgz", - "integrity": "sha512-wdppn25U8z/2yiaT6YGquE6X8sSv7hNMWSXYSSU1jGv/yd6XqjXgTDJ8KP4NgjTXfJ3GbRjeeb8RTV7a/VpM+w==", + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", "dev": true }, "@hutson/parse-repository-url": { @@ -428,9 +467,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -650,9 +689,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -820,9 +859,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": 
"https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -1002,9 +1041,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -1254,15 +1293,15 @@ } }, "@npmcli/ci-detect": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@npmcli/ci-detect/-/ci-detect-1.3.0.tgz", - "integrity": "sha512-oN3y7FAROHhrAt7Rr7PnTSwrHrZVRTS2ZbyxeQwSSYD0ifwM3YNgQqbaRmjcWoPyq77MjchusjJDspbzMmip1Q==", + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/@npmcli/ci-detect/-/ci-detect-1.4.0.tgz", + "integrity": "sha512-3BGrt6FLjqM6br5AhWRKTr3u5GIVkjRYeAFrMp3HjnfICrg4xOrVRwFavKT6tsp++bq5dluL5t8ME/Nha/6c1Q==", "dev": true }, "@npmcli/fs": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.0.0.tgz", - "integrity": "sha512-8ltnOpRR/oJbOp8vaGUnipOi3bqkcW+sLHFlyXIr08OGHmVJLB1Hn7QtGXbYcpVtH1gAYZTlmDXtE4YV0+AMMQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.0.tgz", + "integrity": "sha512-VhP1qZLXcrXRIaPoqb4YA55JQxLNF3jNR4T55IdOJa3+IFJKNYHtPvtXx8slmeMavj37vCzCfrqQM1vWLsYKLA==", "dev": true, "requires": { "@gar/promisify": "^1.0.1", @@ -1322,9 +1361,9 @@ } }, "@npmcli/node-gyp": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-1.0.2.tgz", - "integrity": "sha512-yrJUe6reVMpktcvagumoqD9r08fH1iRo01gn1u0zoCApa9lnZGEigVKUd2hzsCId4gdtkZZIVscLhNxMECKgRg==", + "version": 
"1.0.3", + "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-1.0.3.tgz", + "integrity": "sha512-fnkhw+fmX65kiLqk6E3BFLXNC26rUhK90zVwe2yncPliVT/Qos3xjhTLE59Df8KnPlcwIERXKVlU1bXoUQ+liA==", "dev": true }, "@npmcli/promise-spawn": { @@ -1364,9 +1403,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -1432,9 +1471,9 @@ } }, "@octokit/auth-token": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.4.5.tgz", - "integrity": "sha512-BpGYsPgJt05M7/L/5FoE1PiAbdxXFZkX/3kDYcsvd1v6UhlnE5e96dTDr0ezX/EFwciQxf3cNV0loipsURU+WA==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-2.5.0.tgz", + "integrity": "sha512-r5FVUJCOLl19AxiuZD2VRZ/ORjp/4IN98Of6YJoJOkY75CIBuYfmiNHGrDwXr+aLGG55igl9QrxX3hbiXlLb+g==", "dev": true, "requires": { "@octokit/types": "^6.0.3" @@ -1486,9 +1525,9 @@ } }, "@octokit/openapi-types": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-10.1.1.tgz", - "integrity": "sha512-ygp/6r25Ezb1CJuVMnFfOsScEtPF0zosdTJDZ7mZ+I8IULl7DP1BS5ZvPRwglcarGPXOvS5sHdR0bjnVDDfQdQ==", + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-11.2.0.tgz", + "integrity": "sha512-PBsVO+15KSlGmiI8QAzaqvsNlZlrDlyAJYcrXBCvVUxCp7VnXjkwPoFHgjEJXx3WF9BAwkA6nfCUA7i9sODzKA==", "dev": true }, "@octokit/plugin-enterprise-rest": { @@ -1498,12 +1537,12 @@ "dev": true }, "@octokit/plugin-paginate-rest": { - "version": "2.16.0", - "resolved": 
"https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.16.0.tgz", - "integrity": "sha512-8YYzALPMvEZ35kgy5pdYvQ22Roz+BIuEaedO575GwE2vb/ACDqQn0xQrTJR4tnZCJn7pi8+AWPVjrFDaERIyXQ==", + "version": "2.17.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-2.17.0.tgz", + "integrity": "sha512-tzMbrbnam2Mt4AhuyCHvpRkS0oZ5MvwwcQPYGtMv4tUa5kkzG58SVB0fcsLulOZQeRnOgdkZWkRUiyBlh0Bkyw==", "dev": true, "requires": { - "@octokit/types": "^6.26.0" + "@octokit/types": "^6.34.0" } }, "@octokit/plugin-request-log": { @@ -1513,19 +1552,19 @@ "dev": true }, "@octokit/plugin-rest-endpoint-methods": { - "version": "5.10.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.10.1.tgz", - "integrity": "sha512-Rf1iMl40I0dIxjh1g32qZ6Ym/uT8QWZMm2vYGG5Vi8SX1MwZvbuxEGXYgmzTUWSD3PYWSLilE2+4L8kmdLGTMg==", + "version": "5.13.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-5.13.0.tgz", + "integrity": "sha512-uJjMTkN1KaOIgNtUPMtIXDOjx6dGYysdIFhgA52x4xSadQCz3b/zJexvITDVpANnfKPW/+E0xkOvLntqMYpviA==", "dev": true, "requires": { - "@octokit/types": "^6.27.0", + "@octokit/types": "^6.34.0", "deprecation": "^2.3.1" } }, "@octokit/request": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.1.tgz", - "integrity": "sha512-Ls2cfs1OfXaOKzkcxnqw5MR6drMA/zWX/LIS/p8Yjdz7QKTPQLMsB3R+OvoxE6XnXeXEE2X7xe4G4l4X0gRiKQ==", + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-5.6.2.tgz", + "integrity": "sha512-je66CvSEVf0jCpRISxkUcCa0UkxmFs6eGDRSbfJtAVwbLH5ceqF+YEyC8lj8ystKyZTy8adWr0qmkY52EfOeLA==", "dev": true, "requires": { "@octokit/endpoint": "^6.0.1", @@ -1556,24 +1595,24 @@ } }, "@octokit/rest": { - "version": "18.10.0", - "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.10.0.tgz", - "integrity": 
"sha512-esHR5OKy38bccL/sajHqZudZCvmv4yjovMJzyXlphaUo7xykmtOdILGJ3aAm0mFHmMLmPFmDMJXf39cAjNJsrw==", + "version": "18.12.0", + "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-18.12.0.tgz", + "integrity": "sha512-gDPiOHlyGavxr72y0guQEhLsemgVjwRePayJ+FcKc2SJqKUbxbkvf5kAZEWA/MKvsfYlQAMVzNJE3ezQcxMJ2Q==", "dev": true, "requires": { "@octokit/core": "^3.5.1", - "@octokit/plugin-paginate-rest": "^2.16.0", + "@octokit/plugin-paginate-rest": "^2.16.8", "@octokit/plugin-request-log": "^1.0.4", - "@octokit/plugin-rest-endpoint-methods": "^5.9.0" + "@octokit/plugin-rest-endpoint-methods": "^5.12.0" } }, "@octokit/types": { - "version": "6.27.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.27.0.tgz", - "integrity": "sha512-ha27f8DToxXBPEJdzHCCuqpw7AgKfjhWGdNf3yIlBAhAsaexBXTfWw36zNSsncALXGvJq4EjLy1p3Wz45Aqb4A==", + "version": "6.34.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-6.34.0.tgz", + "integrity": "sha512-s1zLBjWhdEI2zwaoSgyOFoKSl109CUcVBCc7biPJ3aAf6LGLU6szDvi31JPU7bxfla2lqfhjbbg/5DdFNxOwHw==", "dev": true, "requires": { - "@octokit/openapi-types": "^10.1.0" + "@octokit/openapi-types": "^11.2.0" } }, "@sindresorhus/is": { @@ -1598,9 +1637,9 @@ "dev": true }, "@types/glob": { - "version": "7.1.4", - "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.1.4.tgz", - "integrity": "sha512-w+LsMxKyYQm347Otw+IfBXOv9UWVjpHpCDdbBMt8Kz/xbvCYNjP+0qPh91Km3iKfSRLBB0P7fAMf0KHrPu+MyA==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", "dev": true, "requires": { "@types/minimatch": "*", @@ -1626,9 +1665,9 @@ "dev": true }, "@types/node": { - "version": "12.20.24", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.24.tgz", - "integrity": "sha512-yxDeaQIAJlMav7fH5AQqPH1u8YIuhYJXYBzxaQ4PifsU0GDO38MSdmEDeRlIxrKbC6NbEaaEHDanWb+y30U8SQ==", + "version": 
"17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", "dev": true }, "@types/normalize-package-data": { @@ -1659,85 +1698,98 @@ "dev": true }, "@typescript-eslint/eslint-plugin": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.31.0.tgz", - "integrity": "sha512-iPKZTZNavAlOhfF4gymiSuUkgLne/nh5Oz2/mdiUmuZVD42m9PapnCnzjxuDsnpnbH3wT5s2D8bw6S39TC6GNw==", + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.9.1.tgz", + "integrity": "sha512-Xv9tkFlyD4MQGpJgTo6wqDqGvHIRmRgah/2Sjz1PUnJTawjHWIwBivUE9x0QtU2WVii9baYgavo/bHjrZJkqTw==", "dev": true, "requires": { - "@typescript-eslint/experimental-utils": "4.31.0", - "@typescript-eslint/scope-manager": "4.31.0", - "debug": "^4.3.1", + "@typescript-eslint/experimental-utils": "5.9.1", + "@typescript-eslint/scope-manager": "5.9.1", + "@typescript-eslint/type-utils": "5.9.1", + "debug": "^4.3.2", "functional-red-black-tree": "^1.0.1", - "regexpp": "^3.1.0", + "ignore": "^5.1.8", + "regexpp": "^3.2.0", "semver": "^7.3.5", "tsutils": "^3.21.0" } }, "@typescript-eslint/experimental-utils": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-4.31.0.tgz", - "integrity": "sha512-Hld+EQiKLMppgKKkdUsLeVIeEOrwKc2G983NmznY/r5/ZtZCDvIOXnXtwqJIgYz/ymsy7n7RGvMyrzf1WaSQrw==", + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-5.9.1.tgz", + "integrity": "sha512-cb1Njyss0mLL9kLXgS/eEY53SZQ9sT519wpX3i+U457l2UXRDuo87hgKfgRazmu9/tQb0x2sr3Y0yrU+Zz0y+w==", "dev": true, "requires": { - "@types/json-schema": "^7.0.7", - "@typescript-eslint/scope-manager": "4.31.0", - "@typescript-eslint/types": "4.31.0", - 
"@typescript-eslint/typescript-estree": "4.31.0", + "@types/json-schema": "^7.0.9", + "@typescript-eslint/scope-manager": "5.9.1", + "@typescript-eslint/types": "5.9.1", + "@typescript-eslint/typescript-estree": "5.9.1", "eslint-scope": "^5.1.1", "eslint-utils": "^3.0.0" } }, "@typescript-eslint/parser": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-4.31.0.tgz", - "integrity": "sha512-oWbzvPh5amMuTmKaf1wp0ySxPt2ZXHnFQBN2Szu1O//7LmOvgaKTCIDNLK2NvzpmVd5A2M/1j/rujBqO37hj3w==", + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-5.9.1.tgz", + "integrity": "sha512-PLYO0AmwD6s6n0ZQB5kqPgfvh73p0+VqopQQLuNfi7Lm0EpfKyDalchpVwkE+81k5HeiRrTV/9w1aNHzjD7C4g==", "dev": true, "requires": { - "@typescript-eslint/scope-manager": "4.31.0", - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/typescript-estree": "4.31.0", - "debug": "^4.3.1" + "@typescript-eslint/scope-manager": "5.9.1", + "@typescript-eslint/types": "5.9.1", + "@typescript-eslint/typescript-estree": "5.9.1", + "debug": "^4.3.2" } }, "@typescript-eslint/scope-manager": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-4.31.0.tgz", - "integrity": "sha512-LJ+xtl34W76JMRLjbaQorhR0hfRAlp3Lscdiz9NeI/8i+q0hdBZ7BsiYieLoYWqy+AnRigaD3hUwPFugSzdocg==", + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-5.9.1.tgz", + "integrity": "sha512-8BwvWkho3B/UOtzRyW07ffJXPaLSUKFBjpq8aqsRvu6HdEuzCY57+ffT7QoV4QXJXWSU1+7g3wE4AlgImmQ9pQ==", + "dev": true, + "requires": { + "@typescript-eslint/types": "5.9.1", + "@typescript-eslint/visitor-keys": "5.9.1" + } + }, + "@typescript-eslint/type-utils": { + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-5.9.1.tgz", + "integrity": 
"sha512-tRSpdBnPRssjlUh35rE9ug5HrUvaB9ntREy7gPXXKwmIx61TNN7+l5YKgi1hMKxo5NvqZCfYhA5FvyuJG6X6vg==", "dev": true, "requires": { - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/visitor-keys": "4.31.0" + "@typescript-eslint/experimental-utils": "5.9.1", + "debug": "^4.3.2", + "tsutils": "^3.21.0" } }, "@typescript-eslint/types": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-4.31.0.tgz", - "integrity": "sha512-9XR5q9mk7DCXgXLS7REIVs+BaAswfdHhx91XqlJklmqWpTALGjygWVIb/UnLh4NWhfwhR5wNe1yTyCInxVhLqQ==", + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-5.9.1.tgz", + "integrity": "sha512-SsWegWudWpkZCwwYcKoDwuAjoZXnM1y2EbEerTHho19Hmm+bQ56QG4L4jrtCu0bI5STaRTvRTZmjprWlTw/5NQ==", "dev": true }, "@typescript-eslint/typescript-estree": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-4.31.0.tgz", - "integrity": "sha512-QHl2014t3ptg+xpmOSSPn5hm4mY8D4s97ftzyk9BZ8RxYQ3j73XcwuijnJ9cMa6DO4aLXeo8XS3z1omT9LA/Eg==", + "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-5.9.1.tgz", + "integrity": "sha512-gL1sP6A/KG0HwrahVXI9fZyeVTxEYV//6PmcOn1tD0rw8VhUWYeZeuWHwwhnewnvEMcHjhnJLOBhA9rK4vmb8A==", "dev": true, "requires": { - "@typescript-eslint/types": "4.31.0", - "@typescript-eslint/visitor-keys": "4.31.0", - "debug": "^4.3.1", - "globby": "^11.0.3", - "is-glob": "^4.0.1", + "@typescript-eslint/types": "5.9.1", + "@typescript-eslint/visitor-keys": "5.9.1", + "debug": "^4.3.2", + "globby": "^11.0.4", + "is-glob": "^4.0.3", "semver": "^7.3.5", "tsutils": "^3.21.0" } }, "@typescript-eslint/visitor-keys": { - "version": "4.31.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-4.31.0.tgz", - "integrity": "sha512-HUcRp2a9I+P21+O21yu3ezv3GEPGjyGiXoEUQwZXjR8UxRApGeLyWH4ZIIUSalE28aG4YsV6GjtaAVB3QKOu0w==", 
+ "version": "5.9.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-5.9.1.tgz", + "integrity": "sha512-Xh37pNz9e9ryW4TVdwiFzmr4hloty8cFj8GTWMXh3Z8swGwyQWeCcNgF0hm6t09iZd6eiZmIf4zHedQVP6TVtg==", "dev": true, "requires": { - "@typescript-eslint/types": "4.31.0", - "eslint-visitor-keys": "^2.0.0" + "@typescript-eslint/types": "5.9.1", + "eslint-visitor-keys": "^3.0.0" } }, "JSONStream": { @@ -1757,9 +1809,9 @@ "dev": true }, "acorn": { - "version": "7.4.1", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", - "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", "dev": true }, "acorn-jsx": { @@ -1784,9 +1836,9 @@ } }, "agentkeepalive": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.1.4.tgz", - "integrity": "sha512-+V/rGa3EuU74H6wR04plBb7Ks10FbtUQgRj/FQOG7uUIEuaINI+AiqJR1k6t3SVNs7o7ZjIdus6706qqzVq8jQ==", + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.2.0.tgz", + "integrity": "sha512-0PhAp58jZNw13UJv7NVdTGb0ZcghHUb3DrZ046JiiJY/BOaTTpbwdHq2VObPCBV8M2GPh7sgrJ3AQ8Ey468LJw==", "dev": true, "requires": { "debug": "^4.1.0", @@ -1817,50 +1869,29 @@ } }, "ansi-align": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.0.tgz", - "integrity": "sha512-ZpClVKqXN3RGBmKibdfWzqCY4lnjEuoNzU5T0oEFpfd/z5qJHVarukridD4juLO2FXMiwUQxr9WqQtaYa8XRYw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", "dev": true, "requires": { - "string-width": "^3.0.0" + "string-width": 
"^4.1.0" }, "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "emoji-regex": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", - "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", - "dev": true - }, "is-fullwidth-code-point": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, "string-width": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", - "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", - "dev": true, - "requires": { - "emoji-regex": "^7.0.1", - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^5.1.0" - } - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { - "ansi-regex": "^4.1.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" } } } @@ -1889,18 +1920,18 @@ } }, 
"ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true }, "ansi-styles": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", - "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", "dev": true, "requires": { - "color-convert": "^1.9.0" + "color-convert": "^2.0.1" } }, "aproba": { @@ -1920,13 +1951,10 @@ } }, "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "requires": { - "sprintf-js": "~1.0.2" - } + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true }, "array-differ": { "version": "3.0.0", @@ -1959,9 +1987,9 @@ "dev": true }, "asn1": { - "version": "0.2.4", - "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", - "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz", + "integrity": 
"sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==", "dev": true, "requires": { "safer-buffer": "~2.1.0" @@ -1973,12 +2001,6 @@ "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", "dev": true }, - "astral-regex": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", - "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", - "dev": true - }, "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", @@ -2025,26 +2047,43 @@ "dev": true }, "boxen": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.0.1.tgz", - "integrity": "sha512-49VBlw+PrWEF51aCmy7QIteYPIFZxSpvqBdP/2itCPPlJ49kj9zg/XPRFrdkne2W+CfwXUls8exMvu1RysZpKA==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", "dev": true, "requires": { "ansi-align": "^3.0.0", "camelcase": "^6.2.0", "chalk": "^4.1.0", "cli-boxes": "^2.2.1", - "string-width": "^4.2.0", + "string-width": "^4.2.2", "type-fest": "^0.20.2", "widest-line": "^3.1.0", "wrap-ansi": "^7.0.0" }, "dependencies": { "camelcase": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", - "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } } } }, @@ -2133,9 +2172,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -2258,47 +2297,6 @@ "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, - "has-flag": { - "version": 
"4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", - "dev": true - }, - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } } }, "chardet": { @@ -2347,9 +2345,9 @@ } }, "cli-table": { - "version": "0.3.6", - "resolved": "https://registry.npmjs.org/cli-table/-/cli-table-0.3.6.tgz", - "integrity": "sha512-ZkNZbnZjKERTY5NwC2SeMeLeifSPq/pubeRoTpdr3WchLlnZg6hEgvHkK5zL7KNFdd9PmHN8lxrENUwI3cE8vQ==", + "version": "0.3.11", + "resolved": "https://registry.npmjs.org/cli-table/-/cli-table-0.3.11.tgz", + "integrity": "sha512-IqLQi4lO0nIB4tcdTpN4LCB9FI3uqrJZK7RC515EnhZ6qBaglkIgICb1wjeAqpdoOabm1+SuQtkXIPdYC93jhQ==", "dev": true, "requires": { "colors": "1.0.3" @@ -2370,6 +2368,25 @@ "string-width": "^4.2.0", "strip-ansi": "^6.0.0", "wrap-ansi": "^7.0.0" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } } }, "clone": { @@ -2414,18 +2431,24 @@ "dev": true }, "color-convert": { - "version": "1.9.3", - "resolved": 
"https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", - "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", "dev": true, "requires": { - "color-name": "1.1.3" + "color-name": "~1.1.4" } }, "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "color-support": { "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", - "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "dev": true }, "colors": { @@ -2471,9 +2494,9 @@ } }, "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", "dev": true }, "compare-func": { @@ -2649,14 +2672,14 @@ "dev": true }, "conventional-changelog-writer": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-5.0.0.tgz", - "integrity": "sha512-HnDh9QHLNWfL6E1uHz6krZEQOgm8hN7z/m7tT16xwd802fwgMN0Wqd7AQYVkhpsjDUx/99oo+nGgvKF657XP5g==", + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-5.0.1.tgz", + "integrity": "sha512-5WsuKUfxW7suLblAbFnxAcrvf6r+0b7GvNaWUwUIk0bXMnENP/PEieGKVUQrjPqwPT4o3EPAASBXiY6iHooLOQ==", "dev": true, "requires": { "conventional-commits-filter": "^2.0.7", "dateformat": "^3.0.0", - "handlebars": "^4.7.6", + "handlebars": "^4.7.7", "json-stringify-safe": "^5.0.1", "lodash": "^4.17.15", "meow": "^8.0.0", @@ -2684,9 +2707,9 @@ } }, "conventional-commits-parser": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.2.2.tgz", - "integrity": "sha512-Jr9KAKgqAkwXMRHjxDwO/zOCDKod1XdAESHAGuJX38iZ7ZzVti/tvVoysO0suMsdAObp9NQ2rHSsSbnAqZ5f5g==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-3.2.4.tgz", + "integrity": "sha512-nK7sAtfi+QXbxHCYfhpZsfRtaitZLIA6889kFIouLvz6repszQDgxBu7wf2WbU+Dco7sAnNCJYERCwt54WPC2Q==", "dev": true, "requires": { "JSONStream": "^1.0.4", @@ -2771,9 +2794,9 @@ "dev": true }, "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", "dev": true, "requires": { "ms": "2.1.2" @@ -3017,9 +3040,9 @@ } }, "es-abstract": { - "version": "1.18.6", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.6.tgz", - "integrity": "sha512-kAeIT4cku5eNLNuUKhlmtuk1/TRZvQoYccn6TO0cSVdf1kzB0T7+dYuVK9MWM7l+/53W2Q8M7N2c6MQvhXFcUQ==", + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.19.1.tgz", + "integrity": 
"sha512-2vJ6tjA/UfqLm2MPs7jxVybLoB8i1t1Jd9R3kISld20sIxPcTbLuggQOUxeWeAvIUkduv/CfMjuh4WmiXr2v9w==", "dev": true, "requires": { "call-bind": "^1.0.2", @@ -3033,7 +3056,9 @@ "is-callable": "^1.2.4", "is-negative-zero": "^2.0.1", "is-regex": "^1.1.4", + "is-shared-array-buffer": "^1.0.1", "is-string": "^1.0.7", + "is-weakref": "^1.0.1", "object-inspect": "^1.11.0", "object-keys": "^1.1.1", "object.assign": "^4.1.2", @@ -3072,37 +3097,36 @@ "dev": true }, "eslint": { - "version": "7.32.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", - "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.6.0.tgz", + "integrity": "sha512-UvxdOJ7mXFlw7iuHZA4jmzPaUqIw54mZrv+XPYKNbKdLR0et4rf60lIZUU9kiNtnzzMzGWxMV+tQ7uG7JG8DPw==", "dev": true, "requires": { - "@babel/code-frame": "7.12.11", - "@eslint/eslintrc": "^0.4.3", - "@humanwhocodes/config-array": "^0.5.0", + "@eslint/eslintrc": "^1.0.5", + "@humanwhocodes/config-array": "^0.9.2", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", - "debug": "^4.0.1", + "debug": "^4.3.2", "doctrine": "^3.0.0", "enquirer": "^2.3.5", "escape-string-regexp": "^4.0.0", - "eslint-scope": "^5.1.1", - "eslint-utils": "^2.1.0", - "eslint-visitor-keys": "^2.0.0", - "espree": "^7.3.1", + "eslint-scope": "^7.1.0", + "eslint-utils": "^3.0.0", + "eslint-visitor-keys": "^3.1.0", + "espree": "^9.3.0", "esquery": "^1.4.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "functional-red-black-tree": "^1.0.1", - "glob-parent": "^5.1.2", + "glob-parent": "^6.0.1", "globals": "^13.6.0", "ignore": "^4.0.6", "import-fresh": "^3.0.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", - "js-yaml": "^3.13.1", + "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", @@ -3110,30 +3134,37 @@ "natural-compare": 
"^1.4.0", "optionator": "^0.9.1", "progress": "^2.0.0", - "regexpp": "^3.1.0", + "regexpp": "^3.2.0", "semver": "^7.2.1", - "strip-ansi": "^6.0.0", + "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", - "table": "^6.0.9", "text-table": "^0.2.0", "v8-compile-cache": "^2.0.3" }, "dependencies": { - "eslint-utils": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", - "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "eslint-scope": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.1.0.tgz", + "integrity": "sha512-aWwkhnS0qAXqNOgKOK0dJ2nvzEbhEvpy8OlJ9kZ0FeZnA6zpjv1/Vei+puGFFX7zkPCkHHXb7IDX3A+7yPrRWg==", "dev": true, "requires": { - "eslint-visitor-keys": "^1.1.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true - } + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + } + }, + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true + }, + "glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "requires": { + "is-glob": "^4.0.3" } }, "ignore": { @@ -3173,39 +3204,33 @@ "dev": true, "requires": { "eslint-visitor-keys": "^2.0.0" + }, + "dependencies": { + "eslint-visitor-keys": { + "version": "2.1.0", + "resolved": 
"https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true + } } }, "eslint-visitor-keys": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", - "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.1.0.tgz", + "integrity": "sha512-yWJFpu4DtjsWKkt5GeNBBuZMlNcYVs6vRCLoCVEJrTjaSB6LC98gFipNK/erM2Heg/E8mIK+hXG/pJMLK+eRZA==", "dev": true }, "espree": { - "version": "7.3.1", - "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", - "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.3.0.tgz", + "integrity": "sha512-d/5nCsb0JcqsSEeQzFZ8DH1RmxPcglRWh24EFTlUEmCKoehXGdpsx0RkHDubqUI8LSAIKMQp4r9SzQ3n+sm4HQ==", "dev": true, "requires": { - "acorn": "^7.4.0", + "acorn": "^8.7.0", "acorn-jsx": "^5.3.1", - "eslint-visitor-keys": "^1.3.0" - }, - "dependencies": { - "eslint-visitor-keys": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", - "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", - "dev": true - } + "eslint-visitor-keys": "^3.1.0" } }, - "esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true - }, "esquery": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", @@ -3216,9 +3241,9 @@ }, "dependencies": 
{ "estraverse": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", - "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true } } @@ -3233,9 +3258,9 @@ }, "dependencies": { "estraverse": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", - "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true } } @@ -3322,9 +3347,9 @@ "dev": true }, "fast-glob": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.7.tgz", - "integrity": "sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==", + "version": "3.2.10", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.10.tgz", + "integrity": "sha512-s9nFhFnvR63wls6/kM88kQqDhMu0AfdjqouE2l5GVQPbqLgyFjjU5ry/r2yKsJxpb9Py1EYNqieFrmMaX4v++A==", "dev": true, "requires": { "@nodelib/fs.stat": "^2.0.2", @@ -3353,9 +3378,9 @@ "dev": true }, "fastq": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.12.0.tgz", - "integrity": "sha512-VNX0QkHK3RsXVKr9KrlUv/FoTa0NdbYoHHl7uXHv2rzyHSlxjdNAKug2twd9luJxpcyNeAgf5iPPMutJO67Dfg==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "dev": true, "requires": { "reusify": 
"^1.0.4" @@ -3429,9 +3454,9 @@ } }, "flatted": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.2.tgz", - "integrity": "sha512-JaTY/wtrcSyvXJl4IMFHPKyFur1sE9AUqc0QnhOaJ0CxHtAoIV8pYDzeEfAaNEtGkOfq4gr3LBFmdXW5mOQFnA==", + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.4.tgz", + "integrity": "sha512-8/sOawo8tJ4QOBX8YlQBMxL8+RLZfxMQOif9o0KUKTNTjMYElWPE0r/m5VNFxTRd0NSw8qSy8dajrwX4RYI1Hw==", "dev": true }, "forever-agent": { @@ -3518,26 +3543,6 @@ "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", "dev": true }, - "is-fullwidth-code-point": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", - "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", - "dev": true, - "requires": { - "number-is-nan": "^1.0.0" - } - }, - "string-width": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", - "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", - "dev": true, - "requires": { - "code-point-at": "^1.0.0", - "is-fullwidth-code-point": "^1.0.0", - "strip-ansi": "^3.0.0" - } - }, "strip-ansi": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", @@ -3628,9 +3633,9 @@ } }, "git-raw-commits": { - "version": "2.0.10", - "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.10.tgz", - "integrity": "sha512-sHhX5lsbG9SOO6yXdlwgEMQ/ljIn7qMpAbJZCGfXX2fq5T8M5SrDnpYk9/4HswTildcIqatsWa91vty6VhWSaQ==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-2.0.11.tgz", + "integrity": "sha512-VnctFhw+xfj8Va1xtfEqCUD2XDrbAPSJx+hSrE5K7fGdjZruW7XV+QOrN7LF/RJyvspRiD2I0asWsxFp0ya26A==", "dev": true, "requires": { "dargs": "^7.0.0", @@ -3705,9 +3710,9 @@ } }, "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": 
"sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -3745,25 +3750,25 @@ } }, "globals": { - "version": "13.11.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-13.11.0.tgz", - "integrity": "sha512-08/xrJ7wQjK9kkkRoI3OFUBbLx4f+6x3SGwcPvQ0QH6goFDrOU2oyAWrmh3dJezu65buo+HBMzAMQy6rovVC3g==", + "version": "13.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.0.tgz", + "integrity": "sha512-uS8X6lSKN2JumVoXrbUz+uG4BYG+eiawqm3qFcT7ammfbUHeCBoJMlHcec/S3krSk73/AE/f0szYFmgAA3kYZg==", "dev": true, "requires": { "type-fest": "^0.20.2" } }, "globby": { - "version": "11.0.4", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.4.tgz", - "integrity": "sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", "dev": true, "requires": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", - "fast-glob": "^3.1.1", - "ignore": "^5.1.4", - "merge2": "^1.3.0", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", "slash": "^3.0.0" } }, @@ -3798,9 +3803,9 @@ } }, "graceful-fs": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", - "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": 
"sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", "dev": true }, "handlebars": { @@ -3854,9 +3859,9 @@ "dev": true }, "has-flag": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", - "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, "has-symbols": { @@ -3887,9 +3892,9 @@ "dev": true }, "hosted-git-info": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.0.2.tgz", - "integrity": "sha512-c9OGXbZ3guC/xOlCg1Ci/VgWlwsqDv1yMQL1CWqXDL0hDjXuNcq0zuR4xqPSuasI3kqFDhqSyTjREz5gzq0fXg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", "dev": true, "requires": { "lru-cache": "^6.0.0" @@ -3959,9 +3964,9 @@ } }, "ignore": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", - "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", "dev": true }, "ignore-walk": { @@ -3990,9 +3995,9 @@ "dev": true }, "import-local": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.0.2.tgz", - "integrity": "sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + 
"integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, "requires": { "pkg-dir": "^4.2.0", @@ -4040,16 +4045,15 @@ "dev": true }, "init-package-json": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-2.0.4.tgz", - "integrity": "sha512-gUACSdZYka+VvnF90TsQorC+1joAVWNI724vBNj3RD0LLMeDss2IuzaeiQs0T4YzKs76BPHtrp/z3sn2p+KDTw==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-2.0.5.tgz", + "integrity": "sha512-u1uGAtEFu3VA6HNl/yUWw57jmKEMx8SKOxHhxjGnOFUiIlFnohKDFg4ZrPpv9wWqk44nDxGJAtqjdQFm+9XXQA==", "dev": true, "requires": { - "glob": "^7.1.1", - "npm-package-arg": "^8.1.2", + "npm-package-arg": "^8.1.5", "promzard": "^0.3.0", "read": "~1.0.1", - "read-package-json": "^4.0.0", + "read-package-json": "^4.1.1", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", "validate-npm-package-name": "^3.0.0" @@ -4100,6 +4104,25 @@ "string-width": "^4.1.0", "strip-ansi": "^6.0.0", "through": "^2.3.6" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } } }, "internal-slot": { @@ -4160,9 +4183,9 @@ } }, "is-core-module": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.6.0.tgz", - "integrity": 
"sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", "dev": true, "requires": { "has": "^1.0.3" @@ -4184,15 +4207,18 @@ "dev": true }, "is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "dev": true + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } }, "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "requires": { "is-extglob": "^2.1.1" @@ -4215,9 +4241,9 @@ "dev": true }, "is-negative-zero": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", - "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", + "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", "dev": true }, "is-npm": { @@ -4278,6 +4304,12 @@ "has-tostringtag": "^1.0.0" } 
}, + "is-shared-array-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz", + "integrity": "sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA==", + "dev": true + }, "is-ssh": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/is-ssh/-/is-ssh-1.3.3.tgz", @@ -4326,6 +4358,15 @@ "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", "dev": true }, + "is-weakref": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz", + "integrity": "sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ==", + "dev": true, + "requires": { + "call-bind": "^1.0.2" + } + }, "is-yarn-global": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", @@ -4369,13 +4410,12 @@ "dev": true }, "js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, "requires": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" + "argparse": "^2.0.1" } }, "jsbn": { @@ -4412,9 +4452,9 @@ } }, "json-schema": { - "version": "0.2.3", - "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", - "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=", + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", "dev": true }, "json-schema-traverse": { @@ -4473,14 +4513,14 @@ "dev": true }, "jsprim": { - "version": 
"1.4.1", - "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", - "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.2.tgz", + "integrity": "sha512-P2bSOMAc/ciLz6DzgjVlGJP9+BrJWu5UDGK70C2iweC5QBIeFf0ZXRvGjEj2uYgrY2MkAAhsSWHDWlFtEroZWw==", "dev": true, "requires": { "assert-plus": "1.0.0", "extsprintf": "1.3.0", - "json-schema": "0.2.3", + "json-schema": "0.4.0", "verror": "1.10.0" } }, @@ -4569,9 +4609,9 @@ "dev": true }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -4654,9 +4694,9 @@ } }, "lines-and-columns": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.1.6.tgz", - "integrity": "sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA=", + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", "dev": true }, "load-json-file": { @@ -4700,12 +4740,6 @@ "integrity": "sha1-DM8tiRZq8Ds2Y8eWU4t1rG4RTZ0=", "dev": true }, - "lodash.clonedeep": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", - "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=", - "dev": true - }, "lodash.ismatch": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz", @@ -4737,12 +4771,6 @@ "lodash._reinterpolate": "^3.0.0" } }, - "lodash.truncate": { - 
"version": "4.4.2", - "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", - "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", - "dev": true - }, "lowercase-keys": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", @@ -4801,9 +4829,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -4812,9 +4840,9 @@ } }, "map-obj": { - "version": "4.2.1", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.2.1.tgz", - "integrity": "sha512-+WA2/1sPmDj1dlvvJmB5G6JKfY9dpn7EVBUL06+y6PoljPkh+6V1QihwxNkbcGxCRjt2b0F9K0taiCuo7MbdFQ==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", + "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", "dev": true }, "meow": { @@ -4942,18 +4970,18 @@ } }, "mime-db": { - "version": "1.49.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", - "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", "dev": true }, "mime-types": { - "version": "2.1.32", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", - "integrity": 
"sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "dev": true, "requires": { - "mime-db": "1.49.0" + "mime-db": "1.51.0" } }, "mimic-fn": { @@ -5036,9 +5064,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5059,9 +5087,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5089,9 +5117,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5110,9 +5138,9 @@ 
}, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5130,9 +5158,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5150,9 +5178,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5253,10 +5281,37 @@ "dev": true }, "node-fetch": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.2.tgz", - "integrity": "sha512-aLoxToI6RfZ+0NOjmWAgn9+LEd30YCkJKFSyWacNZdEKTit/ZMcKjGkTRo8uWEsnIb/hfKecNPEbln02PdWbcA==", - "dev": true + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz", + "integrity": 
"sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==", + "dev": true, + "requires": { + "whatwg-url": "^5.0.0" + }, + "dependencies": { + "tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o=", + "dev": true + }, + "webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE=", + "dev": true + }, + "whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=", + "dev": true, + "requires": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + } + } }, "node-gyp": { "version": "5.1.1", @@ -5355,15 +5410,15 @@ } }, "npm-check-updates": { - "version": "11.8.5", - "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-11.8.5.tgz", - "integrity": "sha512-IYSHjlWe8UEugDy7X0qjBeJwcni4DlcWdBK4QQEbwgkNlEDlXyd4yQJYWFumKaJzrp/n5/EcvaboXsBD1Er/pw==", + "version": "12.1.0", + "resolved": "https://registry.npmjs.org/npm-check-updates/-/npm-check-updates-12.1.0.tgz", + "integrity": "sha512-9GtSetBvcth7MuL+0MpOmWgdfiEgZcWRfnvoYnNZxbZpleHZCT0Z3HnbsL6/EAT2M+ye7FTZ+YjmDmZptt7Rkg==", "dev": true, "requires": { "chalk": "^4.1.2", "cint": "^8.2.1", - "cli-table": "^0.3.6", - "commander": "^6.2.1", + "cli-table": "^0.3.11", + "commander": "^8.3.0", "fast-memoize": "^2.5.2", "find-up": "5.0.0", "fp-and-or": "^0.1.3", @@ -5376,29 +5431,99 @@ "lodash": "^4.17.21", "minimatch": "^3.0.4", "p-map": "^4.0.0", - "pacote": "^11.3.5", + "pacote": "^12.0.2", "parse-github-url": "^1.0.2", "progress": "^2.0.3", - "prompts": "^2.4.1", + "prompts": "^2.4.2", "rc-config-loader": "^4.0.0", "remote-git-tags": "^3.0.0", "rimraf": "^3.0.2", "semver": "^7.3.5", "semver-utils": "^1.1.4", + "source-map-support": 
"^0.5.21", "spawn-please": "^1.0.0", "update-notifier": "^5.1.0" }, "dependencies": { - "find-up": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", - "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "@npmcli/run-script": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-2.0.0.tgz", + "integrity": "sha512-fSan/Pu11xS/TdaTpTB0MRn9guwGU8dye+x56mEVgBEd/QsybBbYcAL0phPXi8SGWFEChkQd6M9qL4y6VOpFig==", + "dev": true, + "requires": { + "@npmcli/node-gyp": "^1.0.2", + "@npmcli/promise-spawn": "^1.3.2", + "node-gyp": "^8.2.0", + "read-package-json-fast": "^2.0.1" + } + }, + "are-we-there-yet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", + "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", + "dev": true, + "requires": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + } + }, + "chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, "requires": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, + "fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "requires": { + "minipass": "^3.0.0" + } + }, + "gauge": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/gauge/-/gauge-4.0.0.tgz", + "integrity": "sha512-F8sU45yQpjQjxKkm1UOAhf0U/O0aFt//Fl7hsrNVto+patMHjs7dPI9mFOGUKbhrgKm0S3EjW3scMFuQmWSROw==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1", + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.2" + } + }, + "ignore-walk": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-4.0.1.tgz", + "integrity": "sha512-rzDQLaW4jQbh2YrOFlJdCtX8qgJTehFRYiUB2r1osqTeDzV/3+Jh8fz1oAPzUThf3iku8Ds4IDqawI5d8mUiQw==", + "dev": true, + "requires": { + "minimatch": "^3.0.4" + } + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -5408,6 +5533,82 @@ "p-locate": "^5.0.0" } }, + "minipass": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", + "dev": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "dev": true, + "requires": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + } + }, + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": 
"sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true + }, + "node-gyp": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", + "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", + "dev": true, + "requires": { + "env-paths": "^2.2.0", + "glob": "^7.1.4", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^9.1.0", + "nopt": "^5.0.0", + "npmlog": "^6.0.0", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.2", + "which": "^2.0.2" + } + }, + "nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "dev": true, + "requires": { + "abbrev": "1" + } + }, + "npm-packlist": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-3.0.0.tgz", + "integrity": "sha512-L/cbzmutAwII5glUcf2DBRNY/d0TFd4e/FnaZigJV6JD85RHZXJFGwCndjMWiiViiWSsWt3tiOLpI3ByTnIdFQ==", + "dev": true, + "requires": { + "glob": "^7.1.6", + "ignore-walk": "^4.0.1", + "npm-bundled": "^1.1.1", + "npm-normalize-package-bin": "^1.0.1" + } + }, + "npmlog": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.0.tgz", + "integrity": "sha512-03ppFRGlsyUaQFbGC2C8QWJN/C/K7PsfyD9aQdhVKAQIH4sQBc8WASqFBP7O+Ut4d2oo5LoeoboB3cGdBZSp6Q==", + "dev": true, + "requires": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^4.0.0", + "set-blocking": "^2.0.0" + } + }, "p-limit": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", @@ -5425,6 +5626,69 @@ "requires": { "p-limit": "^3.0.2" } + }, + "pacote": { + "version": "12.0.2", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.2.tgz", + "integrity": 
"sha512-Ar3mhjcxhMzk+OVZ8pbnXdb0l8+pimvlsqBGRNkble2NVgyqOGE3yrCGi/lAYq7E7NRDMz89R1Wx5HIMCGgeYg==", + "dev": true, + "requires": { + "@npmcli/git": "^2.1.0", + "@npmcli/installed-package-contents": "^1.0.6", + "@npmcli/promise-spawn": "^1.2.0", + "@npmcli/run-script": "^2.0.0", + "cacache": "^15.0.5", + "chownr": "^2.0.0", + "fs-minipass": "^2.1.0", + "infer-owner": "^1.0.4", + "minipass": "^3.1.3", + "mkdirp": "^1.0.3", + "npm-package-arg": "^8.0.1", + "npm-packlist": "^3.0.0", + "npm-pick-manifest": "^6.0.0", + "npm-registry-fetch": "^11.0.0", + "promise-retry": "^2.0.1", + "read-package-json-fast": "^2.0.1", + "rimraf": "^3.0.2", + "ssri": "^8.0.1", + "tar": "^6.1.0" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "tar": { + "version": "6.1.11", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.1.11.tgz", + "integrity": "sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA==", + "dev": true, + "requires": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^3.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + } } } }, @@ -5520,9 +5784,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": 
"sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5580,9 +5844,9 @@ "dev": true }, "object-inspect": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", "dev": true }, "object-keys": { @@ -5604,14 +5868,14 @@ } }, "object.getownpropertydescriptors": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.2.tgz", - "integrity": "sha512-WtxeKSzfBjlzL+F9b7M7hewDzMwy+C8NRssHd1YrNlzHzIDrXcXiNOMrezdAEM4UXixgV+vvnyBeN7Rygl2ttQ==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.3.tgz", + "integrity": "sha512-VdDoCwvJI4QdC6ndjpqFmoL3/+HxffFBbcJzKi5hwLLqqx3mdbedRpfZDdK0SrOSauj8X4GzBvnDZl4vTN7dOw==", "dev": true, "requires": { "call-bind": "^1.0.2", "define-properties": "^1.1.3", - "es-abstract": "^1.18.0-next.2" + "es-abstract": "^1.19.1" } }, "once": { @@ -5822,9 +6086,9 @@ } }, "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": 
"sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -5902,9 +6166,9 @@ }, "dependencies": { "qs": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.1.tgz", - "integrity": "sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg==", + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", "dev": true, "requires": { "side-channel": "^1.0.4" @@ -5961,9 +6225,9 @@ "dev": true }, "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, "pify": { @@ -5994,9 +6258,9 @@ "dev": true }, "prettier": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.0.tgz", - "integrity": "sha512-DsEPLY1dE5HF3BxCRBmD4uYZ+5DCbvatnolqTqcxEgKVZnL2kUfyu7b8pPQ5+hTBkdhU9SLUmK0/pHb07RE4WQ==", + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.5.1.tgz", + "integrity": "sha512-vBZcPRUR5MZJwoyi3ZoyQlc1rXeEck8KgeC9AwwOn+exuxLxq5toTRDTSaVrXHxelDMHy9zlicw8u66yxoSUFg==", "dev": true }, "process-nextick-args": { @@ -6028,9 +6292,9 @@ } }, "prompts": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.1.tgz", - "integrity": "sha512-EQyfIuO2hPDsX1L/blblV+H7I0knhgAd82cVneCwcdND9B8AuCDuRcBH6yIcG4dFzlOUqbazQqwGjx5xmsNLuQ==", + "version": "2.4.2", + "resolved": 
"https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "dev": true, "requires": { "kleur": "^3.0.3", @@ -6096,9 +6360,9 @@ "dev": true }, "qs": { - "version": "6.5.2", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", - "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==", + "version": "6.5.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.3.tgz", + "integrity": "sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA==", "dev": true }, "query-string": { @@ -6155,23 +6419,6 @@ "js-yaml": "^4.0.0", "json5": "^2.1.2", "require-from-string": "^2.0.2" - }, - "dependencies": { - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "requires": { - "argparse": "^2.0.1" - } - } } }, "read": { @@ -6347,23 +6594,6 @@ "requires": { "js-yaml": "^4.0.0", "strip-bom": "^4.0.0" - }, - "dependencies": { - "argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "requires": { - "argparse": "^2.0.1" - } - } } }, 
"readable-stream": { @@ -6474,13 +6704,14 @@ "dev": true }, "resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.21.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.21.0.tgz", + "integrity": "sha512-3wCbTpk5WJlyE4mSOtDLhqQmGFi0/TD9VPwmiolnk8U0wRgMEktqCXd3vy5buTO3tljvalNvKrjHEfrd2WpEKA==", "dev": true, "requires": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.8.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -6656,9 +6887,9 @@ } }, "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "version": "3.0.6", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.6.tgz", + "integrity": "sha512-sDl4qMFpijcGw22U5w63KmD3cZJfBuFlVNbVMKje2keoKML7X2UzWbc4XrmEbDwg0NXJc3yv4/ox7b+JWb57kQ==", "dev": true }, "sisteransi": { @@ -6673,43 +6904,6 @@ "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true }, - "slice-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", - "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", - "dev": true, - "requires": { - "ansi-styles": "^4.0.0", - "astral-regex": "^2.0.0", - "is-fullwidth-code-point": "^3.0.0" - }, - "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - 
"requires": { - "color-convert": "^2.0.1" - } - }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "requires": { - "color-name": "~1.1.4" - } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - } - } - }, "slide": { "version": "1.1.6", "resolved": "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz", @@ -6733,9 +6927,9 @@ } }, "socks-proxy-agent": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.0.0.tgz", - "integrity": "sha512-FIgZbQWlnjVEQvMkylz64/rUggGtrKstPnx8OZyYFG0tAFR8CSBtpXxSwbFLHyeXFn/cunFL7MpuSOvDSOPo9g==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.1.1.tgz", + "integrity": "sha512-t8J0kG3csjA4g6FTbsMOWws+7R7vuRC8aQ/wy3/1OWmsgwA68zs/+cExQ0koSitUDXqhufF/YJr9wtNMZHw5Ew==", "dev": true, "requires": { "agent-base": "^6.0.2", @@ -6758,6 +6952,16 @@ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "dev": true }, + "source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, "spawn-please": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/spawn-please/-/spawn-please-1.0.0.tgz", @@ -6791,9 +6995,9 @@ } }, "spdx-license-ids": { - "version": "3.0.10", - "resolved": 
"https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.10.tgz", - "integrity": "sha512-oie3/+gKf7QtpitB0LYLETe+k8SifzsX4KixvpOsbI6S0kRiRQ5MKOio8eMSAKQ17N06+wdEOXRiId+zOxo0hA==", + "version": "3.0.11", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz", + "integrity": "sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g==", "dev": true }, "split": { @@ -6833,16 +7037,10 @@ } } }, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true - }, "sshpk": { - "version": "1.16.1", - "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", - "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", + "version": "1.17.0", + "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.17.0.tgz", + "integrity": "sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ==", "dev": true, "requires": { "asn1": "~0.2.3", @@ -6866,9 +7064,9 @@ }, "dependencies": { "minipass": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.3.tgz", - "integrity": "sha512-Mgd2GdMVzY+x3IJ+oHnVM+KG3lA5c8tnabyJKmHSaG2kAGpudxuOf8ToDkhumF7UzME7DecbQE9uOZhNm7PuJg==", + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.1.6.tgz", + "integrity": "sha512-rty5kpw9/z8SX9dmxblFA6edItUmwJgMeYDZRrwlIVN27i8gysGbznJwUggw2V/FVqFSDdWy040ZPS811DYAqQ==", "dev": true, "requires": { "yallist": "^4.0.0" @@ -6883,14 +7081,31 @@ "dev": true }, "string-width": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", - "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", "dev": true, "requires": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "dependencies": { + "ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } + } } }, "string.prototype.trimend": { @@ -6923,12 +7138,12 @@ } }, "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } }, "strip-bom": { @@ -6970,14 +7185,20 @@ } }, "supports-color": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", - "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { - "has-flag": "^3.0.0" + "has-flag": "^4.0.0" } }, + "supports-preserve-symlinks-flag": { + 
"version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "syncpack": { "version": "5.8.15", "resolved": "https://registry.npmjs.org/syncpack/-/syncpack-5.8.15.tgz", @@ -7023,40 +7244,20 @@ "jsonfile": "^6.0.1", "universalify": "^2.0.0" } - } - } - }, - "table": { - "version": "6.7.1", - "resolved": "https://registry.npmjs.org/table/-/table-6.7.1.tgz", - "integrity": "sha512-ZGum47Yi6KOOFDE8m223td53ath2enHcYLgOCjGr5ngu8bdIARQk6mN/wRMv4yMRcHnCSnHbCEha4sobQx5yWg==", - "dev": true, - "requires": { - "ajv": "^8.0.1", - "lodash.clonedeep": "^4.5.0", - "lodash.truncate": "^4.4.2", - "slice-ansi": "^4.0.0", - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0" - }, - "dependencies": { - "ajv": { - "version": "8.6.2", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.2.tgz", - "integrity": "sha512-9807RlWAgT564wT+DjeyU5OFMPjmzxVobvDFmNAhY+5zD6A2ly3jDp6sgnfyDtlIQ+7H97oc/DGCzzfu9rjw9w==", + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", "dev": true, "requires": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" } - }, - "json-schema-traverse": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", - "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", - "dev": true } } }, @@ -7275,15 +7476,15 @@ } }, "typescript": { - "version": "4.4.2", - "resolved": 
"https://registry.npmjs.org/typescript/-/typescript-4.4.2.tgz", - "integrity": "sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ==", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.4.tgz", + "integrity": "sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==", "dev": true }, "uglify-js": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.14.2.tgz", - "integrity": "sha512-rtPMlmcO4agTUfz10CbgJ1k6UAoXM2gWb3GoMPPZB/+/Ackf8lNWk11K4rYi2D0apgoFRLtQOZhb+/iGNJq26A==", + "version": "3.14.5", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.14.5.tgz", + "integrity": "sha512-qZukoSxOG0urUTvjc2ERMTcAy+BiFh3weWAkeurLwjrCba73poHmG3E36XEjd/JGukMzwTL7uCxZiAexj8ppvQ==", "dev": true, "optional": true }, @@ -7510,56 +7711,42 @@ } }, "wide-align": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", - "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "widest-line": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", "dev": true, "requires": { - "string-width": "^1.0.2 || 2" + "string-width": "^4.0.0" }, "dependencies": { - "ansi-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", - "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", - "dev": true - }, "is-fullwidth-code-point": { - 
"version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", - "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "dev": true }, "string-width": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", - "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { - "is-fullwidth-code-point": "^2.0.0", - "strip-ansi": "^4.0.0" - } - }, - "strip-ansi": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", - "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", - "dev": true, - "requires": { - "ansi-regex": "^3.0.0" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" } } } }, - "widest-line": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", - "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", - "dev": true, - "requires": { - "string-width": "^4.0.0" - } - }, "word-wrap": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", @@ -7583,29 +7770,22 @@ "strip-ansi": "^6.0.0" }, "dependencies": { - "ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "requires": { - "color-convert": "^2.0.1" - } + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true }, - "color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { - "color-name": "~1.1.4" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" } - }, - "color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true } } }, @@ -7766,6 +7946,25 @@ "string-width": "^4.2.0", "y18n": "^5.0.5", "yargs-parser": "^20.2.2" + }, + "dependencies": { + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + 
"requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + } } }, "yargs-parser": { diff --git a/package.json b/package.json index bca2f0accacf..fcdcd6c8cd3d 100644 --- a/package.json +++ b/package.json @@ -20,23 +20,23 @@ "check:lockindent": "node ./build/checkLockIndent.js" }, "devDependencies": { - "@types/glob": "^7.1.4", - "@types/node": "^12.20.24", + "@types/glob": "^7.2.0", + "@types/node": "^17.0.14", "@types/yargs": "^16.0.4", - "@typescript-eslint/eslint-plugin": "^4.31.0", - "@typescript-eslint/parser": "^4.31.0", + "@typescript-eslint/eslint-plugin": "^5.9.1", + "@typescript-eslint/parser": "^5.9.1", "detect-indent": "^6.1.0", - "eslint": "^7.32.0", + "eslint": "^8.6.0", "eslint-config-prettier": "^8.3.0", "eslint-plugin-simple-import-sort": "^7.0.0", - "glob": "^7.1.7", + "glob": "^7.2.0", "jsonc-parser": "^3.0.0", "lerna": "^4.0.0", - "npm-check-updates": "^11.8.5", + "npm-check-updates": "^12.1.0", "p-queue": "^6.6.2", - "prettier": "2.4.0", + "prettier": "2.5.1", "syncpack": "^5.8.15", - "typescript": "~4.4.2", + "typescript": "~4.4.4", "yargs": "^16.2.0" } } diff --git a/packages/browser-pyright/src/browser-server.ts b/packages/browser-pyright/src/browser-server.ts index 9053df597ee0..2c642aa7a422 100644 --- a/packages/browser-pyright/src/browser-server.ts +++ b/packages/browser-pyright/src/browser-server.ts @@ -18,7 +18,7 @@ import { ConsoleInterface, ConsoleWithLogLevel, LogLevel } from 'pyright-interna import { isString } from 'pyright-internal/common/core'; import { FileSystem, nullFileWatcherProvider } from 'pyright-internal/common/fileSystem'; import { Host, NoAccessHost } from 'pyright-internal/common/host'; -import { convertUriToPath, normalizeSlashes, resolvePaths } from 'pyright-internal/common/pathUtils'; +import { normalizeSlashes, resolvePaths } from 'pyright-internal/common/pathUtils'; import { ProgressReporter } from 'pyright-internal/common/progressReporter'; import { 
createWorker, parentPort } from 'pyright-internal/common/workersHost'; import { LanguageServerBase, ServerSettings, WorkspaceServiceInstance } from 'pyright-internal/languageServerBase'; @@ -89,7 +89,7 @@ export class PyrightServer extends LanguageServerBase { super.setupConnection(supportedCommands, supportedCodeActions); // A non-standard way to mutate the file system. this._connection.onNotification('pyright/createFile', (params: CreateFile) => { - const filePath = convertUriToPath(this._serverOptions.fileSystem, params.uri); + const filePath = this._uriParser.decodeTextDocumentUri(params.uri); (this._serverOptions.fileSystem as TestFileSystem).apply({ [filePath]: '' }); this._workspaceMap.forEach((workspace) => { const backgroundAnalysis = workspace.serviceInstance.backgroundAnalysisProgram.backgroundAnalysis; @@ -98,7 +98,7 @@ export class PyrightServer extends LanguageServerBase { }); }); this._connection.onNotification('pyright/deleteFile', (params: DeleteFile) => { - const filePath = convertUriToPath(this._serverOptions.fileSystem, params.uri); + const filePath = this._uriParser.decodeTextDocumentUri(params.uri); this._serverOptions.fileSystem.unlinkSync(filePath); this._workspaceMap.forEach((workspace) => { const backgroundAnalysis = workspace.serviceInstance.backgroundAnalysisProgram.backgroundAnalysis; @@ -293,7 +293,7 @@ export class PyrightServer extends LanguageServerBase { ): Promise<(Command | CodeAction)[] | undefined | null> { this.recordUserInteractionTime(); - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + const filePath = this._uriParser.decodeTextDocumentUri(params.textDocument.uri); const workspace = await this.getWorkspaceForFile(filePath); return CodeActionProvider.getCodeActionsForPosition(workspace, filePath, params.range, token); } diff --git a/packages/pyright-internal/package-lock.json b/packages/pyright-internal/package-lock.json index 05a6a9df78c1..e7783aa630cd 100644 --- 
a/packages/pyright-internal/package-lock.json +++ b/packages/pyright-internal/package-lock.json @@ -1,62 +1,64 @@ { "name": "pyright-internal", - "version": "1.1.170", + "version": "1.1.225", "lockfileVersion": 1, "requires": true, "dependencies": { + "@ampproject/remapping": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.0.2.tgz", + "integrity": "sha512-sE8Gx+qSDMLoJvb3QarJJlDQK7SSY4rK3hxp4XsiANeFOmjU46ZI7Y9adAQRJrmbz8zbtZkp3mJTT+rGxtF0XA==", + "dev": true, + "requires": { + "@jridgewell/trace-mapping": "^0.2.2", + "sourcemap-codec": "1.4.8" + } + }, "@babel/code-frame": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.14.5.tgz", - "integrity": "sha512-9pzDqyc6OLDaqe+zbACgFkb6fKMNG6CObKpnYXChRsvYGyEdc7CA2BaqeOM+vOtCS5ndmJicPJhKAwYRI6UfFw==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.16.7.tgz", + "integrity": "sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg==", "dev": true, "requires": { - "@babel/highlight": "^7.14.5" + "@babel/highlight": "^7.16.7" } }, "@babel/compat-data": { - "version": "7.15.0", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.15.0.tgz", - "integrity": "sha512-0NqAC1IJE0S0+lL1SWFMxMkz1pKCNCjI4tr2Zx4LJSXxCLAdr6KyArnY+sno5m3yH9g737ygOyPABDsnXkpxiA==", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.17.0.tgz", + "integrity": "sha512-392byTlpGWXMv4FbyWw3sAZ/FrW/DrwqLGXpy0mbyNe9Taqv1mg9yON5/o0cnr8XYCkFTZbC1eV+c+LAROgrng==", "dev": true }, "@babel/core": { - "version": "7.15.5", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.15.5.tgz", - "integrity": "sha512-pYgXxiwAgQpgM1bNkZsDEq85f0ggXMA5L7c+o3tskGMh2BunCI9QUwB9Z4jpvXUOuMdyGKiGKQiRe11VS6Jzvg==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.14.5", - "@babel/generator": "^7.15.4", - 
"@babel/helper-compilation-targets": "^7.15.4", - "@babel/helper-module-transforms": "^7.15.4", - "@babel/helpers": "^7.15.4", - "@babel/parser": "^7.15.5", - "@babel/template": "^7.15.4", - "@babel/traverse": "^7.15.4", - "@babel/types": "^7.15.4", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.17.0.tgz", + "integrity": "sha512-x/5Ea+RO5MvF9ize5DeVICJoVrNv0Mi2RnIABrZEKYvPEpldXwauPkgvYA17cKa6WpU3LoYvYbuEMFtSNFsarA==", + "dev": true, + "requires": { + "@ampproject/remapping": "^2.0.0", + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.17.0", + "@babel/helper-compilation-targets": "^7.16.7", + "@babel/helper-module-transforms": "^7.16.7", + "@babel/helpers": "^7.17.0", + "@babel/parser": "^7.17.0", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.17.0", + "@babel/types": "^7.17.0", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.1.2", - "semver": "^6.3.0", - "source-map": "^0.5.0" - }, - "dependencies": { - "source-map": { - "version": "0.5.7", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", - "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", - "dev": true - } + "semver": "^6.3.0" } }, "@babel/generator": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.15.4.tgz", - "integrity": "sha512-d3itta0tu+UayjEORPNz6e1T3FtvWlP5N4V5M+lhp/CxT4oAA7/NcScnpRyspUMLK6tu9MNHmQHxRykuN2R7hw==", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.17.0.tgz", + "integrity": "sha512-I3Omiv6FGOC29dtlZhkfXO6pgkmukJSlT26QjVvS1DGZe/NzSVCPG41X0tS21oZkJYlovfj9qDWgKP+Cn4bXxw==", "dev": true, "requires": { - "@babel/types": "^7.15.4", + "@babel/types": "^7.17.0", "jsesc": "^2.5.1", "source-map": "^0.5.0" }, @@ -70,155 +72,134 @@ } }, "@babel/helper-compilation-targets": { - "version": "7.15.4", - "resolved": 
"https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.15.4.tgz", - "integrity": "sha512-rMWPCirulnPSe4d+gwdWXLfAXTTBj8M3guAf5xFQJ0nvFY7tfNAFnWdqaHegHlgDZOCT4qvhF3BYlSJag8yhqQ==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.16.7.tgz", + "integrity": "sha512-mGojBwIWcwGD6rfqgRXVlVYmPAv7eOpIemUG3dGnDdCY4Pae70ROij3XmfrH6Fa1h1aiDylpglbZyktfzyo/hA==", "dev": true, "requires": { - "@babel/compat-data": "^7.15.0", - "@babel/helper-validator-option": "^7.14.5", - "browserslist": "^4.16.6", + "@babel/compat-data": "^7.16.4", + "@babel/helper-validator-option": "^7.16.7", + "browserslist": "^4.17.5", "semver": "^6.3.0" } }, - "@babel/helper-function-name": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.15.4.tgz", - "integrity": "sha512-Z91cOMM4DseLIGOnog+Z8OI6YseR9bua+HpvLAQ2XayUGU+neTtX+97caALaLdyu53I/fjhbeCnWnRH1O3jFOw==", + "@babel/helper-environment-visitor": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.16.7.tgz", + "integrity": "sha512-SLLb0AAn6PkUeAfKJCCOl9e1R53pQlGAfc4y4XuMRZfqeMYLE0dM1LMhqbGAlGQY0lfw5/ohoYWAe9V1yibRag==", "dev": true, "requires": { - "@babel/helper-get-function-arity": "^7.15.4", - "@babel/template": "^7.15.4", - "@babel/types": "^7.15.4" + "@babel/types": "^7.16.7" } }, - "@babel/helper-get-function-arity": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.15.4.tgz", - "integrity": "sha512-1/AlxSF92CmGZzHnC515hm4SirTxtpDnLEJ0UyEMgTMZN+6bxXKg04dKhiRx5Enel+SUA1G1t5Ed/yQia0efrA==", + "@babel/helper-function-name": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.16.7.tgz", + "integrity": 
"sha512-QfDfEnIUyyBSR3HtrtGECuZ6DAyCkYFp7GHl75vFtTnn6pjKeK0T1DB5lLkFvBea8MdaiUABx3osbgLyInoejA==", "dev": true, "requires": { - "@babel/types": "^7.15.4" + "@babel/helper-get-function-arity": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/types": "^7.16.7" } }, - "@babel/helper-hoist-variables": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.15.4.tgz", - "integrity": "sha512-VTy085egb3jUGVK9ycIxQiPbquesq0HUQ+tPO0uv5mPEBZipk+5FkRKiWq5apuyTE9FUrjENB0rCf8y+n+UuhA==", + "@babel/helper-get-function-arity": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.16.7.tgz", + "integrity": "sha512-flc+RLSOBXzNzVhcLu6ujeHUrD6tANAOU5ojrRx/as+tbzf8+stUCj7+IfRRoAbEZqj/ahXEMsjhOhgeZsrnTw==", "dev": true, "requires": { - "@babel/types": "^7.15.4" + "@babel/types": "^7.16.7" } }, - "@babel/helper-member-expression-to-functions": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.15.4.tgz", - "integrity": "sha512-cokOMkxC/BTyNP1AlY25HuBWM32iCEsLPI4BHDpJCHHm1FU2E7dKWWIXJgQgSFiu4lp8q3bL1BIKwqkSUviqtA==", + "@babel/helper-hoist-variables": { + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.16.7.tgz", + "integrity": "sha512-m04d/0Op34H5v7pbZw6pSKP7weA6lsMvfiIAMeIvkY/R4xQtBSMFEigu9QTZ2qB/9l22vsxtM8a+Q8CzD255fg==", "dev": true, "requires": { - "@babel/types": "^7.15.4" + "@babel/types": "^7.16.7" } }, "@babel/helper-module-imports": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.15.4.tgz", - "integrity": "sha512-jeAHZbzUwdW/xHgHQ3QmWR4Jg6j15q4w/gCfwZvtqOxoo5DKtLHk8Bsf4c5RZRC7NmLEs+ohkdq8jFefuvIxAA==", + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.16.7.tgz", + "integrity": "sha512-LVtS6TqjJHFc+nYeITRo6VLXve70xmq7wPhWTqDJusJEgGmkAACWwMiTNrvfoQo6hEhFwAIixNkvB0jPXDL8Wg==", "dev": true, "requires": { - "@babel/types": "^7.15.4" + "@babel/types": "^7.16.7" } }, "@babel/helper-module-transforms": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.15.4.tgz", - "integrity": "sha512-9fHHSGE9zTC++KuXLZcB5FKgvlV83Ox+NLUmQTawovwlJ85+QMhk1CnVk406CQVj97LaWod6KVjl2Sfgw9Aktw==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.16.7.tgz", + "integrity": "sha512-gaqtLDxJEFCeQbYp9aLAefjhkKdjKcdh6DB7jniIGU3Pz52WAmP268zK0VgPz9hUNkMSYeH976K2/Y6yPadpng==", "dev": true, "requires": { - "@babel/helper-module-imports": "^7.15.4", - "@babel/helper-replace-supers": "^7.15.4", - "@babel/helper-simple-access": "^7.15.4", - "@babel/helper-split-export-declaration": "^7.15.4", - "@babel/helper-validator-identifier": "^7.14.9", - "@babel/template": "^7.15.4", - "@babel/traverse": "^7.15.4", - "@babel/types": "^7.15.4" - } - }, - "@babel/helper-optimise-call-expression": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.15.4.tgz", - "integrity": "sha512-E/z9rfbAOt1vDW1DR7k4SzhzotVV5+qMciWV6LaG1g4jeFrkDlJedjtV4h0i4Q/ITnUu+Pk08M7fczsB9GXBDw==", - "dev": true, - "requires": { - "@babel/types": "^7.15.4" + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-module-imports": "^7.16.7", + "@babel/helper-simple-access": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/helper-validator-identifier": "^7.16.7", + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.16.7", + "@babel/types": "^7.16.7" } }, "@babel/helper-plugin-utils": { - "version": "7.14.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.14.5.tgz", - "integrity": "sha512-/37qQCE3K0vvZKwoK4XU/irIJQdIfCJuhU5eKnNxpFDsOkgFaUAwbv+RYw6eYgsC0E4hS7r5KqGULUogqui0fQ==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.16.7.tgz", + "integrity": "sha512-Qg3Nk7ZxpgMrsox6HreY1ZNKdBq7K72tDSliA6dCl5f007jR4ne8iD5UzuNnCJH2xBf2BEEVGr+/OL6Gdp7RxA==", "dev": true }, - "@babel/helper-replace-supers": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.15.4.tgz", - "integrity": "sha512-/ztT6khaXF37MS47fufrKvIsiQkx1LBRvSJNzRqmbyeZnTwU9qBxXYLaaT/6KaxfKhjs2Wy8kG8ZdsFUuWBjzw==", - "dev": true, - "requires": { - "@babel/helper-member-expression-to-functions": "^7.15.4", - "@babel/helper-optimise-call-expression": "^7.15.4", - "@babel/traverse": "^7.15.4", - "@babel/types": "^7.15.4" - } - }, "@babel/helper-simple-access": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.15.4.tgz", - "integrity": "sha512-UzazrDoIVOZZcTeHHEPYrr1MvTR/K+wgLg6MY6e1CJyaRhbibftF6fR2KU2sFRtI/nERUZR9fBd6aKgBlIBaPg==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.16.7.tgz", + "integrity": "sha512-ZIzHVyoeLMvXMN/vok/a4LWRy8G2v205mNP0XOuf9XRLyX5/u9CnVulUtDgUTama3lT+bf/UqucuZjqiGuTS1g==", "dev": true, "requires": { - "@babel/types": "^7.15.4" + "@babel/types": "^7.16.7" } }, "@babel/helper-split-export-declaration": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.15.4.tgz", - "integrity": "sha512-HsFqhLDZ08DxCpBdEVtKmywj6PQbwnF6HHybur0MAnkAKnlS6uHkwnmRIkElB2Owpfb4xL4NwDmDLFubueDXsw==", + "version": "7.16.7", + "resolved": 
"https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.16.7.tgz", + "integrity": "sha512-xbWoy/PFoxSWazIToT9Sif+jJTlrMcndIsaOKvTA6u7QEo7ilkRZpjew18/W3c7nm8fXdUDXh02VXTbZ0pGDNw==", "dev": true, "requires": { - "@babel/types": "^7.15.4" + "@babel/types": "^7.16.7" } }, "@babel/helper-validator-identifier": { - "version": "7.14.9", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.14.9.tgz", - "integrity": "sha512-pQYxPY0UP6IHISRitNe8bsijHex4TWZXi2HwKVsjPiltzlhse2znVcm9Ace510VT1kxIHjGJCZZQBX2gJDbo0g==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz", + "integrity": "sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw==", "dev": true }, "@babel/helper-validator-option": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.14.5.tgz", - "integrity": "sha512-OX8D5eeX4XwcroVW45NMvoYaIuFI+GQpA2a8Gi+X/U/cDUIRsV37qQfF905F0htTRCREQIB4KqPeaveRJUl3Ow==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.16.7.tgz", + "integrity": "sha512-TRtenOuRUVo9oIQGPC5G9DgK4743cdxvtOw0weQNpZXaS16SCBi5MNjZF8vba3ETURjZpTbVn7Vvcf2eAwFozQ==", "dev": true }, "@babel/helpers": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.15.4.tgz", - "integrity": "sha512-V45u6dqEJ3w2rlryYYXf6i9rQ5YMNu4FLS6ngs8ikblhu2VdR1AqAd6aJjBzmf2Qzh6KOLqKHxEN9+TFbAkAVQ==", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.17.0.tgz", + "integrity": "sha512-Xe/9NFxjPwELUvW2dsukcMZIp6XwPSbI4ojFBJuX5ramHuVE22SVcZIwqzdWo5uCgeTXW8qV97lMvSOjq+1+nQ==", "dev": true, "requires": { - "@babel/template": "^7.15.4", - "@babel/traverse": "^7.15.4", - "@babel/types": 
"^7.15.4" + "@babel/template": "^7.16.7", + "@babel/traverse": "^7.17.0", + "@babel/types": "^7.17.0" } }, "@babel/highlight": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.14.5.tgz", - "integrity": "sha512-qf9u2WFWVV0MppaL877j2dBtQIDgmidgjGk5VIMw3OadXvYaXn66U1BFlH2t4+t3i+8PhedppRv+i40ABzd+gg==", + "version": "7.16.10", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.16.10.tgz", + "integrity": "sha512-5FnTQLSLswEj6IkgVw5KusNUUFY9ZGqe/TRFnP/BKYHYgfh7tc+C7mwiy95/yNP7Dh9x580Vv8r7u7ZfTBFxdw==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.14.5", + "@babel/helper-validator-identifier": "^7.16.7", "chalk": "^2.0.0", "js-tokens": "^4.0.0" }, @@ -276,9 +257,9 @@ } }, "@babel/parser": { - "version": "7.15.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.15.5.tgz", - "integrity": "sha512-2hQstc6I7T6tQsWzlboMh3SgMRPaS4H6H7cPQsJkdzTzEGqQrpLDsE2BGASU5sBPoEQyHzeqU6C8uKbFeEk6sg==", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.17.0.tgz", + "integrity": "sha512-VKXSCQx5D8S04ej+Dqsr1CzYvvWgf20jIw2D+YhQCrIlr2UZGaDds23Y0xg75/skOxpLCRpUZvk/1EAVkGoDOw==", "dev": true }, "@babel/plugin-syntax-async-generators": { @@ -390,49 +371,50 @@ } }, "@babel/plugin-syntax-typescript": { - "version": "7.14.5", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.14.5.tgz", - "integrity": "sha512-u6OXzDaIXjEstBRRoBCQ/uKQKlbuaeE5in0RvWdA4pN6AhqxTIwUsnHPU1CFZA/amYObMsuWhYfRl3Ch90HD0Q==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.16.7.tgz", + "integrity": "sha512-YhUIJHHGkqPgEcMYkPCKTyGUdoGKWtopIycQyjJH8OjvRgOYsXsaKehLVPScKJWAULPxMa4N1vCe6szREFlZ7A==", "dev": true, "requires": { - "@babel/helper-plugin-utils": "^7.14.5" + "@babel/helper-plugin-utils": "^7.16.7" } }, "@babel/template": { - "version": 
"7.15.4", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.15.4.tgz", - "integrity": "sha512-UgBAfEa1oGuYgDIPM2G+aHa4Nlo9Lh6mGD2bDBGMTbYnc38vulXPuC1MGjYILIEmlwl6Rd+BPR9ee3gm20CBtg==", + "version": "7.16.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.16.7.tgz", + "integrity": "sha512-I8j/x8kHUrbYRTUxXrrMbfCa7jxkE7tZre39x3kjr9hvI82cK1FfqLygotcWN5kdPGWcLdWMHpSBavse5tWw3w==", "dev": true, "requires": { - "@babel/code-frame": "^7.14.5", - "@babel/parser": "^7.15.4", - "@babel/types": "^7.15.4" + "@babel/code-frame": "^7.16.7", + "@babel/parser": "^7.16.7", + "@babel/types": "^7.16.7" } }, "@babel/traverse": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.15.4.tgz", - "integrity": "sha512-W6lQD8l4rUbQR/vYgSuCAE75ADyyQvOpFVsvPPdkhf6lATXAsQIG9YdtOcu8BB1dZ0LKu+Zo3c1wEcbKeuhdlA==", - "dev": true, - "requires": { - "@babel/code-frame": "^7.14.5", - "@babel/generator": "^7.15.4", - "@babel/helper-function-name": "^7.15.4", - "@babel/helper-hoist-variables": "^7.15.4", - "@babel/helper-split-export-declaration": "^7.15.4", - "@babel/parser": "^7.15.4", - "@babel/types": "^7.15.4", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.17.0.tgz", + "integrity": "sha512-fpFIXvqD6kC7c7PUNnZ0Z8cQXlarCLtCUpt2S1Dx7PjoRtCFffvOkHHSom+m5HIxMZn5bIBVb71lhabcmjEsqg==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.16.7", + "@babel/generator": "^7.17.0", + "@babel/helper-environment-visitor": "^7.16.7", + "@babel/helper-function-name": "^7.16.7", + "@babel/helper-hoist-variables": "^7.16.7", + "@babel/helper-split-export-declaration": "^7.16.7", + "@babel/parser": "^7.17.0", + "@babel/types": "^7.17.0", "debug": "^4.1.0", "globals": "^11.1.0" } }, "@babel/types": { - "version": "7.15.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.15.4.tgz", - "integrity": 
"sha512-0f1HJFuGmmbrKTCZtbm3cU+b/AqdEYk5toj5iQur58xkVMlS0JWaKxTBSmCXd47uiN7vbcozAupm6Mvs80GNhw==", + "version": "7.17.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.17.0.tgz", + "integrity": "sha512-TmKSNO4D5rzhL5bjWFcVHHLETzfQ/AmbKpKPOSjlP0WoHZ6L911fgoOKY4Alp/emzG4cHJdyN49zpgkbXFEHHw==", "dev": true, "requires": { - "@babel/helper-validator-identifier": "^7.14.9", + "@babel/helper-validator-identifier": "^7.16.7", "to-fast-properties": "^2.0.0" } }, @@ -467,129 +449,129 @@ "dev": true }, "@jest/console": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.1.1.tgz", - "integrity": "sha512-VpQJRsWSeAem0zpBjeRtDbcD6DlbNoK11dNYt+PSQ+DDORh9q2/xyEpErfwgnLjWX0EKkSZmTGx/iH9Inzs6vQ==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-27.4.6.tgz", + "integrity": "sha512-jauXyacQD33n47A44KrlOVeiXHEXDqapSdfb9kTekOchH/Pd18kBIO1+xxJQRLuG+LUuljFCwTG92ra4NW7SpA==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "@types/node": "*", "chalk": "^4.0.0", - "jest-message-util": "^27.1.1", - "jest-util": "^27.1.1", + "jest-message-util": "^27.4.6", + "jest-util": "^27.4.2", "slash": "^3.0.0" } }, "@jest/core": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.1.1.tgz", - "integrity": "sha512-oCkKeTgI0emznKcLoq5OCD0PhxCijA4l7ejDnWW3d5bgSi+zfVaLybVqa+EQOxpNejQWtTna7tmsAXjMN9N43Q==", + "version": "27.4.7", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-27.4.7.tgz", + "integrity": "sha512-n181PurSJkVMS+kClIFSX/LLvw9ExSb+4IMtD6YnfxZVerw9ANYtW0bPrm0MJu2pfe9SY9FJ9FtQ+MdZkrZwjg==", "dev": true, "requires": { - "@jest/console": "^27.1.1", - "@jest/reporters": "^27.1.1", - "@jest/test-result": "^27.1.1", - "@jest/transform": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/console": "^27.4.6", + "@jest/reporters": "^27.4.6", + "@jest/test-result": "^27.4.6", + "@jest/transform": "^27.4.6", + "@jest/types": 
"^27.4.2", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", "emittery": "^0.8.1", "exit": "^0.1.2", "graceful-fs": "^4.2.4", - "jest-changed-files": "^27.1.1", - "jest-config": "^27.1.1", - "jest-haste-map": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-regex-util": "^27.0.6", - "jest-resolve": "^27.1.1", - "jest-resolve-dependencies": "^27.1.1", - "jest-runner": "^27.1.1", - "jest-runtime": "^27.1.1", - "jest-snapshot": "^27.1.1", - "jest-util": "^27.1.1", - "jest-validate": "^27.1.1", - "jest-watcher": "^27.1.1", + "jest-changed-files": "^27.4.2", + "jest-config": "^27.4.7", + "jest-haste-map": "^27.4.6", + "jest-message-util": "^27.4.6", + "jest-regex-util": "^27.4.0", + "jest-resolve": "^27.4.6", + "jest-resolve-dependencies": "^27.4.6", + "jest-runner": "^27.4.6", + "jest-runtime": "^27.4.6", + "jest-snapshot": "^27.4.6", + "jest-util": "^27.4.2", + "jest-validate": "^27.4.6", + "jest-watcher": "^27.4.6", "micromatch": "^4.0.4", - "p-each-series": "^2.1.0", "rimraf": "^3.0.0", "slash": "^3.0.0", "strip-ansi": "^6.0.0" } }, "@jest/environment": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.1.1.tgz", - "integrity": "sha512-+y882/ZdxhyqF5RzxIrNIANjHj991WH7jifdcplzMDosDUOyCACFYUyVTBGbSTocbU+s1cesroRzkwi8hZ9SHg==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-27.4.6.tgz", + "integrity": "sha512-E6t+RXPfATEEGVidr84WngLNWZ8ffCPky8RqqRK6u1Bn0LK92INe0MDttyPl/JOzaq92BmDzOeuqk09TvM22Sg==", "dev": true, "requires": { - "@jest/fake-timers": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/fake-timers": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", - "jest-mock": "^27.1.1" + "jest-mock": "^27.4.6" } }, "@jest/fake-timers": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.1.1.tgz", - "integrity": 
"sha512-u8TJ5VlsVYTsGFatoyIae2l25pku4Bu15QCPTx2Gs5z+R//Ee3tHN85462Vc9yGVcdDvgADbqNkhOLxbEwPjMQ==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-27.4.6.tgz", + "integrity": "sha512-mfaethuYF8scV8ntPpiVGIHQgS0XIALbpY2jt2l7wb/bvq4Q5pDLk4EP4D7SAvYT1QrPOPVZAtbdGAOOyIgs7A==", "dev": true, "requires": { - "@jest/types": "^27.1.1", - "@sinonjs/fake-timers": "^7.0.2", + "@jest/types": "^27.4.2", + "@sinonjs/fake-timers": "^8.0.1", "@types/node": "*", - "jest-message-util": "^27.1.1", - "jest-mock": "^27.1.1", - "jest-util": "^27.1.1" + "jest-message-util": "^27.4.6", + "jest-mock": "^27.4.6", + "jest-util": "^27.4.2" } }, "@jest/globals": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.1.1.tgz", - "integrity": "sha512-Q3JcTPmY+DAEHnr4MpnBV3mwy50EGrTC6oSDTNnW7FNGGacTJAfpWNk02D7xv422T1OzK2A2BKx+26xJOvHkyw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-27.4.6.tgz", + "integrity": "sha512-kAiwMGZ7UxrgPzu8Yv9uvWmXXxsy0GciNejlHvfPIfWkSxChzv6bgTS3YqBkGuHcis+ouMFI2696n2t+XYIeFw==", "dev": true, "requires": { - "@jest/environment": "^27.1.1", - "@jest/types": "^27.1.1", - "expect": "^27.1.1" + "@jest/environment": "^27.4.6", + "@jest/types": "^27.4.2", + "expect": "^27.4.6" } }, "@jest/reporters": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.1.1.tgz", - "integrity": "sha512-cEERs62n1P4Pqox9HWyNOEkP57G95aK2mBjB6D8Ruz1Yc98fKH53b58rlVEnsY5nLmkLNZk65fxNi9C0Yds/8w==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-27.4.6.tgz", + "integrity": "sha512-+Zo9gV81R14+PSq4wzee4GC2mhAN9i9a7qgJWL90Gpx7fHYkWpTBvwWNZUXvJByYR9tAVBdc8VxDWqfJyIUrIQ==", "dev": true, "requires": { "@bcoe/v8-coverage": "^0.2.3", - "@jest/console": "^27.1.1", - "@jest/test-result": "^27.1.1", - "@jest/transform": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/console": "^27.4.6", + 
"@jest/test-result": "^27.4.6", + "@jest/transform": "^27.4.6", + "@jest/types": "^27.4.2", + "@types/node": "*", "chalk": "^4.0.0", "collect-v8-coverage": "^1.0.0", "exit": "^0.1.2", "glob": "^7.1.2", "graceful-fs": "^4.2.4", "istanbul-lib-coverage": "^3.0.0", - "istanbul-lib-instrument": "^4.0.3", + "istanbul-lib-instrument": "^5.1.0", "istanbul-lib-report": "^3.0.0", "istanbul-lib-source-maps": "^4.0.0", - "istanbul-reports": "^3.0.2", - "jest-haste-map": "^27.1.1", - "jest-resolve": "^27.1.1", - "jest-util": "^27.1.1", - "jest-worker": "^27.1.1", + "istanbul-reports": "^3.1.3", + "jest-haste-map": "^27.4.6", + "jest-resolve": "^27.4.6", + "jest-util": "^27.4.2", + "jest-worker": "^27.4.6", "slash": "^3.0.0", "source-map": "^0.6.0", "string-length": "^4.0.1", "terminal-link": "^2.0.0", - "v8-to-istanbul": "^8.0.0" + "v8-to-istanbul": "^8.1.0" } }, "@jest/source-map": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.0.6.tgz", - "integrity": "sha512-Fek4mi5KQrqmlY07T23JRi0e7Z9bXTOOD86V/uS0EIW4PClvPDqZOyFlLpNJheS6QI0FNX1CgmPjtJ4EA/2M+g==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-27.4.0.tgz", + "integrity": "sha512-Ntjx9jzP26Bvhbm93z/AKcPRj/9wrkI88/gK60glXDx1q+IeI0rf7Lw2c89Ch6ofonB0On/iRDreQuQ6te9pgQ==", "dev": true, "requires": { "callsites": "^3.0.0", @@ -598,56 +580,56 @@ } }, "@jest/test-result": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.1.1.tgz", - "integrity": "sha512-8vy75A0Jtfz9DqXFUkjC5Co/wRla+D7qRFdShUY8SbPqBS3GBx3tpba7sGKFos8mQrdbe39n+c1zgVKtarfy6A==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-27.4.6.tgz", + "integrity": "sha512-fi9IGj3fkOrlMmhQqa/t9xum8jaJOOAi/lZlm6JXSc55rJMXKHxNDN1oCP39B0/DhNOa2OMupF9BcKZnNtXMOQ==", "dev": true, "requires": { - "@jest/console": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/console": "^27.4.6", + 
"@jest/types": "^27.4.2", "@types/istanbul-lib-coverage": "^2.0.0", "collect-v8-coverage": "^1.0.0" } }, "@jest/test-sequencer": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.1.1.tgz", - "integrity": "sha512-l8zD3EdeixvwmLNlJoMX3hhj8iIze95okj4sqmBzOq/zW8gZLElUveH4bpKEMuR+Nweazjlwc7L6g4C26M/y6Q==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-27.4.6.tgz", + "integrity": "sha512-3GL+nsf6E1PsyNsJuvPyIz+DwFuCtBdtvPpm/LMXVkBJbdFvQYCDpccYT56qq5BGniXWlE81n2qk1sdXfZebnw==", "dev": true, "requires": { - "@jest/test-result": "^27.1.1", + "@jest/test-result": "^27.4.6", "graceful-fs": "^4.2.4", - "jest-haste-map": "^27.1.1", - "jest-runtime": "^27.1.1" + "jest-haste-map": "^27.4.6", + "jest-runtime": "^27.4.6" } }, "@jest/transform": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.1.1.tgz", - "integrity": "sha512-qM19Eu75U6Jc5zosXXVnq900Nl9JDpoGaZ4Mg6wZs7oqbu3heYSMOZS19DlwjlhWdfNRjF4UeAgkrCJCK3fEXg==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-27.4.6.tgz", + "integrity": "sha512-9MsufmJC8t5JTpWEQJ0OcOOAXaH5ioaIX6uHVBLBMoCZPfKKQF+EqP8kACAvCZ0Y1h2Zr3uOccg8re+Dr5jxyw==", "dev": true, "requires": { "@babel/core": "^7.1.0", - "@jest/types": "^27.1.1", - "babel-plugin-istanbul": "^6.0.0", + "@jest/types": "^27.4.2", + "babel-plugin-istanbul": "^6.1.1", "chalk": "^4.0.0", "convert-source-map": "^1.4.0", "fast-json-stable-stringify": "^2.0.0", "graceful-fs": "^4.2.4", - "jest-haste-map": "^27.1.1", - "jest-regex-util": "^27.0.6", - "jest-util": "^27.1.1", + "jest-haste-map": "^27.4.6", + "jest-regex-util": "^27.4.0", + "jest-util": "^27.4.2", "micromatch": "^4.0.4", - "pirates": "^4.0.1", + "pirates": "^4.0.4", "slash": "^3.0.0", "source-map": "^0.6.1", "write-file-atomic": "^3.0.0" } }, "@jest/types": { - "version": "27.1.1", - "resolved": 
"https://registry.npmjs.org/@jest/types/-/types-27.1.1.tgz", - "integrity": "sha512-yqJPDDseb0mXgKqmNqypCsb85C22K1aY5+LUxh7syIM9n/b0AsaltxNy+o6tt29VcfGDpYEve175bm3uOhcehA==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-27.4.2.tgz", + "integrity": "sha512-j35yw0PMTPpZsUoOBiuHzr1zTYoad1cVIE0ajEjcrJONxxrko/IRGKkXx3os0Nsi4Hu3+5VmDbVfq5WhG/pWAg==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "^2.0.0", @@ -657,6 +639,22 @@ "chalk": "^4.0.0" } }, + "@jridgewell/resolve-uri": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.4.tgz", + "integrity": "sha512-cz8HFjOFfUBtvN+NXYSFMHYRdxZMaEl0XypVrhzxBgadKIXhIkRd8aMeHhmF56Sl7SuS8OnUpQ73/k9LE4VnLg==", + "dev": true + }, + "@jridgewell/trace-mapping": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.2.6.tgz", + "integrity": "sha512-rVJf5dSMEBxnDEwtAT5x8+p6tZ+xU6Ocm+cR1MYL2gMsRi4MMzVf9Pvq6JaxIsEeKAyYmo2U+yPQN4QfdTfFnA==", + "dev": true, + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "sourcemap-codec": "1.4.8" + } + }, "@sinonjs/commons": { "version": "1.8.3", "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", @@ -667,9 +665,9 @@ } }, "@sinonjs/fake-timers": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz", - "integrity": "sha512-iQADsW4LBMISqZ6Ci1dupJL9pprqwcVFTcOsEmQOEhW+KLCVn/Y4Jrvg2k19fIHCp+iFprriYPTdRcQR8NbUPg==", + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz", + "integrity": "sha512-OAPJUAtgeINhh/TAlUID4QTs53Njm7xzddaVlEs/SXwgtiD1tW22zAB/W1wdqfrpmikgaWQ9Fw6Ws+hsiRm5Vg==", "dev": true, "requires": { "@sinonjs/commons": "^1.7.0" @@ -682,9 +680,9 @@ "dev": true }, "@types/babel__core": { - "version": "7.1.16", - "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.16.tgz", 
- "integrity": "sha512-EAEHtisTMM+KaKwfWdC3oyllIqswlznXCIVCt7/oRNrh+DhgT4UEBNC/jlADNjvw7UnfbcdkGQcPVZ1xYiLcrQ==", + "version": "7.1.18", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.1.18.tgz", + "integrity": "sha512-S7unDjm/C7z2A2R9NzfKCK1I+BAALDtxEmsJBwlB3EzNfb929ykjL++1CK9LO++EIp2fQrC8O+BwjKvz6UeDyQ==", "dev": true, "requires": { "@babel/parser": "^7.1.0", @@ -695,9 +693,9 @@ } }, "@types/babel__generator": { - "version": "7.6.3", - "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.3.tgz", - "integrity": "sha512-/GWCmzJWqV7diQW54smJZzWbSFf4QYtF71WCKhcx6Ru/tFyQIY2eiiITcCAeuPbNSvT9YCGkVMqqvSk2Z0mXiA==", + "version": "7.6.4", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.4.tgz", + "integrity": "sha512-tFkciB9j2K755yrTALxD44McOrk+gfpIpvC3sxHjRawj6PfnQxrse4Clq5y/Rq+G3mrBurMax/lG8Qn2t9mSsg==", "dev": true, "requires": { "@babel/types": "^7.0.0" @@ -729,9 +727,9 @@ "dev": true }, "@types/emscripten": { - "version": "1.39.5", - "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.39.5.tgz", - "integrity": "sha512-DIOOg+POSrYl+OlNRHQuIEqCd8DCtynG57H862UCce16nXJX7J8eWxNGgOcf8Eyge8zXeSs27mz1UcFu8L/L7g==" + "version": "1.39.6", + "resolved": "https://registry.npmjs.org/@types/emscripten/-/emscripten-1.39.6.tgz", + "integrity": "sha512-H90aoynNhhkQP6DRweEjJp5vfUVdIj7tdPLsu7pq89vODD/lcugKfZOsfgwpvM6XUewEp2N5dCg1Uf3Qe55Dcg==" }, "@types/graceful-fs": { "version": "4.1.5", @@ -743,9 +741,9 @@ } }, "@types/istanbul-lib-coverage": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz", - "integrity": "sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw==", + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.4.tgz", + "integrity": 
"sha512-z/QT1XN4K4KYuslS23k62yDIDLwLFkzxOuMplDtObz0+y7VqJCaO2o+SPwHCvLFZh7xazvvoor2tA/hPz9ee7g==", "dev": true }, "@types/istanbul-lib-report": { @@ -767,9 +765,9 @@ } }, "@types/jest": { - "version": "27.0.1", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-27.0.1.tgz", - "integrity": "sha512-HTLpVXHrY69556ozYkcq47TtQJXpcWAWfkoqz+ZGz2JnmZhzlRjprCIyFnetSy8gpDWwTTGBcRVv1J1I1vBrHw==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-27.4.0.tgz", + "integrity": "sha512-gHl8XuC1RZ8H2j5sHv/JqsaxXkDDM9iDOgu0Wp8sjs4u/snb2PVehyWXJPr+ORA0RPpgw231mnutWI1+0hgjIQ==", "dev": true, "requires": { "jest-diff": "^27.0.0", @@ -777,15 +775,15 @@ } }, "@types/node": { - "version": "12.20.24", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.24.tgz", - "integrity": "sha512-yxDeaQIAJlMav7fH5AQqPH1u8YIuhYJXYBzxaQ4PifsU0GDO38MSdmEDeRlIxrKbC6NbEaaEHDanWb+y30U8SQ==", + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", "dev": true }, "@types/prettier": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.3.2.tgz", - "integrity": "sha512-eI5Yrz3Qv4KPUa/nSIAi0h+qX0XyewOliug5F2QAtuRg6Kjg6jfmxe1GIwoIRhZspD1A0RP8ANrPwvEXXtRFog==", + "version": "2.4.3", + "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.4.3.tgz", + "integrity": "sha512-QzSuZMBuG5u8HqYz01qtMdg/Jfctlnvj1z/lYnIDXs/golxw0fxtRAHd9KrzjR7Yxz1qVeI00o0kiO3PmVdJ9w==", "dev": true }, "@types/stack-utils": { @@ -795,9 +793,9 @@ "dev": true }, "@types/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-7cTXwKP/HLOPVgjg+YhBdQ7bMiobGMuoBmrGmqwIWJv8elC6t1DfVc/mn4fD9UE1IjhwmhaQ5pGVXkmXbH0rhg==", + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@types/tmp/-/tmp-0.2.3.tgz", + 
"integrity": "sha512-dDZH/tXzwjutnuk4UacGgFRwV+JSLaXL1ikvidfJprkb7L9Nx1njcRHHmi3Dsvt7pgqqTEeucQuOrWHPFgzVHA==", "dev": true }, "@types/yargs": { @@ -816,9 +814,9 @@ "dev": true }, "@yarnpkg/fslib": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/@yarnpkg/fslib/-/fslib-2.5.2.tgz", - "integrity": "sha512-Tonb3jXsxsGB+uTlzG6r2zcAUekIG0FfCZV/kM14fPvTE3Z1wC0cMtiD4IUzxSqyhC/zR+T1pLaTPT3xDzSvqg==", + "version": "2.6.0", + "resolved": "https://registry.npmjs.org/@yarnpkg/fslib/-/fslib-2.6.0.tgz", + "integrity": "sha512-NgK7aYYNrFOFJGcWWEDXHHYNt93L3A0Ojv90Xl9+f72T+CaFkOAXyK2XBwEYreot7lT1NeopcnkmFRjHgKRKgQ==", "requires": { "@yarnpkg/libzip": "^2.2.2", "tslib": "^1.13.0" @@ -840,9 +838,9 @@ "dev": true }, "acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", "dev": true }, "acorn-globals": { @@ -888,9 +886,9 @@ } }, "ansi-regex": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", - "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "dev": true }, "ansi-styles": { @@ -931,38 +929,38 @@ "dev": true }, "babel-jest": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-27.1.1.tgz", - "integrity": "sha512-JA+dzJl4n2RBvWQEnph6HJaTHrsIPiXGQYatt/D8nR4UpX9UG4GaDzykVVPQBbrdTebZREkRb6SOxyIXJRab6Q==", + "version": "27.4.6", + "resolved": 
"https://registry.npmjs.org/babel-jest/-/babel-jest-27.4.6.tgz", + "integrity": "sha512-qZL0JT0HS1L+lOuH+xC2DVASR3nunZi/ozGhpgauJHgmI7f8rudxf6hUjEHympdQ/J64CdKmPkgfJ+A3U6QCrg==", "dev": true, "requires": { - "@jest/transform": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/transform": "^27.4.6", + "@jest/types": "^27.4.2", "@types/babel__core": "^7.1.14", - "babel-plugin-istanbul": "^6.0.0", - "babel-preset-jest": "^27.0.6", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^27.4.0", "chalk": "^4.0.0", "graceful-fs": "^4.2.4", "slash": "^3.0.0" } }, "babel-plugin-istanbul": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz", - "integrity": "sha512-AF55rZXpe7trmEylbaE1Gv54wn6rwU03aptvRoVIGP8YykoSxqdVLV1TfwflBCE/QtHmqtP8SWlTENqbK8GCSQ==", + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", "dev": true, "requires": { "@babel/helper-plugin-utils": "^7.0.0", "@istanbuljs/load-nyc-config": "^1.0.0", "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-instrument": "^5.0.4", "test-exclude": "^6.0.0" } }, "babel-plugin-jest-hoist": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.0.6.tgz", - "integrity": "sha512-CewFeM9Vv2gM7Yr9n5eyyLVPRSiBnk6lKZRjgwYnGKSl9M14TMn2vkN02wTF04OGuSDLEzlWiMzvjXuW9mB6Gw==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-27.4.0.tgz", + "integrity": "sha512-Jcu7qS4OX5kTWBc45Hz7BMmgXuJqRnhatqpUhnzGC3OBYpOmf2tv6jFNwZpwM7wU7MUuv2r9IPS/ZlYOuburVw==", "dev": true, "requires": { "@babel/template": "^7.3.3", @@ -992,12 +990,12 @@ } }, "babel-preset-jest": { - "version": "27.0.6", - "resolved": 
"https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.0.6.tgz", - "integrity": "sha512-WObA0/Biw2LrVVwZkF/2GqbOdzhKD6Fkdwhoy9ASIrOWr/zodcSpQh72JOkEn6NWyjmnPDjNSqaGN4KnpKzhXw==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-27.4.0.tgz", + "integrity": "sha512-NK4jGYpnBvNxcGo7/ZpZJr51jCGT+3bwwpVIDY2oNfTxJJldRtB4VAcYdgp1loDE50ODuTu+yBjpMAswv5tlpg==", "dev": true, "requires": { - "babel-plugin-jest-hoist": "^27.0.6", + "babel-plugin-jest-hoist": "^27.4.0", "babel-preset-current-node-syntax": "^1.0.0" } }, @@ -1035,16 +1033,16 @@ "dev": true }, "browserslist": { - "version": "4.17.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.17.0.tgz", - "integrity": "sha512-g2BJ2a0nEYvEFQC208q8mVAhfNwpZ5Mu8BwgtCdZKO3qx98HChmeg448fPdUzld8aFmfLgVh7yymqV+q1lJZ5g==", + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30001254", - "colorette": "^1.3.0", - "electron-to-chromium": "^1.3.830", + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", "escalade": "^3.1.1", - "node-releases": "^1.1.75" + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" } }, "bs-logger": { @@ -1083,9 +1081,9 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001255", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001255.tgz", - "integrity": "sha512-F+A3N9jTZL882f/fg/WWVnKSu6IOo3ueLz4zwaOPbPYHNmM/ZaDUyzyJwS1mZhX7Ex5jqTyW599Gdelh5PDYLQ==", + "version": "1.0.30001307", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001307.tgz", + "integrity": "sha512-+MXEMczJ4FuxJAUp0jvAl6Df0NI/OfW1RWEE61eSmzS7hw6lz4IKutbhbXendwq8BljfFuHtu26VWsg4afQ7Ng==", "dev": true }, "chalk": { @@ -1104,9 +1102,9 @@ "dev": true }, "chokidar": { - 
"version": "3.5.2", - "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", - "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.3.tgz", + "integrity": "sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==", "requires": { "anymatch": "~3.1.2", "braces": "~3.0.2", @@ -1119,9 +1117,9 @@ } }, "ci-info": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.2.0.tgz", - "integrity": "sha512-dVqRX7fLUm8J6FgHJ418XuIgDLZDkYcDFTeL6TA2gt5WlIZUQrrH6EZrNClwT/H0FateUsZkGIOPRrLbP+PR9A==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.3.0.tgz", + "integrity": "sha512-riT/3vI5YpVH6/qomlDnJow6TBee2PBKSEpx3O32EGPYbWGIRsIlGRms3Sm74wYE1JMo8RnO04Hb12+v1J5ICw==", "dev": true }, "cjs-module-lexer": { @@ -1166,12 +1164,6 @@ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" }, - "colorette": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", - "integrity": "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==", - "dev": true - }, "combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -1182,9 +1174,9 @@ } }, "command-line-args": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.0.tgz", - "integrity": "sha512-4zqtU1hYsSJzcJBOcNZIbW5Fbk9BkjCp1pZVhQKoRaWL5J7N4XphDLwo8aWwdQpTugxwu+jf9u2ZhkXiqp5Z6A==", + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz", + "integrity": 
"sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==", "requires": { "array-back": "^3.1.0", "find-replace": "^3.0.0", @@ -1252,9 +1244,9 @@ } }, "debug": { - "version": "4.3.2", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", - "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", "dev": true, "requires": { "ms": "2.1.2" @@ -1297,9 +1289,9 @@ "dev": true }, "diff-sequences": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.0.6.tgz", - "integrity": "sha512-ag6wfpBFyNXZ0p8pcuIDS//D8H062ZQJ3fzYxjpmeKjnz8W4pekL3AI8VohmyZmsWW2PWaHgjsmqR6L13101VQ==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-27.4.0.tgz", + "integrity": "sha512-YqiQzkrsmHMH5uuh8OdQFU9/ZpADnwzml8z0O5HvRNda+5UZsaX/xN+AAxfR2hWq1Y7HZnAzO9J5lJXOuDz2Ww==", "dev": true }, "domexception": { @@ -1320,9 +1312,9 @@ } }, "electron-to-chromium": { - "version": "1.3.833", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.833.tgz", - "integrity": "sha512-h+9aVaUHjyunLqtCjJF2jrJ73tYcJqo2cCGKtVAXH9WmnBsb8hiChRQ0P1uXjdxR6Wcfxibephy41c1YlZA/pA==", + "version": "1.4.64", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.64.tgz", + "integrity": "sha512-8mec/99xgLUZCIZZq3wt61Tpxg55jnOSpxGYapE/1Ma9MpFEYYaz4QNYm0CM1rrnCo7i3FRHhbaWjeCLsveGjQ==", "dev": true }, "emittery": { @@ -1369,9 +1361,9 @@ "dev": true }, "estraverse": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", - "integrity": 
"sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true }, "esutils": { @@ -1404,25 +1396,15 @@ "dev": true }, "expect": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/expect/-/expect-27.1.1.tgz", - "integrity": "sha512-JQAzp0CJoFFHF1RnOtrMUNMdsfx/Tl0+FhRzVl8q0fa23N+JyWdPXwb3T5rkHCvyo9uttnK7lVdKCBl1b/9EDw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/expect/-/expect-27.4.6.tgz", + "integrity": "sha512-1M/0kAALIaj5LaG66sFJTbRsWTADnylly82cu4bspI0nl+pgP4E6Bh/aqdHlTUjul06K7xQnnrAoqfxVU0+/ag==", "dev": true, "requires": { - "@jest/types": "^27.1.1", - "ansi-styles": "^5.0.0", - "jest-get-type": "^27.0.6", - "jest-matcher-utils": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-regex-util": "^27.0.6" - }, - "dependencies": { - "ansi-styles": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", - "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", - "dev": true - } + "@jest/types": "^27.4.2", + "jest-get-type": "^27.4.0", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6" } }, "fast-json-stable-stringify": { @@ -1525,9 +1507,9 @@ "dev": true }, "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -1552,9 +1534,9 @@ 
"dev": true }, "graceful-fs": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", - "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", "dev": true }, "has": { @@ -1623,9 +1605,9 @@ } }, "import-local": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.0.2.tgz", - "integrity": "sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, "requires": { "pkg-dir": "^4.2.0", @@ -1666,19 +1648,10 @@ "binary-extensions": "^2.0.0" } }, - "is-ci": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-3.0.0.tgz", - "integrity": "sha512-kDXyttuLeslKAHYL/K28F2YkM3x5jvFPEw3yXbRptXydjD9rpLEz+C5K5iutY9ZiUu6AP41JdvRQwF4Iqs4ZCQ==", - "dev": true, - "requires": { - "ci-info": "^3.1.1" - } - }, "is-core-module": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.6.0.tgz", - "integrity": "sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", "dev": true, "requires": { "has": "^1.0.3" @@ -1702,9 +1675,9 @@ "dev": true }, "is-glob": { - "version": "4.0.1", - "resolved": 
"https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "requires": { "is-extglob": "^2.1.1" } @@ -1739,20 +1712,21 @@ "dev": true }, "istanbul-lib-coverage": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz", - "integrity": "sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.0.tgz", + "integrity": "sha512-eOeJ5BHCmHYvQK7xt9GkdHuzuCGS1Y6g9Gvnx3Ym33fz/HpLRYxiS0wHNr+m/MBC8B647Xt608vCDEvhl9c6Mw==", "dev": true }, "istanbul-lib-instrument": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", - "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.1.0.tgz", + "integrity": "sha512-czwUz525rkOFDJxfKK6mYfIs9zBKILyrZQxjz3ABhjQXhbhFsSbo1HW/BFcsDnfJYJWA6thRR5/TUY2qs5W99Q==", "dev": true, "requires": { - "@babel/core": "^7.7.5", + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-coverage": "^3.2.0", "semver": "^6.3.0" } }, @@ -1768,9 +1742,9 @@ } }, "istanbul-lib-source-maps": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz", - "integrity": 
"sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", "dev": true, "requires": { "debug": "^4.1.1", @@ -1779,9 +1753,9 @@ } }, "istanbul-reports": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz", - "integrity": "sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.3.tgz", + "integrity": "sha512-x9LtDVtfm/t1GFiLl3NffC7hz+I1ragvgX1P/Lg1NlIagifZDKUkuuaAxH/qpwj2IuEfD8G2Bs/UKp+sZ/pKkg==", "dev": true, "requires": { "html-escaper": "^2.0.0", @@ -1789,296 +1763,279 @@ } }, "jest": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest/-/jest-27.1.1.tgz", - "integrity": "sha512-LFTEZOhoZNR/2DQM3OCaK5xC6c55c1OWhYh0njRsoHX0qd6x4nkcgenkSH0JKjsAGMTmmJAoL7/oqYHMfwhruA==", + "version": "27.4.7", + "resolved": "https://registry.npmjs.org/jest/-/jest-27.4.7.tgz", + "integrity": "sha512-8heYvsx7nV/m8m24Vk26Y87g73Ba6ueUd0MWed/NXMhSZIm62U/llVbS0PJe1SHunbyXjJ/BqG1z9bFjGUIvTg==", "dev": true, "requires": { - "@jest/core": "^27.1.1", + "@jest/core": "^27.4.7", "import-local": "^3.0.2", - "jest-cli": "^27.1.1" + "jest-cli": "^27.4.7" }, "dependencies": { "jest-cli": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.1.1.tgz", - "integrity": "sha512-LCjfEYp9D3bcOeVUUpEol9Y1ijZYMWVqflSmtw/wX+6Fb7zP4IlO14/6s9v1pxsoM4Pn46+M2zABgKuQjyDpTw==", + "version": "27.4.7", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-27.4.7.tgz", + "integrity": 
"sha512-zREYhvjjqe1KsGV15mdnxjThKNDgza1fhDT+iUsXWLCq3sxe9w5xnvyctcYVT5PcdLSjv7Y5dCwTS3FCF1tiuw==", "dev": true, "requires": { - "@jest/core": "^27.1.1", - "@jest/test-result": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/core": "^27.4.7", + "@jest/test-result": "^27.4.6", + "@jest/types": "^27.4.2", "chalk": "^4.0.0", "exit": "^0.1.2", "graceful-fs": "^4.2.4", "import-local": "^3.0.2", - "jest-config": "^27.1.1", - "jest-util": "^27.1.1", - "jest-validate": "^27.1.1", + "jest-config": "^27.4.7", + "jest-util": "^27.4.2", + "jest-validate": "^27.4.6", "prompts": "^2.0.1", - "yargs": "^16.0.3" + "yargs": "^16.2.0" } } } }, "jest-changed-files": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.1.1.tgz", - "integrity": "sha512-5TV9+fYlC2A6hu3qtoyGHprBwCAn0AuGA77bZdUgYvVlRMjHXo063VcWTEAyx6XAZ85DYHqp0+aHKbPlfRDRvA==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-27.4.2.tgz", + "integrity": "sha512-/9x8MjekuzUQoPjDHbBiXbNEBauhrPU2ct7m8TfCg69ywt1y/N+yYwGh3gCpnqUS3klYWDU/lSNgv+JhoD2k1A==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "execa": "^5.0.0", "throat": "^6.0.1" } }, "jest-circus": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.1.1.tgz", - "integrity": "sha512-Xed1ApiMFu/yzqGMBToHr8sp2gkX/ARZf4nXoGrHJrXrTUdVIWiVYheayfcOaPdQvQEE/uyBLgW7I7YBLIrAXQ==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-27.4.6.tgz", + "integrity": "sha512-UA7AI5HZrW4wRM72Ro80uRR2Fg+7nR0GESbSI/2M+ambbzVuA63mn5T1p3Z/wlhntzGpIG1xx78GP2YIkf6PhQ==", "dev": true, "requires": { - "@jest/environment": "^27.1.1", - "@jest/test-result": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/environment": "^27.4.6", + "@jest/test-result": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", "chalk": "^4.0.0", "co": "^4.6.0", "dedent": "^0.7.0", - 
"expect": "^27.1.1", + "expect": "^27.4.6", "is-generator-fn": "^2.0.0", - "jest-each": "^27.1.1", - "jest-matcher-utils": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-runtime": "^27.1.1", - "jest-snapshot": "^27.1.1", - "jest-util": "^27.1.1", - "pretty-format": "^27.1.1", + "jest-each": "^27.4.6", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6", + "jest-runtime": "^27.4.6", + "jest-snapshot": "^27.4.6", + "jest-util": "^27.4.2", + "pretty-format": "^27.4.6", "slash": "^3.0.0", "stack-utils": "^2.0.3", "throat": "^6.0.1" } }, "jest-config": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.1.1.tgz", - "integrity": "sha512-2iSd5zoJV4MsWPcLCGwUVUY/j6pZXm4Qd3rnbCtrd9EHNTg458iHw8PZztPQXfxKBKJxLfBk7tbZqYF8MGtxJA==", + "version": "27.4.7", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-27.4.7.tgz", + "integrity": "sha512-xz/o/KJJEedHMrIY9v2ParIoYSrSVY6IVeE4z5Z3i101GoA5XgfbJz+1C8EYPsv7u7f39dS8F9v46BHDhn0vlw==", "dev": true, "requires": { - "@babel/core": "^7.1.0", - "@jest/test-sequencer": "^27.1.1", - "@jest/types": "^27.1.1", - "babel-jest": "^27.1.1", + "@babel/core": "^7.8.0", + "@jest/test-sequencer": "^27.4.6", + "@jest/types": "^27.4.2", + "babel-jest": "^27.4.6", "chalk": "^4.0.0", + "ci-info": "^3.2.0", "deepmerge": "^4.2.2", "glob": "^7.1.1", "graceful-fs": "^4.2.4", - "is-ci": "^3.0.0", - "jest-circus": "^27.1.1", - "jest-environment-jsdom": "^27.1.1", - "jest-environment-node": "^27.1.1", - "jest-get-type": "^27.0.6", - "jest-jasmine2": "^27.1.1", - "jest-regex-util": "^27.0.6", - "jest-resolve": "^27.1.1", - "jest-runner": "^27.1.1", - "jest-util": "^27.1.1", - "jest-validate": "^27.1.1", + "jest-circus": "^27.4.6", + "jest-environment-jsdom": "^27.4.6", + "jest-environment-node": "^27.4.6", + "jest-get-type": "^27.4.0", + "jest-jasmine2": "^27.4.6", + "jest-regex-util": "^27.4.0", + "jest-resolve": "^27.4.6", + "jest-runner": "^27.4.6", + "jest-util": "^27.4.2", + 
"jest-validate": "^27.4.6", "micromatch": "^4.0.4", - "pretty-format": "^27.1.1" + "pretty-format": "^27.4.6", + "slash": "^3.0.0" } }, "jest-diff": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.1.1.tgz", - "integrity": "sha512-m/6n5158rqEriTazqHtBpOa2B/gGgXJijX6nsEgZfbJ/3pxQcdpVXBe+FP39b1dxWHyLVVmuVXddmAwtqFO4Lg==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-27.4.6.tgz", + "integrity": "sha512-zjaB0sh0Lb13VyPsd92V7HkqF6yKRH9vm33rwBt7rPYrpQvS1nCvlIy2pICbKta+ZjWngYLNn4cCK4nyZkjS/w==", "dev": true, "requires": { "chalk": "^4.0.0", - "diff-sequences": "^27.0.6", - "jest-get-type": "^27.0.6", - "pretty-format": "^27.1.1" + "diff-sequences": "^27.4.0", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" } }, "jest-docblock": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.0.6.tgz", - "integrity": "sha512-Fid6dPcjwepTFraz0YxIMCi7dejjJ/KL9FBjPYhBp4Sv1Y9PdhImlKZqYU555BlN4TQKaTc+F2Av1z+anVyGkA==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-27.4.0.tgz", + "integrity": "sha512-7TBazUdCKGV7svZ+gh7C8esAnweJoG+SvcF6Cjqj4l17zA2q1cMwx2JObSioubk317H+cjcHgP+7fTs60paulg==", "dev": true, "requires": { "detect-newline": "^3.0.0" } }, "jest-each": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.1.1.tgz", - "integrity": "sha512-r6hOsTLavUBb1xN0uDa89jdDeBmJ+K49fWpbyxeGRA2pLY46PlC4z551/cWNQzrj+IUa5/gSRsCIV/01HdNPug==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-27.4.6.tgz", + "integrity": "sha512-n6QDq8y2Hsmn22tRkgAk+z6MCX7MeVlAzxmZDshfS2jLcaBlyhpF3tZSJLR+kXmh23GEvS0ojMR8i6ZeRvpQcA==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "chalk": "^4.0.0", - "jest-get-type": "^27.0.6", - "jest-util": "^27.1.1", - "pretty-format": "^27.1.1" + "jest-get-type": "^27.4.0", + 
"jest-util": "^27.4.2", + "pretty-format": "^27.4.6" } }, "jest-environment-jsdom": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.1.1.tgz", - "integrity": "sha512-6vOnoZ6IaExuw7FvnuJhA1qFYv1DDSnN0sQowzolNwxQp7bG1YhLxj2YU1sVXAYA3IR3MbH2mbnJUsLUWfyfzw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-environment-jsdom/-/jest-environment-jsdom-27.4.6.tgz", + "integrity": "sha512-o3dx5p/kHPbUlRvSNjypEcEtgs6LmvESMzgRFQE6c+Prwl2JLA4RZ7qAnxc5VM8kutsGRTB15jXeeSbJsKN9iA==", "dev": true, "requires": { - "@jest/environment": "^27.1.1", - "@jest/fake-timers": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/environment": "^27.4.6", + "@jest/fake-timers": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", - "jest-mock": "^27.1.1", - "jest-util": "^27.1.1", + "jest-mock": "^27.4.6", + "jest-util": "^27.4.2", "jsdom": "^16.6.0" } }, "jest-environment-node": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.1.1.tgz", - "integrity": "sha512-OEGeZh0PwzngNIYWYgWrvTcLygopV8OJbC9HNb0j70VBKgEIsdZkYhwcFnaURX83OHACMqf1pa9Tv5Pw5jemrg==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-27.4.6.tgz", + "integrity": "sha512-yfHlZ9m+kzTKZV0hVfhVu6GuDxKAYeFHrfulmy7Jxwsq4V7+ZK7f+c0XP/tbVDMQW7E4neG2u147hFkuVz0MlQ==", "dev": true, "requires": { - "@jest/environment": "^27.1.1", - "@jest/fake-timers": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/environment": "^27.4.6", + "@jest/fake-timers": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", - "jest-mock": "^27.1.1", - "jest-util": "^27.1.1" + "jest-mock": "^27.4.6", + "jest-util": "^27.4.2" } }, "jest-get-type": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.0.6.tgz", - "integrity": 
"sha512-XTkK5exIeUbbveehcSR8w0bhH+c0yloW/Wpl+9vZrjzztCPWrxhHwkIFpZzCt71oRBsgxmuUfxEqOYoZI2macg==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-27.4.0.tgz", + "integrity": "sha512-tk9o+ld5TWq41DkK14L4wox4s2D9MtTpKaAVzXfr5CUKm5ZK2ExcaFE0qls2W71zE/6R2TxxrK9w2r6svAFDBQ==", "dev": true }, "jest-haste-map": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.1.1.tgz", - "integrity": "sha512-NGLYVAdh5C8Ezg5QBFzrNeYsfxptDBPlhvZNaicLiZX77F/rS27a9M6u9ripWAaaD54xnWdZNZpEkdjD5Eo5aQ==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-27.4.6.tgz", + "integrity": "sha512-0tNpgxg7BKurZeFkIOvGCkbmOHbLFf4LUQOxrQSMjvrQaQe3l6E8x6jYC1NuWkGo5WDdbr8FEzUxV2+LWNawKQ==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "@types/graceful-fs": "^4.1.2", "@types/node": "*", "anymatch": "^3.0.3", "fb-watchman": "^2.0.0", "fsevents": "^2.3.2", "graceful-fs": "^4.2.4", - "jest-regex-util": "^27.0.6", - "jest-serializer": "^27.0.6", - "jest-util": "^27.1.1", - "jest-worker": "^27.1.1", + "jest-regex-util": "^27.4.0", + "jest-serializer": "^27.4.0", + "jest-util": "^27.4.2", + "jest-worker": "^27.4.6", "micromatch": "^4.0.4", "walker": "^1.0.7" } }, "jest-jasmine2": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.1.1.tgz", - "integrity": "sha512-0LAzUmcmvQwjIdJt0cXUVX4G5qjVXE8ELt6nbMNDzv2yAs2hYCCUtQq+Eje70GwAysWCGcS64QeYj5VPHYVxPg==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-jasmine2/-/jest-jasmine2-27.4.6.tgz", + "integrity": "sha512-uAGNXF644I/whzhsf7/qf74gqy9OuhvJ0XYp8SDecX2ooGeaPnmJMjXjKt0mqh1Rl5dtRGxJgNrHlBQIBfS5Nw==", "dev": true, "requires": { - "@babel/traverse": "^7.1.0", - "@jest/environment": "^27.1.1", - "@jest/source-map": "^27.0.6", - "@jest/test-result": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/environment": 
"^27.4.6", + "@jest/source-map": "^27.4.0", + "@jest/test-result": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", "chalk": "^4.0.0", "co": "^4.6.0", - "expect": "^27.1.1", + "expect": "^27.4.6", "is-generator-fn": "^2.0.0", - "jest-each": "^27.1.1", - "jest-matcher-utils": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-runtime": "^27.1.1", - "jest-snapshot": "^27.1.1", - "jest-util": "^27.1.1", - "pretty-format": "^27.1.1", + "jest-each": "^27.4.6", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6", + "jest-runtime": "^27.4.6", + "jest-snapshot": "^27.4.6", + "jest-util": "^27.4.2", + "pretty-format": "^27.4.6", "throat": "^6.0.1" } }, "jest-junit": { - "version": "12.2.0", - "resolved": "https://registry.npmjs.org/jest-junit/-/jest-junit-12.2.0.tgz", - "integrity": "sha512-ecGzF3KEQwLbMP5xMO7wqmgmyZlY/5yWDvgE/vFa+/uIT0KsU5nluf0D2fjIlOKB+tb6DiuSSpZuGpsmwbf7Fw==", + "version": "13.0.0", + "resolved": "https://registry.npmjs.org/jest-junit/-/jest-junit-13.0.0.tgz", + "integrity": "sha512-JSHR+Dhb32FGJaiKkqsB7AR3OqWKtldLd6ZH2+FJ8D4tsweb8Id8zEVReU4+OlrRO1ZluqJLQEETm+Q6/KilBg==", "dev": true, "requires": { "mkdirp": "^1.0.4", - "strip-ansi": "^5.2.0", + "strip-ansi": "^6.0.1", "uuid": "^8.3.2", "xml": "^1.0.1" - }, - "dependencies": { - "ansi-regex": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz", - "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg==", - "dev": true - }, - "strip-ansi": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-5.2.0.tgz", - "integrity": "sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", - "dev": true, - "requires": { - "ansi-regex": "^4.1.0" - } - } } }, "jest-leak-detector": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.1.1.tgz", - "integrity": 
"sha512-gwSgzmqShoeEsEVpgObymQPrM9P6557jt1EsFW5aCeJ46Cme0EdjYU7xr6llQZ5GpWDl56eOstUaPXiZOfiTKw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-27.4.6.tgz", + "integrity": "sha512-kkaGixDf9R7CjHm2pOzfTxZTQQQ2gHTIWKY/JZSiYTc90bZp8kSZnUMS3uLAfwTZwc0tcMRoEX74e14LG1WapA==", "dev": true, "requires": { - "jest-get-type": "^27.0.6", - "pretty-format": "^27.1.1" + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" } }, "jest-matcher-utils": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.1.1.tgz", - "integrity": "sha512-Q1a10w9Y4sh0wegkdP6reQOa/Dtz7nAvDqBgrat1ItZAUvk4jzXAqyhXPu/ZuEtDaXaNKpdRPRQA8bvkOh2Eaw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-27.4.6.tgz", + "integrity": "sha512-XD4PKT3Wn1LQnRAq7ZsTI0VRuEc9OrCPFiO1XL7bftTGmfNF0DcEwMHRgqiu7NGf8ZoZDREpGrCniDkjt79WbA==", "dev": true, "requires": { "chalk": "^4.0.0", - "jest-diff": "^27.1.1", - "jest-get-type": "^27.0.6", - "pretty-format": "^27.1.1" + "jest-diff": "^27.4.6", + "jest-get-type": "^27.4.0", + "pretty-format": "^27.4.6" } }, "jest-message-util": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.1.1.tgz", - "integrity": "sha512-b697BOJV93+AVGvzLRtVZ0cTVRbd59OaWnbB2D75GRaIMc4I+Z9W0wHxbfjW01JWO+TqqW4yevT0aN7Fd0XWng==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-27.4.6.tgz", + "integrity": "sha512-0p5szriFU0U74czRSFjH6RyS7UYIAkn/ntwMuOwTGWrQIOh5NzXXrq72LOqIkJKKvFbPq+byZKuBz78fjBERBA==", "dev": true, "requires": { "@babel/code-frame": "^7.12.13", - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "@types/stack-utils": "^2.0.0", "chalk": "^4.0.0", "graceful-fs": "^4.2.4", "micromatch": "^4.0.4", - "pretty-format": "^27.1.1", + "pretty-format": "^27.4.6", "slash": "^3.0.0", "stack-utils": "^2.0.3" } }, 
"jest-mock": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.1.1.tgz", - "integrity": "sha512-SClsFKuYBf+6SSi8jtAYOuPw8DDMsTElUWEae3zq7vDhH01ayVSIHUSIa8UgbDOUalCFp6gNsaikN0rbxN4dbw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-27.4.6.tgz", + "integrity": "sha512-kvojdYRkst8iVSZ1EJ+vc1RRD9llueBjKzXzeCytH3dMM7zvPV/ULcfI2nr0v0VUgm3Bjt3hBCQvOeaBz+ZTHw==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "@types/node": "*" } }, @@ -2089,109 +2046,104 @@ "dev": true }, "jest-regex-util": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.0.6.tgz", - "integrity": "sha512-SUhPzBsGa1IKm8hx2F4NfTGGp+r7BXJ4CulsZ1k2kI+mGLG+lxGrs76veN2LF/aUdGosJBzKgXmNCw+BzFqBDQ==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-27.4.0.tgz", + "integrity": "sha512-WeCpMpNnqJYMQoOjm1nTtsgbR4XHAk1u00qDoNBQoykM280+/TmgA5Qh5giC1ecy6a5d4hbSsHzpBtu5yvlbEg==", "dev": true }, "jest-resolve": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.1.1.tgz", - "integrity": "sha512-M41YFmWhvDVstwe7XuV21zynOiBLJB5Sk0GrIsYYgTkjfEWNLVXDjAyq1W7PHseaYNOxIc0nOGq/r5iwcZNC1A==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-27.4.6.tgz", + "integrity": "sha512-SFfITVApqtirbITKFAO7jOVN45UgFzcRdQanOFzjnbd+CACDoyeX7206JyU92l4cRr73+Qy/TlW51+4vHGt+zw==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "chalk": "^4.0.0", - "escalade": "^3.1.1", "graceful-fs": "^4.2.4", - "jest-haste-map": "^27.1.1", + "jest-haste-map": "^27.4.6", "jest-pnp-resolver": "^1.2.2", - "jest-util": "^27.1.1", - "jest-validate": "^27.1.1", + "jest-util": "^27.4.2", + "jest-validate": "^27.4.6", "resolve": "^1.20.0", + "resolve.exports": "^1.1.0", "slash": "^3.0.0" } }, "jest-resolve-dependencies": { 
- "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.1.1.tgz", - "integrity": "sha512-sYZR+uBjFDCo4VhYeazZf/T+ryYItvdLKu9vHatqkUqHGjDMrdEPOykiqC2iEpaCFTS+3iL/21CYiJuKdRbniw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-27.4.6.tgz", + "integrity": "sha512-W85uJZcFXEVZ7+MZqIPCscdjuctruNGXUZ3OHSXOfXR9ITgbUKeHj+uGcies+0SsvI5GtUfTw4dY7u9qjTvQOw==", "dev": true, "requires": { - "@jest/types": "^27.1.1", - "jest-regex-util": "^27.0.6", - "jest-snapshot": "^27.1.1" + "@jest/types": "^27.4.2", + "jest-regex-util": "^27.4.0", + "jest-snapshot": "^27.4.6" } }, "jest-runner": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.1.1.tgz", - "integrity": "sha512-lP3MBNQhg75/sQtVkC8dsAQZumvy3lHK/YIwYPfEyqGIX1qEcnYIRxP89q0ZgC5ngvi1vN2P5UFHszQxguWdng==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-27.4.6.tgz", + "integrity": "sha512-IDeFt2SG4DzqalYBZRgbbPmpwV3X0DcntjezPBERvnhwKGWTW7C5pbbA5lVkmvgteeNfdd/23gwqv3aiilpYPg==", "dev": true, "requires": { - "@jest/console": "^27.1.1", - "@jest/environment": "^27.1.1", - "@jest/test-result": "^27.1.1", - "@jest/transform": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/console": "^27.4.6", + "@jest/environment": "^27.4.6", + "@jest/test-result": "^27.4.6", + "@jest/transform": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", "chalk": "^4.0.0", "emittery": "^0.8.1", "exit": "^0.1.2", "graceful-fs": "^4.2.4", - "jest-docblock": "^27.0.6", - "jest-environment-jsdom": "^27.1.1", - "jest-environment-node": "^27.1.1", - "jest-haste-map": "^27.1.1", - "jest-leak-detector": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-resolve": "^27.1.1", - "jest-runtime": "^27.1.1", - "jest-util": "^27.1.1", - "jest-worker": "^27.1.1", + "jest-docblock": "^27.4.0", + "jest-environment-jsdom": "^27.4.6", + 
"jest-environment-node": "^27.4.6", + "jest-haste-map": "^27.4.6", + "jest-leak-detector": "^27.4.6", + "jest-message-util": "^27.4.6", + "jest-resolve": "^27.4.6", + "jest-runtime": "^27.4.6", + "jest-util": "^27.4.2", + "jest-worker": "^27.4.6", "source-map-support": "^0.5.6", "throat": "^6.0.1" } }, "jest-runtime": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.1.1.tgz", - "integrity": "sha512-FEwy+tSzmsvuKaQpyYsUyk31KG5vMmA2r2BSTHgv0yNfcooQdm2Ke91LM9Ud8D3xz8CLDHJWAI24haMFTwrsPg==", - "dev": true, - "requires": { - "@jest/console": "^27.1.1", - "@jest/environment": "^27.1.1", - "@jest/fake-timers": "^27.1.1", - "@jest/globals": "^27.1.1", - "@jest/source-map": "^27.0.6", - "@jest/test-result": "^27.1.1", - "@jest/transform": "^27.1.1", - "@jest/types": "^27.1.1", - "@types/yargs": "^16.0.0", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-27.4.6.tgz", + "integrity": "sha512-eXYeoR/MbIpVDrjqy5d6cGCFOYBFFDeKaNWqTp0h6E74dK0zLHzASQXJpl5a2/40euBmKnprNLJ0Kh0LCndnWQ==", + "dev": true, + "requires": { + "@jest/environment": "^27.4.6", + "@jest/fake-timers": "^27.4.6", + "@jest/globals": "^27.4.6", + "@jest/source-map": "^27.4.0", + "@jest/test-result": "^27.4.6", + "@jest/transform": "^27.4.6", + "@jest/types": "^27.4.2", "chalk": "^4.0.0", "cjs-module-lexer": "^1.0.0", "collect-v8-coverage": "^1.0.0", "execa": "^5.0.0", - "exit": "^0.1.2", "glob": "^7.1.3", "graceful-fs": "^4.2.4", - "jest-haste-map": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-mock": "^27.1.1", - "jest-regex-util": "^27.0.6", - "jest-resolve": "^27.1.1", - "jest-snapshot": "^27.1.1", - "jest-util": "^27.1.1", - "jest-validate": "^27.1.1", + "jest-haste-map": "^27.4.6", + "jest-message-util": "^27.4.6", + "jest-mock": "^27.4.6", + "jest-regex-util": "^27.4.0", + "jest-resolve": "^27.4.6", + "jest-snapshot": "^27.4.6", + "jest-util": "^27.4.2", "slash": "^3.0.0", - "strip-bom": "^4.0.0", - "yargs": 
"^16.0.3" + "strip-bom": "^4.0.0" } }, "jest-serializer": { - "version": "27.0.6", - "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.0.6.tgz", - "integrity": "sha512-PtGdVK9EGC7dsaziskfqaAPib6wTViY3G8E5wz9tLVPhHyiDNTZn/xjZ4khAw+09QkoOVpn7vF5nPSN6dtBexA==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jest-serializer/-/jest-serializer-27.4.0.tgz", + "integrity": "sha512-RDhpcn5f1JYTX2pvJAGDcnsNTnsV9bjYPU8xcV+xPwOXnUPOQwf4ZEuiU6G9H1UztH+OapMgu/ckEVwO87PwnQ==", "dev": true, "requires": { "@types/node": "*", @@ -2199,34 +2151,32 @@ } }, "jest-snapshot": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.1.1.tgz", - "integrity": "sha512-Wi3QGiuRFo3lU+EbQmZnBOks0CJyAMPHvYoG7iJk00Do10jeOyuOEO0Jfoaoun8+8TDv+Nzl7Aswir/IK9+1jg==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-27.4.6.tgz", + "integrity": "sha512-fafUCDLQfzuNP9IRcEqaFAMzEe7u5BF7mude51wyWv7VRex60WznZIC7DfKTgSIlJa8aFzYmXclmN328aqSDmQ==", "dev": true, "requires": { "@babel/core": "^7.7.2", "@babel/generator": "^7.7.2", - "@babel/parser": "^7.7.2", "@babel/plugin-syntax-typescript": "^7.7.2", "@babel/traverse": "^7.7.2", "@babel/types": "^7.0.0", - "@jest/transform": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/transform": "^27.4.6", + "@jest/types": "^27.4.2", "@types/babel__traverse": "^7.0.4", "@types/prettier": "^2.1.5", "babel-preset-current-node-syntax": "^1.0.0", "chalk": "^4.0.0", - "expect": "^27.1.1", + "expect": "^27.4.6", "graceful-fs": "^4.2.4", - "jest-diff": "^27.1.1", - "jest-get-type": "^27.0.6", - "jest-haste-map": "^27.1.1", - "jest-matcher-utils": "^27.1.1", - "jest-message-util": "^27.1.1", - "jest-resolve": "^27.1.1", - "jest-util": "^27.1.1", + "jest-diff": "^27.4.6", + "jest-get-type": "^27.4.0", + "jest-haste-map": "^27.4.6", + "jest-matcher-utils": "^27.4.6", + "jest-message-util": "^27.4.6", + "jest-util": "^27.4.2", "natural-compare": 
"^1.4.0", - "pretty-format": "^27.1.1", + "pretty-format": "^27.4.6", "semver": "^7.3.2" }, "dependencies": { @@ -2242,60 +2192,60 @@ } }, "jest-util": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.1.1.tgz", - "integrity": "sha512-zf9nEbrASWn2mC/L91nNb0K+GkhFvi4MP6XJG2HqnHzHvLYcs7ou/In68xYU1i1dSkJlrWcYfWXQE8nVR+nbOA==", + "version": "27.4.2", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-27.4.2.tgz", + "integrity": "sha512-YuxxpXU6nlMan9qyLuxHaMMOzXAl5aGZWCSzben5DhLHemYQxCc4YK+4L3ZrCutT8GPQ+ui9k5D8rUJoDioMnA==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "@types/node": "*", "chalk": "^4.0.0", + "ci-info": "^3.2.0", "graceful-fs": "^4.2.4", - "is-ci": "^3.0.0", "picomatch": "^2.2.3" } }, "jest-validate": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.1.1.tgz", - "integrity": "sha512-N5Er5FKav/8m2dJwn7BGnZwnoD1BSc8jx5T+diG2OvyeugvZDhPeAt5DrNaGkkaKCrSUvuE7A5E4uHyT7Vj0Mw==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-27.4.6.tgz", + "integrity": "sha512-872mEmCPVlBqbA5dToC57vA3yJaMRfIdpCoD3cyHWJOMx+SJwLNw0I71EkWs41oza/Er9Zno9XuTkRYCPDUJXQ==", "dev": true, "requires": { - "@jest/types": "^27.1.1", + "@jest/types": "^27.4.2", "camelcase": "^6.2.0", "chalk": "^4.0.0", - "jest-get-type": "^27.0.6", + "jest-get-type": "^27.4.0", "leven": "^3.1.0", - "pretty-format": "^27.1.1" + "pretty-format": "^27.4.6" }, "dependencies": { "camelcase": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", - "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "dev": true } 
} }, "jest-watcher": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.1.1.tgz", - "integrity": "sha512-XQzyHbxziDe+lZM6Dzs40fEt4q9akOGwitJnxQasJ9WG0bv3JGiRlsBgjw13znGapeMtFaEsyhL0Cl04IbaoWQ==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-27.4.6.tgz", + "integrity": "sha512-yKQ20OMBiCDigbD0quhQKLkBO+ObGN79MO4nT7YaCuQ5SM+dkBNWE8cZX0FjU6czwMvWw6StWbe+Wv4jJPJ+fw==", "dev": true, "requires": { - "@jest/test-result": "^27.1.1", - "@jest/types": "^27.1.1", + "@jest/test-result": "^27.4.6", + "@jest/types": "^27.4.2", "@types/node": "*", "ansi-escapes": "^4.2.1", "chalk": "^4.0.0", - "jest-util": "^27.1.1", + "jest-util": "^27.4.2", "string-length": "^4.0.1" } }, "jest-worker": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.1.1.tgz", - "integrity": "sha512-XJKCL7tu+362IUYTWvw8+3S75U7qMiYiRU6u5yqscB48bTvzwN6i8L/7wVTXiFLwkRsxARNM7TISnTvcgv9hxA==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.4.6.tgz", + "integrity": "sha512-gHWJF/6Xi5CTG5QCvROr6GcmpIqNYpDJyc8A1h/DyXqH1tD6SnRCM0d3U5msV31D2LB/U+E0M+W4oyvKV44oNw==", "dev": true, "requires": { "@types/node": "*", @@ -2426,6 +2376,12 @@ "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", "integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=" }, + "lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha1-vMbEmkKihA7Zl/Mj6tpezRguC/4=", + "dev": true + }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -2451,12 +2407,12 @@ "dev": true }, "makeerror": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.11.tgz", - "integrity": "sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw=", + "version": "1.0.12", + "resolved": 
"https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", "dev": true, "requires": { - "tmpl": "1.0.x" + "tmpl": "1.0.5" } }, "merge-stream": { @@ -2476,18 +2432,18 @@ } }, "mime-db": { - "version": "1.49.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", - "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", "dev": true }, "mime-types": { - "version": "2.1.32", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", - "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "dev": true, "requires": { - "mime-db": "1.49.0" + "mime-db": "1.51.0" } }, "mimic-fn": { @@ -2534,16 +2490,10 @@ "integrity": "sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs=", "dev": true }, - "node-modules-regexp": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz", - "integrity": "sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA=", - "dev": true - }, "node-releases": { - "version": "1.1.75", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.75.tgz", - "integrity": "sha512-Qe5OUajvqrqDSy6wrWFmMwfJ0jVgwiw4T3KqmbTcZ62qW0gQkheXYhcFM1+lOVcGUoRxcEcfyvFMAnDgaF1VWw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": 
"sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==", "dev": true }, "normalize-path": { @@ -2597,12 +2547,6 @@ "word-wrap": "~1.2.3" } }, - "p-each-series": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/p-each-series/-/p-each-series-2.2.0.tgz", - "integrity": "sha512-ycIL2+1V32th+8scbpTvyHNaHe02z0sjgh91XXjAk+ZeXoPN4Z46DVUnzdso0aX4KckKw0FNNFHdjZ2UsZvxiA==", - "dev": true - }, "p-limit": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", @@ -2656,19 +2600,22 @@ "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", "dev": true }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==" + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==" }, "pirates": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.1.tgz", - "integrity": "sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA==", - "dev": true, - "requires": { - "node-modules-regexp": "^1.0.0" - } + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.5.tgz", + "integrity": "sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ==", + "dev": true }, "pkg-dir": { "version": "4.2.0", @@ -2686,13 +2633,12 @@ "dev": true }, "pretty-format": { - 
"version": "27.1.1", - "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.1.1.tgz", - "integrity": "sha512-zdBi/xlstKJL42UH7goQti5Hip/B415w1Mfj+WWWYMBylAYtKESnXGUtVVcMVid9ReVjypCotUV6CEevYPHv2g==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-27.4.6.tgz", + "integrity": "sha512-NblstegA1y/RJW2VyML+3LlpFjzx62cUrtBIKIWDXEDkjNeleA7Od7nrzcs/VLQvAeV4CgSYhrN39DRN88Qi/g==", "dev": true, "requires": { - "@jest/types": "^27.1.1", - "ansi-regex": "^5.0.0", + "ansi-regex": "^5.0.1", "ansi-styles": "^5.0.0", "react-is": "^17.0.1" }, @@ -2706,9 +2652,9 @@ } }, "prompts": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.1.tgz", - "integrity": "sha512-EQyfIuO2hPDsX1L/blblV+H7I0knhgAd82cVneCwcdND9B8AuCDuRcBH6yIcG4dFzlOUqbazQqwGjx5xmsNLuQ==", + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", "dev": true, "requires": { "kleur": "^3.0.3", @@ -2757,13 +2703,14 @@ "dev": true }, "resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", "dev": true, "requires": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -2781,6 +2728,12 @@ "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", "dev": true }, + "resolve.exports": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/resolve.exports/-/resolve.exports-1.1.0.tgz", + "integrity": "sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==", + "dev": true + }, "rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", @@ -2832,9 +2785,9 @@ "dev": true }, "shelljs": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz", - "integrity": "sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==", + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "dev": true, "requires": { "glob": "^7.0.0", @@ -2843,19 +2796,19 @@ } }, "shx": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.3.tgz", - "integrity": "sha512-nZJ3HFWVoTSyyB+evEKjJ1STiixGztlqwKLTUNV5KqMWtGey9fTd4KU1gdZ1X9BV6215pswQ/Jew9NsuS/fNDA==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", "dev": true, "requires": { "minimist": "^1.2.3", - "shelljs": "^0.8.4" + "shelljs": "^0.8.5" } }, "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "dev": true }, "sisteransi": { @@ -2876,14 +2829,20 @@ "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "source-map-support": { - 
"version": "0.5.20", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.20.tgz", - "integrity": "sha512-n1lZZ8Ve4ksRqizaBQgxXDgKwttHDhyfQjA6YZZn8+AroHbsIz+JjwxQDxbp+7y5OYCI8t1Yk7etjD9CRd2hIw==", + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } }, + "sourcemap-codec": { + "version": "1.4.8", + "resolved": "https://registry.npmjs.org/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz", + "integrity": "sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==", + "dev": true + }, "sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", @@ -2891,9 +2850,9 @@ "dev": true }, "stack-utils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.3.tgz", - "integrity": "sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.5.tgz", + "integrity": "sha512-xrQcmYhOsn/1kX+Vraq+7j4oE2j/6BFscZ0etmYg81xuM8Gq0022Pxb8+IqgOFUIaxHs0KaSb7T1+OegiNrNFA==", "dev": true, "requires": { "escape-string-regexp": "^2.0.0" @@ -2918,23 +2877,23 @@ } }, "string-width": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", - "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dev": true, "requires": { "emoji-regex": "^8.0.0", 
"is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.0" + "strip-ansi": "^6.0.1" } }, "strip-ansi": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", - "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dev": true, "requires": { - "ansi-regex": "^5.0.0" + "ansi-regex": "^5.0.1" } }, "strip-bom": { @@ -2967,6 +2926,12 @@ "supports-color": "^7.0.0" } }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "symbol-tree": { "version": "3.2.4", "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz", @@ -3049,16 +3014,16 @@ } }, "ts-jest": { - "version": "27.0.5", - "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-27.0.5.tgz", - "integrity": "sha512-lIJApzfTaSSbtlksfFNHkWOzLJuuSm4faFAfo5kvzOiRAuoN4/eKxVJ2zEAho8aecE04qX6K1pAzfH5QHL1/8w==", + "version": "27.1.3", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-27.1.3.tgz", + "integrity": "sha512-6Nlura7s6uM9BVUAoqLH7JHyMXjz8gluryjpPXxr3IxZdAXnU6FhjvVLHFtfd1vsE1p8zD1OJfskkc0jhTSnkA==", "dev": true, "requires": { "bs-logger": "0.x", "fast-json-stable-stringify": "2.x", "jest-util": "^27.0.0", "json5": "2.x", - "lodash": "4.x", + "lodash.memoize": "4.x", "make-error": "1.x", "semver": "7.x", "yargs-parser": "20.x" @@ -3111,9 +3076,9 @@ } }, "typescript": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.2.tgz", - "integrity": 
"sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ==", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.4.tgz", + "integrity": "sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==", "dev": true }, "typescript-char": { @@ -3139,9 +3104,9 @@ "dev": true }, "v8-to-istanbul": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.0.0.tgz", - "integrity": "sha512-LkmXi8UUNxnCC+JlH7/fsfsKr5AU110l+SYGJimWNkWhxbN5EyeOtm1MJ0hhvqMMOhGwBj1Fp70Yv9i+hX0QAg==", + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-8.1.1.tgz", + "integrity": "sha512-FGtKtv3xIpR6BYhvgH8MI/y78oT7d8Au3ww4QIxymrCtZEh5b8gCw2siywE+puhEmuWKDtmfrvF5UlB298ut3w==", "dev": true, "requires": { "@types/istanbul-lib-coverage": "^2.0.1", @@ -3180,9 +3145,9 @@ } }, "vscode-languageserver-textdocument": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.1.tgz", - "integrity": "sha512-UIcJDjX7IFkck7cSkNNyzIz5FyvpQfY7sdzVy+wkKN/BLaD4DQ0ppXQrKePomCxTS7RrolK1I0pey0bG9eh8dA==" + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/vscode-languageserver-textdocument/-/vscode-languageserver-textdocument-1.0.4.tgz", + "integrity": "sha512-/xhqXP/2A2RSs+J8JNXpiiNVvvNM0oTosNVmQnunlKvq9o4mupHOBAnnzH0lwIPKazXKvAKsVp1kr+H/K4lgoQ==" }, "vscode-languageserver-types": { "version": "3.16.0", @@ -3190,9 +3155,9 @@ "integrity": "sha512-k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==" }, "vscode-uri": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.2.tgz", - "integrity": "sha512-jkjy6pjU1fxUvI51P+gCsxg1u2n8LSt0W6KrCNQceaziKzff74GoWmjVG46KieVzybO1sttPQmYfrwSHey7GUA==" + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/vscode-uri/-/vscode-uri-3.0.3.tgz", + "integrity": "sha512-EcswR2S8bpR7fD0YPeS7r2xXExrScVMxg4MedACaWHEtx9ftCF/qHG1xGkolzTPcEmjTavCQgbVzHUIdTMzFGA==" }, "w3c-hr-time": { "version": "1.0.2", @@ -3213,12 +3178,12 @@ } }, "walker": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.7.tgz", - "integrity": "sha1-L3+bj9ENZ3JisYqITijRlhjgKPs=", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", "dev": true, "requires": { - "makeerror": "1.0.x" + "makeerror": "1.0.12" } }, "webidl-conversions": { @@ -3297,9 +3262,9 @@ } }, "ws": { - "version": "7.5.5", - "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.5.tgz", - "integrity": "sha512-BAkMFcAzl8as1G/hArkxOxq3G7pjUqQ3gzYbLL0/5zNkph70e+lCoxBGnm6AW1+/aiNeV4fnKqZ8m4GZewmH2w==", + "version": "7.5.6", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.6.tgz", + "integrity": "sha512-6GLgCqo2cy2A2rjCNFlxQS6ZljG/coZfZXclldI8FB/1G3CCI36Zd8xy2HrFVACi8tfk5XrgLQEk+P0Tnz9UcA==", "dev": true }, "xml": { diff --git a/packages/pyright-internal/package.json b/packages/pyright-internal/package.json index e8388d0c0f02..1e627cffba33 100644 --- a/packages/pyright-internal/package.json +++ b/packages/pyright-internal/package.json @@ -2,7 +2,7 @@ "name": "pyright-internal", "displayName": "pyright", "description": "Type checker for the Python language", - "version": "1.1.170", + "version": "1.1.225", "license": "MIT", "private": true, "files": [ @@ -16,31 +16,31 @@ }, "dependencies": { "@iarna/toml": "2.2.5", - "@yarnpkg/fslib": "2.5.2", + "@yarnpkg/fslib": "2.6.0", "@yarnpkg/libzip": "2.2.2", "chalk": "^4.1.2", - "chokidar": "^3.5.2", - "command-line-args": "^5.2.0", + "chokidar": "^3.5.3", + "command-line-args": "^5.2.1", "jsonc-parser": "^3.0.0", "leven": "^3.1.0", - "source-map-support": "^0.5.20", + "source-map-support": 
"^0.5.21", "tmp": "^0.2.1", "typescript-char": "^0.0.0", "vscode-jsonrpc": "6.0.0", "vscode-languageserver": "7.0.0", - "vscode-languageserver-textdocument": "^1.0.1", + "vscode-languageserver-textdocument": "^1.0.4", "vscode-languageserver-types": "3.16.0", - "vscode-uri": "^3.0.2" + "vscode-uri": "^3.0.3" }, "devDependencies": { "@types/command-line-args": "^5.2.0", - "@types/jest": "^27.0.1", - "@types/node": "^12.20.24", - "@types/tmp": "^0.2.1", - "jest": "^27.1.1", - "jest-junit": "^12.2.0", - "shx": "^0.3.3", - "ts-jest": "^27.0.5", - "typescript": "~4.4.2" + "@types/jest": "^27.4.0", + "@types/node": "^17.0.14", + "@types/tmp": "^0.2.3", + "jest": "^27.4.7", + "jest-junit": "^13.0.0", + "shx": "^0.3.4", + "ts-jest": "^27.1.3", + "typescript": "~4.4.4" } } diff --git a/packages/pyright-internal/src/analyzer/aliasDeclarationUtils.ts b/packages/pyright-internal/src/analyzer/aliasDeclarationUtils.ts index 161faf895f7c..e4a4efce4a95 100644 --- a/packages/pyright-internal/src/analyzer/aliasDeclarationUtils.ts +++ b/packages/pyright-internal/src/analyzer/aliasDeclarationUtils.ts @@ -11,7 +11,7 @@ import { Declaration, DeclarationType } from './declaration'; import { Symbol } from './symbol'; export interface ResolvedAliasInfo { - declaration: Declaration; + declaration: Declaration | undefined; isPrivate: boolean; privatePyTypedImported?: string; privatePyTypedImporter?: string; @@ -57,7 +57,7 @@ export function resolveAliasDeclaration( } let lookupResult: ImportLookupResult | undefined; - if (curDeclaration.path) { + if (curDeclaration.path && curDeclaration.loadSymbolsFromPath) { lookupResult = importLookup(curDeclaration.path); } @@ -73,6 +73,16 @@ export function resolveAliasDeclaration( allowExternallyHiddenAccess ); } + + // If the symbol comes from a native library, we won't + // be able to resolve its type directly. 
+ if (curDeclaration.isNativeLib) { + return { + declaration: undefined, + isPrivate, + }; + } + return undefined; } @@ -95,9 +105,14 @@ export function resolveAliasDeclaration( } } - // Prefer the last declaration in the list. This ensures that + // Prefer the last unvisited declaration in the list. This ensures that // we use all of the overloads if it's an overloaded function. - curDeclaration = declarations[declarations.length - 1]; + const unvisitedDecls = declarations.filter((decl) => !alreadyVisited.includes(decl)); + if (unvisitedDecls.length > 0) { + curDeclaration = unvisitedDecls[unvisitedDecls.length - 1]; + } else { + curDeclaration = declarations[declarations.length - 1]; + } if (isPrivatePyTypedImport) { privatePyTypedImported = privatePyTypedImported ?? curDeclaration?.moduleName; diff --git a/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts b/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts index 4e4180ca932d..d7010de230ad 100644 --- a/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts +++ b/packages/pyright-internal/src/analyzer/analyzerFileInfo.ts @@ -16,11 +16,16 @@ import { Scope } from './scope'; import { SymbolTable } from './symbol'; // Maps import paths to the symbol table for the imported module. 
-export type ImportLookup = (filePath: string) => ImportLookupResult | undefined; +export interface AbsoluteModuleDescriptor { + importingFilePath: string; + nameParts: string[]; +} +export type ImportLookup = (filePathOrModule: string | AbsoluteModuleDescriptor) => ImportLookupResult | undefined; export interface ImportLookupResult { symbolTable: SymbolTable; dunderAllNames: string[] | undefined; + usesUnsupportedDunderAllForm: boolean; docString: string | undefined; } @@ -28,14 +33,12 @@ export interface AnalyzerFileInfo { importLookup: ImportLookup; futureImports: Map; builtinsScope?: Scope | undefined; - typingModulePath?: string | undefined; - typeshedModulePath?: string | undefined; - collectionsModulePath?: string | undefined; diagnosticSink: TextRangeDiagnosticSink; executionEnvironment: ExecutionEnvironment; diagnosticRuleSet: DiagnosticRuleSet; fileContents: string; lines: TextRangeCollection; + typingSymbolAliases: Map; filePath: string; moduleName: string; isStubFile: boolean; @@ -43,5 +46,6 @@ export interface AnalyzerFileInfo { isTypingExtensionsStubFile: boolean; isBuiltInStubFile: boolean; isInPyTypedPackage: boolean; + isIPythonMode: boolean; accessedSymbolMap: Map; } diff --git a/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts b/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts index eedfcb889a7b..6a586484b0d7 100644 --- a/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts +++ b/packages/pyright-internal/src/analyzer/analyzerNodeInfo.ts @@ -22,7 +22,7 @@ import { StringNode, } from '../parser/parseNodes'; import { AnalyzerFileInfo } from './analyzerFileInfo'; -import { FlowFlags, FlowNode } from './codeFlow'; +import { FlowFlags, FlowNode } from './codeFlowTypes'; import { Declaration } from './declaration'; import { ImportResult } from './importResult'; import { Scope } from './scope'; @@ -30,6 +30,7 @@ import { Scope } from './scope'; export interface DunderAllInfo { names: string[]; stringNodes: StringNode[]; + 
usesUnsupportedDunderAllForm: boolean; } interface AnalyzerNodeInfo { @@ -63,6 +64,10 @@ interface AnalyzerNodeInfo { // function or lambda) that requires code flow analysis. codeFlowExpressions?: Set; + // Number that represents the complexity of a function's code + // flow graph. + codeFlowComplexity?: number; + // List of __all__ symbols in the module. dunderAllInfo?: DunderAllInfo | undefined; } @@ -78,6 +83,9 @@ export function cleanNodeAnalysisInfo(node: ParseNode) { delete analyzerNode.flowNode; delete analyzerNode.afterFlowNode; delete analyzerNode.fileInfo; + delete analyzerNode.codeFlowExpressions; + delete analyzerNode.codeFlowComplexity; + delete analyzerNode.dunderAllInfo; } export function getImportInfo(node: ParseNode): ImportResult | undefined { @@ -153,6 +161,16 @@ export function setCodeFlowExpressions(node: ExecutionScopeNode, expressions: Se analyzerNode.codeFlowExpressions = expressions; } +export function getCodeFlowComplexity(node: FunctionNode) { + const analyzerNode = node as AnalyzerNodeInfo; + return analyzerNode.codeFlowComplexity ?? 
0; +} + +export function setCodeFlowComplexity(node: FunctionNode, complexity: number) { + const analyzerNode = node as AnalyzerNodeInfo; + analyzerNode.codeFlowComplexity = complexity; +} + export function getDunderAllInfo(node: ModuleNode): DunderAllInfo | undefined { const analyzerNode = node as AnalyzerNodeInfo; return analyzerNode.dunderAllInfo; diff --git a/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts b/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts index 2ca47dd4d191..5d0a60bb3797 100644 --- a/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts +++ b/packages/pyright-internal/src/analyzer/backgroundAnalysisProgram.ts @@ -10,7 +10,7 @@ import { CancellationToken } from 'vscode-languageserver'; import { TextDocumentContentChangeEvent } from 'vscode-languageserver-textdocument'; -import { BackgroundAnalysisBase } from '../backgroundAnalysisBase'; +import { BackgroundAnalysisBase, IndexOptions } from '../backgroundAnalysisBase'; import { ConfigOptions, ExecutionEnvironment } from '../common/configOptions'; import { ConsoleInterface } from '../common/console'; import { Diagnostic } from '../common/diagnostic'; @@ -21,7 +21,7 @@ import { IndexResults } from '../languageService/documentSymbolProvider'; import { FileSet } from '../tests/harness/vfs/filesystem'; import { AnalysisCompleteCallback, analyzeProgram } from './analysis'; import { ImportResolver } from './importResolver'; -import { Indices, MaxAnalysisTime, Program } from './program'; +import { Indices, MaxAnalysisTime, OpenFileOptions, Program } from './program'; export class BackgroundAnalysisProgram { private _program: Program; @@ -92,9 +92,9 @@ export class BackgroundAnalysisProgram { this._program.setAllowedThirdPartyImports(importNames); } - setFileOpened(filePath: string, version: number | null, contents: string, isTracked: boolean) { - this._backgroundAnalysis?.setFileOpened(filePath, version, [{ text: contents }], isTracked); - 
this._program.setFileOpened(filePath, version, [{ text: contents }], isTracked); + setFileOpened(filePath: string, version: number | null, contents: string, options: OpenFileOptions) { + this._backgroundAnalysis?.setFileOpened(filePath, version, [{ text: contents }], options); + this._program.setFileOpened(filePath, version, [{ text: contents }], options); } initializeFileSystem(files: Record) { @@ -105,10 +105,10 @@ export class BackgroundAnalysisProgram { path: string, version: number | null, contents: TextDocumentContentChangeEvent[], - isTracked: boolean + options: OpenFileOptions ) { - this._backgroundAnalysis?.setFileOpened(path, version, contents, isTracked); - this._program.setFileOpened(path, version, contents, isTracked); + this._backgroundAnalysis?.setFileOpened(path, version, contents, options); + this._program.setFileOpened(path, version, contents, options); this.markFilesDirty([path], true); } @@ -118,14 +118,14 @@ export class BackgroundAnalysisProgram { this._reportDiagnosticsForRemovedFiles(diagnostics); } - markAllFilesDirty(evenIfContentsAreSame: boolean) { - this._backgroundAnalysis?.markAllFilesDirty(evenIfContentsAreSame); - this._program.markAllFilesDirty(evenIfContentsAreSame); + markAllFilesDirty(evenIfContentsAreSame: boolean, indexingNeeded = true) { + this._backgroundAnalysis?.markAllFilesDirty(evenIfContentsAreSame, indexingNeeded); + this._program.markAllFilesDirty(evenIfContentsAreSame, indexingNeeded); } - markFilesDirty(filePaths: string[], evenIfContentsAreSame: boolean) { - this._backgroundAnalysis?.markFilesDirty(filePaths, evenIfContentsAreSame); - this._program.markFilesDirty(filePaths, evenIfContentsAreSame); + markFilesDirty(filePaths: string[], evenIfContentsAreSame: boolean, indexingNeeded = true) { + this._backgroundAnalysis?.markFilesDirty(filePaths, evenIfContentsAreSame, indexingNeeded); + this._program.markFilesDirty(filePaths, evenIfContentsAreSame, indexingNeeded); } setCompletionCallback(callback?: 
AnalysisCompleteCallback) { @@ -165,20 +165,23 @@ export class BackgroundAnalysisProgram { } } - startIndexing() { - if (!this._configOptions.indexing) { - return; - } - - this._backgroundAnalysis?.startIndexing(this._configOptions, this.host.kind, this._getIndices()); + startIndexing(indexOptions: IndexOptions) { + this._backgroundAnalysis?.startIndexing( + indexOptions, + this._configOptions, + this.importResolver, + this.host.kind, + this._getIndices() + ); } refreshIndexing() { - if (!this._configOptions.indexing) { - return; - } - - this._backgroundAnalysis?.refreshIndexing(this._configOptions, this.host.kind, this._indices); + this._backgroundAnalysis?.refreshIndexing( + this._configOptions, + this.importResolver, + this.host.kind, + this._indices + ); } cancelIndexing() { @@ -211,19 +214,19 @@ export class BackgroundAnalysisProgram { return this._program.writeTypeStub(targetImportPath, targetIsSingleFile, stubPath, token); } - invalidateAndForceReanalysis(rebuildLibraryIndexing: boolean) { + invalidateAndForceReanalysis(rebuildUserFileIndexing: boolean, rebuildLibraryIndexing: boolean) { if (rebuildLibraryIndexing) { this.refreshIndexing(); } - this._backgroundAnalysis?.invalidateAndForceReanalysis(); + this._backgroundAnalysis?.invalidateAndForceReanalysis(rebuildUserFileIndexing); // Make sure the import resolver doesn't have invalid // cached entries. this._importResolver.invalidateCache(); // Mark all files with one or more errors dirty. 
- this._program.markAllFilesDirty(true); + this._program.markAllFilesDirty(true, rebuildUserFileIndexing); } restart() { diff --git a/packages/pyright-internal/src/analyzer/binder.ts b/packages/pyright-internal/src/analyzer/binder.ts index 3916c4a1388b..5dd39d3e1fb1 100644 --- a/packages/pyright-internal/src/analyzer/binder.ts +++ b/packages/pyright-internal/src/analyzer/binder.ts @@ -77,14 +77,13 @@ import { YieldFromNode, YieldNode, } from '../parser/parseNodes'; -import { KeywordType, OperatorType, StringTokenFlags } from '../parser/tokenizerTypes'; +import { KeywordType, OperatorType } from '../parser/tokenizerTypes'; import { AnalyzerFileInfo, ImportLookupResult } from './analyzerFileInfo'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; import { CodeFlowReferenceExpressionNode, createKeyForReference, FlowAssignment, - FlowAssignmentAlias, FlowBranchLabel, FlowCall, FlowCondition, @@ -100,7 +99,7 @@ import { FlowWildcardImport, getUniqueFlowNodeId, isCodeFlowSupportedForReference, -} from './codeFlow'; +} from './codeFlowTypes'; import { AliasDeclaration, ClassDeclaration, @@ -137,6 +136,11 @@ interface FinalInfo { finalTypeNode: ExpressionNode | undefined; } +interface ClassVarInfo { + isClassVar: boolean; + classVarTypeNode: ExpressionNode | undefined; +} + export class Binder extends ParseTreeWalker { private readonly _fileInfo: AnalyzerFileInfo; @@ -183,16 +187,30 @@ export class Binder extends ParseTreeWalker { // Aliases of "sys". private _sysImportAliases: string[] = []; + // Aliases of "dataclasses". + private _dataclassesImportAliases: string[] = []; + // Map of imports of specific symbols imported from "typing" and "typing_extensions" // and the names they alias to. private _typingSymbolAliases: Map = new Map(); + // Map of imports of specific symbols imported from "dataclasses" + // and the names they alias to. + private _dataclassesSymbolAliases: Map = new Map(); + // List of names statically assigned to __all__ symbol. 
private _dunderAllNames: string[] | undefined; // List of string nodes associated with the "__all__" symbol. private _dunderAllStringNodes: StringNode[] = []; + // One or more statements are manipulating __all__ in a manner that a + // static analyzer doesn't understand. + private _usesUnsupportedDunderAllForm = false; + + // Are we currently binding code located within an except block? + private _isInExceptSuite = false; + // Flow node that is used for unreachable code. private static _unreachableFlowNode: FlowNode = { flags: FlowFlags.Unreachable, @@ -207,6 +225,10 @@ export class Binder extends ParseTreeWalker { // on whether they are listed in the __all__ list. private _potentialPrivateSymbols = new Map(); + // Estimates the overall complexity of the code flow graph for + // the current function. + private _functionCodeFlowComplexity = 0; + constructor(fileInfo: AnalyzerFileInfo, private _moduleSymbolOnly = false) { super(); @@ -227,17 +249,18 @@ export class Binder extends ParseTreeWalker { // Bind implicit names. 
// List taken from https://docs.python.org/3/reference/import.html#__name__ - this._addBuiltInSymbolToCurrentScope('__doc__', node, 'str | None'); - this._addBuiltInSymbolToCurrentScope('__name__', node, 'str'); - this._addBuiltInSymbolToCurrentScope('__loader__', node, 'Any'); - this._addBuiltInSymbolToCurrentScope('__package__', node, 'str'); - this._addBuiltInSymbolToCurrentScope('__spec__', node, 'Any'); - this._addBuiltInSymbolToCurrentScope('__path__', node, 'Iterable[str]'); - this._addBuiltInSymbolToCurrentScope('__file__', node, 'str'); - this._addBuiltInSymbolToCurrentScope('__cached__', node, 'str'); - this._addBuiltInSymbolToCurrentScope('__dict__', node, 'Dict[str, Any]'); - this._addBuiltInSymbolToCurrentScope('__annotations__', node, 'Dict[str, Any]'); - this._addBuiltInSymbolToCurrentScope('__builtins__', node, 'Any'); + this._addImplicitSymbolToCurrentScope('__doc__', node, 'str | None'); + this._addImplicitSymbolToCurrentScope('__name__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__qualname__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__loader__', node, 'Any'); + this._addImplicitSymbolToCurrentScope('__package__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__spec__', node, 'Any'); + this._addImplicitSymbolToCurrentScope('__path__', node, 'Iterable[str]'); + this._addImplicitSymbolToCurrentScope('__file__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__cached__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__dict__', node, 'Dict[str, Any]'); + this._addImplicitSymbolToCurrentScope('__annotations__', node, 'Dict[str, Any]'); + this._addImplicitSymbolToCurrentScope('__builtins__', node, 'Any'); // Create a start node for the module. 
this._currentFlowNode = this._createStartFlowNode(); @@ -276,6 +299,7 @@ export class Binder extends ParseTreeWalker { AnalyzerNodeInfo.setDunderAllInfo(node, { names: this._dunderAllNames, stringNodes: this._dunderAllStringNodes, + usesUnsupportedDunderAllForm: this._usesUnsupportedDunderAllForm, }); } else { AnalyzerNodeInfo.setDunderAllInfo(node, undefined); @@ -374,6 +398,7 @@ export class Binder extends ParseTreeWalker { path: this._fileInfo.filePath, range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; const symbol = this._bindNameToScope(this._currentScope, node.name.value); @@ -396,8 +421,8 @@ export class Binder extends ParseTreeWalker { this._createNewScope(ScopeType.Class, parentScope, () => { AnalyzerNodeInfo.setScope(node, this._currentScope); - this._addBuiltInSymbolToCurrentScope('__doc__', node, 'str | None'); - this._addBuiltInSymbolToCurrentScope('__module__', node, 'str'); + this._addImplicitSymbolToCurrentScope('__doc__', node, 'str | None'); + this._addImplicitSymbolToCurrentScope('__module__', node, 'str'); if (!this._moduleSymbolOnly) { // Analyze the suite. 
@@ -411,6 +436,8 @@ export class Binder extends ParseTreeWalker { } override visitFunction(node: FunctionNode): boolean { + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + const symbol = this._bindNameToScope(this._currentScope, node.name.value); const containingClassNode = ParseTreeUtils.getEnclosingClass(node, true); const functionDeclaration: FunctionDeclaration = { @@ -421,6 +448,7 @@ export class Binder extends ParseTreeWalker { path: this._fileInfo.filePath, range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; if (symbol) { @@ -453,39 +481,21 @@ export class Binder extends ParseTreeWalker { this.walk(node.functionAnnotationComment); } - // Find the function or module that contains this function and use its scope. - // We can't simply use this._currentScope because functions within a class use - // the scope of the containing function or module when they execute. - let functionOrModuleNode: ParseNode | undefined = node.parent; - while (functionOrModuleNode) { - if ( - functionOrModuleNode.nodeType === ParseNodeType.Module || - functionOrModuleNode.nodeType === ParseNodeType.Function - ) { - break; - } - - functionOrModuleNode = functionOrModuleNode.parent; - } - assert(functionOrModuleNode !== undefined); - - const functionOrModuleScope = AnalyzerNodeInfo.getScope(functionOrModuleNode!); - assert(functionOrModuleScope !== undefined); - // Don't walk the body of the function until we're done analyzing // the current scope. - this._createNewScope(ScopeType.Function, functionOrModuleScope, () => { + this._createNewScope(ScopeType.Function, this._getNonClassParentScope(), () => { AnalyzerNodeInfo.setScope(node, this._currentScope); const enclosingClass = ParseTreeUtils.getEnclosingClass(node); if (enclosingClass) { // Add the implicit "__class__" symbol described in PEP 3135. 
- this._addBuiltInSymbolToCurrentScope('__class__', node, 'class'); + this._addImplicitSymbolToCurrentScope('__class__', node, 'class'); } this._deferBinding(() => { // Create a start node for the function. this._currentFlowNode = this._createStartFlowNode(); + this._functionCodeFlowComplexity = 0; node.parameters.forEach((paramNode) => { if (paramNode.name) { @@ -501,6 +511,7 @@ export class Binder extends ParseTreeWalker { this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(paramDeclaration); @@ -526,10 +537,12 @@ export class Binder extends ParseTreeWalker { // the function never returns. this._addAntecedent(this._currentReturnTarget, this._currentFlowNode); const returnFlowNode = this._finishFlowLabel(this._currentReturnTarget); + AnalyzerNodeInfo.setAfterFlowNode(node, returnFlowNode); - }); - AnalyzerNodeInfo.setCodeFlowExpressions(node, this._currentScopeCodeFlowExpressions!); + AnalyzerNodeInfo.setCodeFlowExpressions(node, this._currentScopeCodeFlowExpressions!); + AnalyzerNodeInfo.setCodeFlowComplexity(node, this._functionCodeFlowComplexity); + }); }); this._createAssignmentTargetFlowNodes(node.name, /* walkTargets */ false, /* unbound */ false); @@ -539,6 +552,8 @@ export class Binder extends ParseTreeWalker { } override visitLambda(node: LambdaNode): boolean { + AnalyzerNodeInfo.setFlowNode(node, this._currentFlowNode!); + // Analyze the parameter defaults in the context of the parent's scope // before we add any names from the function's scope. 
node.parameters.forEach((param) => { @@ -547,7 +562,7 @@ export class Binder extends ParseTreeWalker { } }); - this._createNewScope(ScopeType.Function, this._currentScope, () => { + this._createNewScope(ScopeType.Function, this._getNonClassParentScope(), () => { AnalyzerNodeInfo.setScope(node, this._currentScope); this._deferBinding(() => { @@ -568,6 +583,7 @@ export class Binder extends ParseTreeWalker { this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(paramDeclaration); @@ -667,6 +683,8 @@ export class Binder extends ParseTreeWalker { } if (emitDunderAllWarning) { + this._usesUnsupportedDunderAllForm = true; + this._addDiagnostic( this._fileInfo.diagnosticRuleSet.reportUnsupportedDunderAll, DiagnosticRule.reportUnsupportedDunderAll, @@ -755,6 +773,8 @@ export class Binder extends ParseTreeWalker { } if (emitDunderAllWarning) { + this._usesUnsupportedDunderAllForm = true; + this._addDiagnostic( this._fileInfo.diagnosticRuleSet.reportUnsupportedDunderAll, DiagnosticRule.reportUnsupportedDunderAll, @@ -864,8 +884,6 @@ export class Binder extends ParseTreeWalker { this.walk(node.leftExpression); this.walk(node.rightExpression); - this._addInferredTypeAssignmentForVariable(node.destExpression, node.rightExpression); - this._bindPossibleTupleNamedTarget(node.destExpression); this._createAssignmentTargetFlowNodes(node.destExpression, /* walkTargets */ false, /* unbound */ false); @@ -910,6 +928,8 @@ export class Binder extends ParseTreeWalker { } if (emitDunderAllWarning) { + this._usesUnsupportedDunderAllForm = true; + this._addDiagnostic( this._fileInfo.diagnosticRuleSet.reportUnsupportedDunderAll, DiagnosticRule.reportUnsupportedDunderAll, @@ -1043,7 +1063,7 @@ export class Binder extends ParseTreeWalker { } override visitYield(node: YieldNode): boolean { - if (this._isInListComprehension(node)) { + if (this._isInListComprehension(node, /* ignoreOutermostIterable */ true)) { 
this._addError(Localizer.Diagnostic.yieldWithinListCompr(), node); } @@ -1052,7 +1072,7 @@ export class Binder extends ParseTreeWalker { } override visitYieldFrom(node: YieldFromNode): boolean { - if (this._isInListComprehension(node)) { + if (this._isInListComprehension(node, /* ignoreOutermostIterable */ true)) { this._addError(Localizer.Diagnostic.yieldWithinListCompr(), node); } @@ -1190,12 +1210,16 @@ export class Binder extends ParseTreeWalker { path: this._fileInfo.filePath, range: convertOffsetsToRange(node.name.start, TextRange.getEnd(node.name), this._fileInfo.lines), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(declaration); } } + const wasInExceptSuite = this._isInExceptSuite; + this._isInExceptSuite = true; this.walk(node.exceptSuite); + this._isInExceptSuite = wasInExceptSuite; if (node.name) { // The exception name is implicitly unbound at the end of @@ -1362,6 +1386,9 @@ export class Binder extends ParseTreeWalker { this._currentFlowNode = isAfterElseAndExceptsReachable ? postFinallyNode : Binder._unreachableFlowNode; } + // Try blocks are expensive to analyze, so add to the complexity metric. + this._functionCodeFlowComplexity += 4; + return false; } @@ -1369,6 +1396,11 @@ export class Binder extends ParseTreeWalker { // Make sure this is within an async lambda or function. const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); if (enclosingFunction === undefined || !enclosingFunction.isAsync) { + if (this._fileInfo.isIPythonMode && enclosingFunction === undefined) { + // Top level await is allowed in ipython mode. + return true; + } + // Allow if it's within a generator expression. Execution of // generator expressions is deferred and therefore can be // run within the context of an async function later. 
@@ -1483,9 +1515,11 @@ export class Binder extends ParseTreeWalker { if (node.module.nameParts.length === 1) { if (firstNamePartValue === 'typing' || firstNamePartValue === 'typing_extensions') { - this._typingImportAliases.push(node.alias?.value || firstNamePartValue); + this._typingImportAliases.push(node.alias?.value ?? firstNamePartValue); } else if (firstNamePartValue === 'sys') { - this._sysImportAliases.push(node.alias?.value || firstNamePartValue); + this._sysImportAliases.push(node.alias?.value ?? firstNamePartValue); + } else if (firstNamePartValue === 'dataclasses') { + this._dataclassesImportAliases.push(node.alias?.value ?? firstNamePartValue); } } } @@ -1494,7 +1528,8 @@ export class Binder extends ParseTreeWalker { } override visitImportFrom(node: ImportFromNode): boolean { - const typingSymbolsOfInterest = ['Final', 'TypeAlias', 'ClassVar', 'Required', 'NotRequired']; + const typingSymbolsOfInterest = ['Final', 'TypeAlias', 'ClassVar', 'Required', 'NotRequired', 'Annotated']; + const dataclassesSymbolsOfInterest = ['InitVar']; const importInfo = AnalyzerNodeInfo.getImportInfo(node.module); let resolvedPath = ''; @@ -1513,11 +1548,17 @@ export class Binder extends ParseTreeWalker { fileName === '__init__' && node.module.leadingDots === 1 && node.module.nameParts.length === 1; let isTypingImport = false; + let isDataclassesImport = false; + if (node.module.nameParts.length === 1) { const firstNamePartValue = node.module.nameParts[0].value; if (firstNamePartValue === 'typing' || firstNamePartValue === 'typing_extensions') { isTypingImport = true; } + + if (firstNamePartValue === 'dataclasses') { + isDataclassesImport = true; + } } if (node.isWildcardImport) { @@ -1557,10 +1598,12 @@ export class Binder extends ParseTreeWalker { type: DeclarationType.Alias, node, path: resolvedPath, + loadSymbolsFromPath: true, range: getEmptyRange(), usesLocalName: false, symbolName: name, moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, 
}; localSymbol.addDeclaration(aliasDecl); names.push(name); @@ -1577,20 +1620,24 @@ export class Binder extends ParseTreeWalker { type: DeclarationType.Alias, node, path: implicitImport.path, + loadSymbolsFromPath: true, range: getEmptyRange(), usesLocalName: false, moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; const aliasDecl: AliasDeclaration = { type: DeclarationType.Alias, node, path: resolvedPath, + loadSymbolsFromPath: true, usesLocalName: false, symbolName: name, submoduleFallback, range: getEmptyRange(), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; localSymbol.addDeclaration(aliasDecl); @@ -1608,6 +1655,12 @@ export class Binder extends ParseTreeWalker { this._typingSymbolAliases.set(s, s); }); } + + if (isDataclassesImport) { + dataclassesSymbolsOfInterest.forEach((s) => { + this._dataclassesSymbolAliases.set(s, s); + }); + } } } else { if (isModuleInitFile) { @@ -1649,14 +1702,17 @@ export class Binder extends ParseTreeWalker { } let submoduleFallback: AliasDeclaration | undefined; + let loadSymbolsFromPath = true; if (implicitImport) { submoduleFallback = { type: DeclarationType.Alias, node: importSymbolNode, path: implicitImport.path, + loadSymbolsFromPath: true, range: getEmptyRange(), usesLocalName: false, moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; // Handle the case of "from . import X" within an __init__ file. 
@@ -1667,7 +1723,7 @@ export class Binder extends ParseTreeWalker { node.module.leadingDots === 1 && node.module.nameParts.length === 0 ) { - resolvedPath = ''; + loadSymbolsFromPath = false; } } @@ -1675,11 +1731,14 @@ export class Binder extends ParseTreeWalker { type: DeclarationType.Alias, node: importSymbolNode, path: resolvedPath, + loadSymbolsFromPath, usesLocalName: !!importSymbolNode.alias, symbolName: importedName, submoduleFallback, range: getEmptyRange(), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, + isNativeLib: importInfo?.isNativeLib, }; symbol.addDeclaration(aliasDecl); @@ -1690,6 +1749,12 @@ export class Binder extends ParseTreeWalker { this._typingSymbolAliases.set(nameNode.value, importSymbolNode.name.value); } } + + if (isDataclassesImport) { + if (dataclassesSymbolsOfInterest.some((s) => s === importSymbolNode.name.value)) { + this._dataclassesSymbolAliases.set(nameNode.value, importSymbolNode.name.value); + } + } } }); } @@ -1720,33 +1785,51 @@ export class Binder extends ParseTreeWalker { // |<--------------------| // (with suite)<--------------| // ^ | - // | ContextManagerExceptionTarget + // | ContextManagerSwallowExceptionTarget // | ^ - // | PostContextManagerLabel + // | PostContextManagerLabel // | ^ // |---------------------| // | // (after with) // + // In addition to the ContextManagerSwallowExceptionTarget, we'll create + // a second target called ContextManagerForwardExceptionTarget that forwards + // exceptions to existing exception targets if they exist. 
+ + const contextManagerSwallowExceptionTarget = this._createContextManagerLabel( + node.withItems.map((item) => item.expression), + !!node.isAsync, + /* blockIfSwallowsExceptions */ false + ); + this._addAntecedent(contextManagerSwallowExceptionTarget, this._currentFlowNode!); - const contextManagerExceptionTarget = this._createContextManagerLabel( + const contextManagerForwardExceptionTarget = this._createContextManagerLabel( node.withItems.map((item) => item.expression), - !!node.isAsync + !!node.isAsync, + /* blockIfSwallowsExceptions */ true ); - this._addAntecedent(contextManagerExceptionTarget, this._currentFlowNode!); + this._currentExceptTargets.forEach((exceptionTarget) => { + this._addAntecedent(exceptionTarget, contextManagerForwardExceptionTarget); + }); const preWithSuiteNode = this._currentFlowNode!; const postContextManagerLabel = this._createBranchLabel(preWithSuiteNode); - this._addAntecedent(postContextManagerLabel, contextManagerExceptionTarget!); + this._addAntecedent(postContextManagerLabel, contextManagerSwallowExceptionTarget!); postContextManagerLabel.affectedExpressions = this._trackCodeFlowExpressions(() => { - this._useExceptTargets([contextManagerExceptionTarget], () => { + this._useExceptTargets([contextManagerSwallowExceptionTarget, contextManagerForwardExceptionTarget], () => { this.walk(node.suite); }); this._addAntecedent(postContextManagerLabel, this._currentFlowNode!); this._currentFlowNode = postContextManagerLabel; + // Model the call to `__exit__` as a potential exception generator. 
+ if (!this._isCodeUnreachable()) { + this._addExceptTargets(this._currentFlowNode!); + } + if (node.asyncToken) { const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); if (!enclosingFunction || !enclosingFunction.isAsync) { @@ -1839,17 +1922,16 @@ export class Binder extends ParseTreeWalker { override visitListComprehension(node: ListComprehensionNode): boolean { const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); - this._createNewScope(ScopeType.ListComprehension, this._currentScope, () => { + this._createNewScope(ScopeType.ListComprehension, this._getNonClassParentScope(), () => { AnalyzerNodeInfo.setScope(node, this._currentScope); const falseLabel = this._createBranchLabel(); - // We'll walk the comprehensions list twice. The first time we'll + // We'll walk the forIfNodes list twice. The first time we'll // bind targets of for statements. The second time we'll walk // expressions and create the control flow graph. - const boundSymbols: Map[] = []; - for (let i = 0; i < node.comprehensions.length; i++) { - const compr = node.comprehensions[i]; + for (let i = 0; i < node.forIfNodes.length; i++) { + const compr = node.forIfNodes[i]; const addedSymbols = new Map(); if (compr.nodeType === ParseNodeType.ListComprehensionFor) { this._bindPossibleTupleNamedTarget(compr.targetExpression, addedSymbols); @@ -1867,24 +1949,11 @@ export class Binder extends ParseTreeWalker { } } } - boundSymbols.push(addedSymbols); } - for (let i = 0; i < node.comprehensions.length; i++) { - const compr = node.comprehensions[i]; + for (let i = 0; i < node.forIfNodes.length; i++) { + const compr = node.forIfNodes[i]; if (compr.nodeType === ParseNodeType.ListComprehensionFor) { - const addedSymbols = boundSymbols[i]; - - // Determine if we added a new symbol to this scope. If so, see - // if it's the same name as a symbol in an outer scope. If so, we'll - // create an alias node in the control flow graph. 
- for (const addedSymbol of addedSymbols) { - const aliasSymbol = this._currentScope.parent!.lookUpSymbol(addedSymbol[0]); - if (aliasSymbol) { - this._createAssignmentAliasFlowNode(addedSymbol[1].id, aliasSymbol.id); - } - } - this.walk(compr.iterableExpression); this._createAssignmentTargetFlowNodes( @@ -2012,6 +2081,7 @@ export class Binder extends ParseTreeWalker { this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(declaration); } @@ -2036,6 +2106,17 @@ export class Binder extends ParseTreeWalker { return true; } + private _getNonClassParentScope() { + // We may not be able to use the current scope if it's a class scope. + // Walk up until we find a non-class scope instead. + let parentScope = this._currentScope; + while (parentScope.type === ScopeType.Class) { + parentScope = parentScope.parent!; + } + + return parentScope; + } + private _addSlotsToCurrentScope(slotNameNodes: StringListNode[]) { assert(this._currentScope.type === ScopeType.Class); @@ -2053,7 +2134,7 @@ export class Binder extends ParseTreeWalker { if (!symbol) { symbol = this._currentScope.addSymbol( slotName, - SymbolFlags.InitiallyUnbound | SymbolFlags.ClassMember | SymbolFlags.InstanceMember + SymbolFlags.InitiallyUnbound | SymbolFlags.InstanceMember ); const honorPrivateNaming = this._fileInfo.diagnosticRuleSet.reportPrivateUsage !== 'none'; if (isPrivateOrProtectedName(slotName) && honorPrivateNaming) { @@ -2073,6 +2154,7 @@ export class Binder extends ParseTreeWalker { this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(declaration); } @@ -2082,12 +2164,27 @@ export class Binder extends ParseTreeWalker { } } - private _isInListComprehension(node: ParseNode) { + private _isInListComprehension(node: ParseNode, ignoreOutermostIterable = false) { let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + let 
prevPrevNode: ParseNode | undefined; + while (curNode) { if (curNode.nodeType === ParseNodeType.ListComprehension) { + if (ignoreOutermostIterable && curNode.forIfNodes.length > 0) { + const outermostCompr = curNode.forIfNodes[0]; + if (prevNode === outermostCompr && outermostCompr.nodeType === ParseNodeType.ListComprehensionFor) { + if (prevPrevNode === outermostCompr.iterableExpression) { + return false; + } + } + } + return true; } + + prevPrevNode = prevNode; + prevNode = curNode; curNode = curNode.parent; } return false; @@ -2106,6 +2203,7 @@ export class Binder extends ParseTreeWalker { path: this._fileInfo.filePath, range: convertOffsetsToRange(target.start, TextRange.getEnd(target), this._fileInfo.lines), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(declaration); } @@ -2130,7 +2228,12 @@ export class Binder extends ParseTreeWalker { const aliasDecl = varSymbol.getDeclarations().find((decl) => decl.type === DeclarationType.Alias) as | AliasDeclaration | undefined; - const resolvedPath = aliasDecl?.path || aliasDecl?.submoduleFallback?.path; + const resolvedPath = + aliasDecl?.path && aliasDecl.loadSymbolsFromPath + ? aliasDecl.path + : aliasDecl?.submoduleFallback?.path && aliasDecl.submoduleFallback.loadSymbolsFromPath + ? aliasDecl.submoduleFallback.path + : undefined; if (!resolvedPath) { return undefined; } @@ -2180,8 +2283,10 @@ export class Binder extends ParseTreeWalker { newDecl = { type: DeclarationType.Alias, node, - path: '', + path: importInfo.resolvedPaths[importInfo.resolvedPaths.length - 1], + loadSymbolsFromPath: false, moduleName: importInfo.importName, + isInExceptSuite: this._isInExceptSuite, range: getEmptyRange(), firstNamePart: firstNamePartValue, usesLocalName: !!importAlias, @@ -2192,6 +2297,7 @@ export class Binder extends ParseTreeWalker { // name part we're resolving. 
if (importAlias || node.module.nameParts.length === 1) { newDecl.path = importInfo.resolvedPaths[importInfo.resolvedPaths.length - 1]; + newDecl.loadSymbolsFromPath = true; this._addImplicitImportsToLoaderActions(importInfo, newDecl); } else { // Fill in the remaining name parts. @@ -2211,7 +2317,8 @@ export class Binder extends ParseTreeWalker { if (!loaderActions) { // Allocate a new loader action. loaderActions = { - path: '', + path: importInfo.resolvedPaths[i], + loadSymbolsFromPath: false, implicitImports: new Map(), }; if (!curLoaderActions.implicitImports) { @@ -2224,6 +2331,7 @@ export class Binder extends ParseTreeWalker { // implicit imports as well. if (i === node.module.nameParts.length - 1) { loaderActions.path = importInfo.resolvedPaths[i]; + loaderActions.loadSymbolsFromPath = true; this._addImplicitImportsToLoaderActions(importInfo, loaderActions); } @@ -2242,25 +2350,31 @@ export class Binder extends ParseTreeWalker { type: DeclarationType.Alias, node, path: '*** unresolved ***', + loadSymbolsFromPath: true, range: getEmptyRange(), usesLocalName: !!importAlias, moduleName: '', isUnresolved: true, + isInExceptSuite: this._isInExceptSuite, }; symbol.addDeclaration(newDecl); } } private _getWildcardImportNames(lookupInfo: ImportLookupResult): string[] { + const namesToImport: string[] = []; + // If a dunder all symbol is defined, it takes precedence. if (lookupInfo.dunderAllNames) { - return lookupInfo.dunderAllNames; + if (!lookupInfo.usesUnsupportedDunderAllForm) { + return lookupInfo.dunderAllNames; + } + + namesToImport.push(...lookupInfo.dunderAllNames); } - // Import all names that don't begin with an underscore. - const namesToImport: string[] = []; lookupInfo.symbolTable.forEach((symbol, name) => { - if (!symbol.isExternallyHidden()) { + if (!symbol.isExternallyHidden() && !isPrivateOrProtectedName(name)) { namesToImport!.push(name); } }); @@ -2330,7 +2444,11 @@ export class Binder extends ParseTreeWalker { this._currentFlowNode! 
= flowNode; } - private _createContextManagerLabel(expressions: ExpressionNode[], isAsync: boolean) { + private _createContextManagerLabel( + expressions: ExpressionNode[], + isAsync: boolean, + blockIfSwallowsExceptions: boolean + ) { const flowNode: FlowPostContextManagerLabel = { flags: FlowFlags.PostContextManager | FlowFlags.BranchLabel, id: getUniqueFlowNodeId(), @@ -2338,6 +2456,7 @@ export class Binder extends ParseTreeWalker { expressions, affectedExpressions: undefined, isAsync, + blockIfSwallowsExceptions, }; return flowNode; } @@ -2349,6 +2468,7 @@ export class Binder extends ParseTreeWalker { antecedents: [], affectedExpressions: undefined, }; + return flowNode; } @@ -2364,6 +2484,9 @@ export class Binder extends ParseTreeWalker { return node.antecedents[0]; } + // Add one to the code flow complexity for each antecedent. + this._functionCodeFlowComplexity += node.antecedents.length; + return node; } @@ -2566,6 +2689,12 @@ export class Binder extends ParseTreeWalker { case ParseNodeType.AssignmentExpression: { expressionList.push(expression.name); + this._isNarrowingExpression( + expression.rightExpression, + expressionList, + filterForNeverNarrowing, + /* isComplexExpression */ true + ); return true; } @@ -2634,6 +2763,7 @@ export class Binder extends ParseTreeWalker { // Look for " in Y" or " not in Y". 
if (expression.operator === OperatorType.In || expression.operator === OperatorType.NotIn) { if ( + expression.leftExpression.nodeType === ParseNodeType.StringList && this._isNarrowingExpression( expression.rightExpression, expressionList, @@ -2804,20 +2934,6 @@ export class Binder extends ParseTreeWalker { } } - private _createAssignmentAliasFlowNode(targetSymbolId: number, aliasSymbolId: number) { - if (!this._isCodeUnreachable()) { - const flowNode: FlowAssignmentAlias = { - flags: FlowFlags.AssignmentAlias, - id: getUniqueFlowNodeId(), - antecedent: this._currentFlowNode!, - targetSymbolId, - aliasSymbolId, - }; - - this._currentFlowNode = flowNode; - } - } - private _createVariableAnnotationFlowNode() { if (!this._isCodeUnreachable()) { const flowNode: FlowVariableAnnotation = { @@ -2940,13 +3056,21 @@ export class Binder extends ParseTreeWalker { private _bindLoopStatement(preLoopLabel: FlowLabel, postLoopLabel: FlowLabel, callback: () => void) { const savedContinueTarget = this._currentContinueTarget; const savedBreakTarget = this._currentBreakTarget; + const savedCodeFlowComplexity = this._functionCodeFlowComplexity; + this._currentContinueTarget = preLoopLabel; this._currentBreakTarget = postLoopLabel; + this._functionCodeFlowComplexity = 1; preLoopLabel.affectedExpressions = this._trackCodeFlowExpressions(callback); this._currentContinueTarget = savedContinueTarget; this._currentBreakTarget = savedBreakTarget; + + // For each loop, double the complexity of the complexity of the + // contained code flow. This reflects the fact that nested loops + // are very expensive to analyze. 
+ this._functionCodeFlowComplexity = this._functionCodeFlowComplexity * 2 + savedCodeFlowComplexity; } private _addAntecedent(label: FlowLabel, antecedent: FlowNode) { @@ -2977,16 +3101,6 @@ export class Binder extends ParseTreeWalker { if (!symbol) { symbol = scope.addSymbol(name, SymbolFlags.InitiallyUnbound | SymbolFlags.ClassMember); - // Handle the case where a new symbol is being added to a class - // but the expression assigned to it uses a symbol of the same - // name that is declared in an outer scope. - if (scope.type === ScopeType.Class) { - const aliasSymbol = scope.parent!.lookUpSymbol(name); - if (aliasSymbol) { - this._createAssignmentAliasFlowNode(symbol.id, aliasSymbol.id); - } - } - if (this._currentScope.type === ScopeType.Module || this._currentScope.type === ScopeType.Builtin) { if (isPrivateOrProtectedName(name)) { if (isPrivateName(name)) { @@ -3049,7 +3163,7 @@ export class Binder extends ParseTreeWalker { } } - private _addBuiltInSymbolToCurrentScope( + private _addImplicitSymbolToCurrentScope( nameValue: string, node: ModuleNode | ClassNode | FunctionNode, type: IntrinsicType @@ -3063,6 +3177,7 @@ export class Binder extends ParseTreeWalker { path: this._fileInfo.filePath, range: getEmptyRange(), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }); symbol.setIsIgnoredForProtocolMatch(); } @@ -3132,10 +3247,12 @@ export class Binder extends ParseTreeWalker { node: target, isConstant: isConstantName(target.value), inferredTypeSource: source, + isInferenceAllowedInPyTyped: this._isInferenceAllowedInPyTyped(name.value), typeAliasName: isPossibleTypeAlias ? 
target : undefined, path: this._fileInfo.filePath, range: convertOffsetsToRange(name.start, TextRange.getEnd(name), this._fileInfo.lines), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, docString: this._getVariableDocString(target), }; symbolWithScope.symbol.addDeclaration(declaration); @@ -3187,6 +3304,7 @@ export class Binder extends ParseTreeWalker { this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, docString: this._getVariableDocString(target), }; symbol.addDeclaration(declaration); @@ -3220,6 +3338,11 @@ export class Binder extends ParseTreeWalker { } } + private _isInferenceAllowedInPyTyped(symbolName: string): boolean { + const exemptSymbols = ['__match_args__', '__slots__', '__all__']; + return exemptSymbols.some((name) => name === symbolName); + } + private _addTypeDeclarationForVariable(target: ExpressionNode, typeAnnotation: ExpressionNode) { let declarationHandled = false; @@ -3232,18 +3355,53 @@ export class Binder extends ParseTreeWalker { const isExplicitTypeAlias = this._isAnnotationTypeAlias(typeAnnotation); let typeAnnotationNode: ExpressionNode | undefined = typeAnnotation; + let innerTypeAnnotationNode: ExpressionNode | undefined = typeAnnotation; if (isExplicitTypeAlias) { typeAnnotationNode = undefined; + innerTypeAnnotationNode = undefined; - // Type aliases are allowed only in the global scope. + // Type aliases are allowed only in the global or class scope. 
if ( + this._currentScope.type !== ScopeType.Class && this._currentScope.type !== ScopeType.Module && this._currentScope.type !== ScopeType.Builtin ) { - this._addError(Localizer.Diagnostic.typeAliasNotInModule(), typeAnnotation); + this._addError(Localizer.Diagnostic.typeAliasNotInModuleOrClass(), typeAnnotation); } } else if (finalInfo.isFinal) { - typeAnnotationNode = finalInfo.finalTypeNode; + innerTypeAnnotationNode = finalInfo.finalTypeNode; + if (!finalInfo.finalTypeNode) { + typeAnnotationNode = undefined; + } + } + + // Is this annotation indicating that the variable is a "ClassVar"? + let classVarInfo = this._isAnnotationClassVar(typeAnnotation); + + if (classVarInfo.isClassVar) { + innerTypeAnnotationNode = classVarInfo.classVarTypeNode; + + if (!classVarInfo.classVarTypeNode) { + typeAnnotationNode = undefined; + } + } + + // PEP 591 indicates that a Final variable initialized within a class + // body should also be considered a ClassVar. + if (finalInfo.isFinal) { + const containingClass = ParseTreeUtils.getEnclosingClassOrFunction(target); + if (containingClass && containingClass.nodeType === ParseNodeType.Class) { + // Make sure it's part of an assignment. + if ( + target.parent?.nodeType === ParseNodeType.Assignment || + target.parent?.parent?.nodeType === ParseNodeType.Assignment + ) { + classVarInfo = { + isClassVar: true, + classVarTypeNode: undefined, + }; + } + } } const declaration: VariableDeclaration = { @@ -3251,43 +3409,43 @@ export class Binder extends ParseTreeWalker { node: target, isConstant: isConstantName(name.value), isFinal: finalInfo.isFinal, - isRequired: this._isRequiredAnnotation(typeAnnotationNode), - isNotRequired: this._isNotRequiredAnnotation(typeAnnotationNode), + isClassVar: classVarInfo.isClassVar, + isRequired: this._isRequiredAnnotation(innerTypeAnnotationNode), + isNotRequired: this._isNotRequiredAnnotation(innerTypeAnnotationNode), typeAliasAnnotation: isExplicitTypeAlias ? 
typeAnnotation : undefined, typeAliasName: isExplicitTypeAlias ? target : undefined, path: this._fileInfo.filePath, typeAnnotationNode, range: convertOffsetsToRange(name.start, TextRange.getEnd(name), this._fileInfo.lines), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, docString: this._getVariableDocString(target), }; symbolWithScope.symbol.addDeclaration(declaration); - // Is this annotation indicating that the variable is a "ClassVar"? - let isClassVar = - typeAnnotation.nodeType === ParseNodeType.Index && - this._isTypingAnnotation(typeAnnotation.baseExpression, 'ClassVar'); + if (classVarInfo.isClassVar) { + symbolWithScope.symbol.setIsClassVar(); + } else { + symbolWithScope.symbol.setIsInstanceMember(); + } - // PEP 591 indicates that a Final variable initialized within a class - // body should also be considered a ClassVar. - if (finalInfo.isFinal) { - const containingClass = ParseTreeUtils.getEnclosingClassOrFunction(target); - if (containingClass && containingClass.nodeType === ParseNodeType.Class) { - // Make sure it's part of an assignment. + // Look for an 'InitVar' either by itself or wrapped in an 'Annotated'. 
+ if (typeAnnotation.nodeType === ParseNodeType.Index) { + if (this._isDataclassesAnnotation(typeAnnotation.baseExpression, 'InitVar')) { + symbolWithScope.symbol.setIsInitVar(); + } else if ( + this._isTypingAnnotation(typeAnnotation.baseExpression, 'Annotated') && + typeAnnotation.items.length > 0 + ) { + const item0Expr = typeAnnotation.items[0].valueExpression; if ( - target.parent?.nodeType === ParseNodeType.Assignment || - target.parent?.parent?.nodeType === ParseNodeType.Assignment + item0Expr.nodeType === ParseNodeType.Index && + this._isDataclassesAnnotation(item0Expr.baseExpression, 'InitVar') ) { - isClassVar = true; + symbolWithScope.symbol.setIsInitVar(); } } } - - if (isClassVar) { - symbolWithScope.symbol.setIsClassVar(); - } else { - symbolWithScope.symbol.setIsInstanceMember(); - } } declarationHandled = true; @@ -3330,13 +3488,14 @@ export class Binder extends ParseTreeWalker { isDefinedByMemberAccess: true, isFinal: finalInfo.isFinal, path: this._fileInfo.filePath, - typeAnnotationNode: finalInfo.isFinal ? finalInfo.finalTypeNode : typeAnnotation, + typeAnnotationNode: finalInfo.isFinal && !finalInfo.finalTypeNode ? undefined : typeAnnotation, range: convertOffsetsToRange( target.memberName.start, target.memberName.start + target.memberName.length, this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, docString: this._getVariableDocString(target), }; symbol.addDeclaration(declaration); @@ -3357,8 +3516,26 @@ export class Binder extends ParseTreeWalker { // time. We assume here that the code isn't making use of some custom type alias // to refer to the typing types. 
private _isTypingAnnotation(typeAnnotation: ExpressionNode, name: string): boolean { + return this._isKnownAnnotation(typeAnnotation, name, this._typingImportAliases, this._typingSymbolAliases); + } + + private _isDataclassesAnnotation(typeAnnotation: ExpressionNode, name: string): boolean { + return this._isKnownAnnotation( + typeAnnotation, + name, + this._dataclassesImportAliases, + this._dataclassesSymbolAliases + ); + } + + private _isKnownAnnotation( + typeAnnotation: ExpressionNode, + name: string, + importAliases: string[], + symbolAliases: Map + ) { if (typeAnnotation.nodeType === ParseNodeType.Name) { - const alias = this._typingSymbolAliases.get(typeAnnotation.value); + const alias = symbolAliases.get(typeAnnotation.value); if (alias === name) { return true; } @@ -3368,7 +3545,7 @@ export class Binder extends ParseTreeWalker { typeAnnotation.memberName.value === name ) { const baseName = typeAnnotation.leftExpression.value; - return this._typingImportAliases.some((alias) => alias === baseName); + return importAliases.some((alias) => alias === baseName); } } @@ -3422,19 +3599,8 @@ export class Binder extends ParseTreeWalker { } const nextStatement = suiteOrModule.statements[assignmentIndex + 1]; - if ( - nextStatement.nodeType !== ParseNodeType.StatementList || - nextStatement.statements.length === 0 || - nextStatement.statements[0].nodeType !== ParseNodeType.StringList - ) { - return undefined; - } - - // A docstring can consist of multiple joined strings in a single expression. - const strings = nextStatement.statements[0].strings; - // Any f-strings invalidate the entire docstring. - if (strings.some((n) => (n.token.flags & StringTokenFlags.Format) !== 0)) { + if (nextStatement.nodeType !== ParseNodeType.StatementList || !ParseTreeUtils.isDocString(nextStatement)) { return undefined; } @@ -3466,6 +3632,8 @@ export class Binder extends ParseTreeWalker { return undefined; } + // A docstring can consist of multiple joined strings in a single expression. 
+ const strings = (nextStatement.statements[0] as StringListNode).strings; if (strings.length === 1) { // Common case. return strings[0].value; @@ -3502,6 +3670,44 @@ export class Binder extends ParseTreeWalker { return { isFinal, finalTypeNode }; } + // Determines if the specified type annotation expression is a "ClassVar". + // It returns a value indicating whether the expression is a "ClassVar" + // expression and whether it's a "raw" ClassVar with no type arguments. + private _isAnnotationClassVar(typeAnnotation: ExpressionNode | undefined): ClassVarInfo { + let isClassVar = false; + let classVarTypeNode: ExpressionNode | undefined; + + while (typeAnnotation) { + if ( + typeAnnotation.nodeType === ParseNodeType.Index && + typeAnnotation.items.length > 0 && + this._isTypingAnnotation(typeAnnotation.baseExpression, 'Annotated') + ) { + typeAnnotation = typeAnnotation.items[0].valueExpression; + } else if (this._isTypingAnnotation(typeAnnotation, 'ClassVar')) { + isClassVar = true; + break; + } else if (typeAnnotation.nodeType === ParseNodeType.Index && typeAnnotation.items.length === 1) { + // Recursively call to see if the base expression is "ClassVar". + const finalInfo = this._isAnnotationClassVar(typeAnnotation.baseExpression); + if ( + finalInfo.isClassVar && + typeAnnotation.items[0].argumentCategory === ArgumentCategory.Simple && + !typeAnnotation.items[0].name && + !typeAnnotation.trailingComma + ) { + isClassVar = true; + classVarTypeNode = typeAnnotation.items[0].valueExpression; + } + break; + } else { + break; + } + } + + return { isClassVar, classVarTypeNode }; + } + // Determines if the specified type annotation is wrapped in a "Required". 
private _isRequiredAnnotation(typeAnnotation: ExpressionNode | undefined): boolean { if (typeAnnotation && typeAnnotation.nodeType === ParseNodeType.Index && typeAnnotation.items.length === 1) { @@ -3551,7 +3757,7 @@ export class Binder extends ParseTreeWalker { return undefined; } - const classNode = ParseTreeUtils.getEnclosingClass(methodNode); + const classNode = ParseTreeUtils.getEnclosingClass(methodNode, /* stopAtFunction */ true); if (!classNode) { return undefined; } @@ -3620,12 +3826,14 @@ export class Binder extends ParseTreeWalker { : undefined; if (existingLoaderAction) { existingLoaderAction.path = implicitImport.path; + existingLoaderAction.loadSymbolsFromPath = true; } else { if (!loaderActions.implicitImports) { loaderActions.implicitImports = new Map(); } loaderActions.implicitImports.set(implicitImport.name, { path: implicitImport.path, + loadSymbolsFromPath: true, implicitImports: new Map(), }); } @@ -3673,7 +3881,12 @@ export class Binder extends ParseTreeWalker { ['OrderedDict', true], ['Concatenate', true], ['TypeGuard', true], + ['StrictTypeGuard', true], ['Unpack', true], + ['Self', true], + ['NoReturn', true], + ['Never', true], + ['LiteralString', true], ]); const assignedName = assignedNameNode.value; @@ -3694,6 +3907,7 @@ export class Binder extends ParseTreeWalker { this._fileInfo.lines ), moduleName: this._fileInfo.moduleName, + isInExceptSuite: this._isInExceptSuite, }); } return true; diff --git a/packages/pyright-internal/src/analyzer/checker.ts b/packages/pyright-internal/src/analyzer/checker.ts index ece4b1aa2f0a..f3fc6af43c1b 100644 --- a/packages/pyright-internal/src/analyzer/checker.ts +++ b/packages/pyright-internal/src/analyzer/checker.ts @@ -17,6 +17,8 @@ import { DiagnosticLevel } from '../common/configOptions'; import { assert } from '../common/debug'; import { Diagnostic, DiagnosticAddendum } from '../common/diagnostic'; import { DiagnosticRule } from '../common/diagnosticRules'; +import { getFileExtension } from 
'../common/pathUtils'; +import { PythonVersion, versionToString } from '../common/pythonVersion'; import { TextRange } from '../common/textRange'; import { Localizer } from '../localization/localize'; import { @@ -55,6 +57,7 @@ import { ParameterCategory, ParseNode, ParseNodeType, + PatternClassNode, RaiseNode, ReturnNode, SetNode, @@ -65,6 +68,7 @@ import { StringNode, SuiteNode, TernaryNode, + TryNode, TupleNode, TypeAnnotationNode, UnaryOperationNode, @@ -84,6 +88,7 @@ import { ImportType } from './importResult'; import { getTopLevelImports } from './importStatementUtils'; import * as ParseTreeUtils from './parseTreeUtils'; import { ParseTreeWalker } from './parseTreeWalker'; +import { validateClassPattern } from './patternMatching'; import { ScopeType } from './scope'; import { getScopeForNode } from './scopeUtils'; import { evaluateStaticBoolExpression } from './staticExpressions'; @@ -92,10 +97,10 @@ import * as SymbolNameUtils from './symbolNameUtils'; import { getLastTypedDeclaredForSymbol, isFinalVariable } from './symbolUtils'; import { TypeEvaluator } from './typeEvaluatorTypes'; import { - AnyType, ClassType, combineTypes, FunctionType, + FunctionTypeFlags, isAnyOrUnknown, isClass, isClassInstance, @@ -103,11 +108,13 @@ import { isInstantiableClass, isModule, isNever, - isNone, + isNoneInstance, isOverloadedFunction, isParamSpec, + isPossiblyUnbound, isTypeSame, isTypeVar, + isUnbound, isUnion, isUnknown, isVariadicTypeVar, @@ -122,22 +129,27 @@ import { import { applySolvedTypeVars, CanAssignFlags, + ClassMember, ClassMemberLookupFlags, convertToInstance, derivesFromAnyOrUnknown, derivesFromClassRecursive, doForEachSubtype, + getClassFieldsRecursive, getDeclaredGeneratorReturnType, getGeneratorTypeArgs, + getGeneratorYieldType, + getParameterListDetails, + getProtocolSymbols, + getTypeVarArgumentsRecursive, getTypeVarScopeId, isEllipsisType, isLiteralType, isLiteralTypeOrUnion, - isNoReturnType, - isOpenEndedTupleClass, isPartlyUnknown, isProperty, 
isTupleClass, + isUnboundedTupleClass, lookUpClassMember, mapSubtypes, partiallySpecializeType, @@ -150,10 +162,56 @@ interface LocalTypeVarInfo { nodes: NameNode[]; } +interface DeprecatedForm { + version: PythonVersion; + fullName: string; + replacementText: string; +} + +const deprecatedAliases = new Map([ + ['Tuple', { version: PythonVersion.V3_9, fullName: 'builtins.tuple', replacementText: 'tuple' }], + ['List', { version: PythonVersion.V3_9, fullName: 'builtins.list', replacementText: 'list' }], + ['Dict', { version: PythonVersion.V3_9, fullName: 'builtins.dict', replacementText: 'dict' }], + ['Set', { version: PythonVersion.V3_9, fullName: 'builtins.set', replacementText: 'set' }], + ['FrozenSet', { version: PythonVersion.V3_9, fullName: 'builtins.frozenset', replacementText: 'frozenset' }], + ['Type', { version: PythonVersion.V3_9, fullName: 'builtins.type', replacementText: 'type' }], + ['Deque', { version: PythonVersion.V3_9, fullName: 'collections.deque', replacementText: 'collections.deque' }], + [ + 'DefaultDict', + { + version: PythonVersion.V3_9, + fullName: 'collections.defaultdict', + replacementText: 'collections.defaultdict', + }, + ], + [ + 'OrderedDict', + { + version: PythonVersion.V3_9, + fullName: 'collections.OrderedDict', + replacementText: 'collections.OrderedDict', + }, + ], + [ + 'Counter', + { version: PythonVersion.V3_9, fullName: 'collections.Counter', replacementText: 'collections.Counter' }, + ], + [ + 'ChainMap', + { version: PythonVersion.V3_9, fullName: 'collections.ChainMap', replacementText: 'collections.ChainMap' }, + ], +]); + +const deprecatedSpecialForms = new Map([ + ['Optional', { version: PythonVersion.V3_10, fullName: 'typing.Optional', replacementText: '| None' }], + ['Union', { version: PythonVersion.V3_10, fullName: 'typing.Union', replacementText: '|' }], +]); + export class Checker extends ParseTreeWalker { private readonly _moduleNode: ModuleNode; private readonly _fileInfo: AnalyzerFileInfo; private readonly 
_evaluator: TypeEvaluator; + private _isUnboundCheckSuppressed = false; // A list of all nodes that are defined within the module that // have their own scopes. @@ -264,15 +322,29 @@ export class Checker extends ParseTreeWalker { this._validateSlotsClassVarConflict(classTypeResult.classType); } + this._validateMultipleInheritanceCompatibility(classTypeResult.classType, node.name); + + this._validateConstructorConsistency(classTypeResult.classType); + this._validateFinalMemberOverrides(classTypeResult.classType); this._validateInstanceVariableInitialization(classTypeResult.classType); this._validateFinalClassNotAbstract(classTypeResult.classType, node); + this._validateDataClassPostInit(classTypeResult.classType, node); + + this._validateProtocolCompatibility(classTypeResult.classType, node); + + this._reportDuplicateEnumMembers(classTypeResult.classType); + if (ClassType.isTypedDictClass(classTypeResult.classType)) { this._validateTypedDictClassSuite(node.suite); } + + if (ClassType.isEnumClass(classTypeResult.classType)) { + this._validateEnumClassOverride(node, classTypeResult.classType); + } } this._scopedNodes.push(node); @@ -282,14 +354,14 @@ export class Checker extends ParseTreeWalker { override visitFunction(node: FunctionNode): boolean { const functionTypeResult = this._evaluator.getTypeOfFunction(node); - const containingClassNode = ParseTreeUtils.getEnclosingClass(node, true); + const containingClassNode = ParseTreeUtils.getEnclosingClass(node, /* stopAtFunction */ true); if (functionTypeResult) { // Track whether we have seen a *args: P.args parameter. Named // parameters after this need to be flagged as an error. let sawParamSpecArgs = false; - // Report any unknown parameter types. + // Report any unknown or missing parameter types. node.parameters.forEach((param, index) => { if (param.name) { // Determine whether this is a P.args parameter. 
@@ -319,16 +391,19 @@ export class Checker extends ParseTreeWalker { ); } - // Allow unknown param types if the param is named '_'. + // Allow unknown and missing param types if the param is named '_'. if (param.name && param.name.value !== '_') { - if (index < functionTypeResult.functionType.details.parameters.length) { - const paramType = functionTypeResult.functionType.details.parameters[index].type; + const functionTypeParam = functionTypeResult.functionType.details.parameters.find( + (p) => p.name === param.name?.value + ); + if (functionTypeParam) { + const paramType = functionTypeParam.type; if ( isUnknown(paramType) || (isTypeVar(paramType) && paramType.details.isSynthesized && - !paramType.details.isSynthesizedSelfCls) + !paramType.details.isSynthesizedSelf) ) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportUnknownParameterType, @@ -352,6 +427,26 @@ export class Checker extends ParseTreeWalker { param.name ); } + + let hasAnnotation = false; + + if (functionTypeResult.functionType.details.parameters[index].typeAnnotation) { + hasAnnotation = true; + } else { + // See if this is a "self" and "cls" parameter. They are exempt from this rule. + if (isTypeVar(paramType) && paramType.details.isSynthesizedSelf) { + hasAnnotation = true; + } + } + + if (!hasAnnotation) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportMissingParameterType, + DiagnosticRule.reportMissingParameterType, + Localizer.Diagnostic.paramAnnotationMissing().format({ name: param.name.value }), + param.name + ); + } } } @@ -370,27 +465,20 @@ export class Checker extends ParseTreeWalker { }); // Check for invalid use of ParamSpec P.args and P.kwargs. 
- const paramSpecParams = node.parameters.filter((param, index) => { - const paramInfo = functionTypeResult.functionType.details.parameters[index]; - if (paramInfo.typeAnnotation && isTypeVar(paramInfo.type) && isParamSpec(paramInfo.type)) { - if (paramInfo.category !== ParameterCategory.Simple) { - const paramAnnotation = - paramInfo.typeAnnotation.nodeType === ParseNodeType.StringList - ? paramInfo.typeAnnotation.typeAnnotation - : paramInfo.typeAnnotation; - if (paramAnnotation?.nodeType === ParseNodeType.MemberAccess) { - return true; - } + const paramSpecParams = functionTypeResult.functionType.details.parameters.filter((param) => { + if (param.typeAnnotation && isTypeVar(param.type) && isParamSpec(param.type)) { + if (param.category !== ParameterCategory.Simple && param.name && param.type.paramSpecAccess) { + return true; } } return false; }); - if (paramSpecParams.length === 1) { + if (paramSpecParams.length === 1 && paramSpecParams[0].typeAnnotation) { this._evaluator.addError( Localizer.Diagnostic.paramSpecArgsKwargsUsage(), - paramSpecParams[0].typeAnnotation || paramSpecParams[0].typeAnnotationComment! + paramSpecParams[0].typeAnnotation ); } @@ -428,7 +516,7 @@ export class Checker extends ParseTreeWalker { if (functionTypeResult) { const annotationNode = param.typeAnnotation || param.typeAnnotationComment; - if (annotationNode) { + if (annotationNode && index < functionTypeResult.functionType.details.parameters.length) { const paramType = functionTypeResult.functionType.details.parameters[index].type; if ( isTypeVar(paramType) && @@ -471,6 +559,14 @@ export class Checker extends ParseTreeWalker { // Verify common dunder signatures. this._validateDunderSignatures(node, functionTypeResult.functionType, containingClassNode !== undefined); + + // Verify that strict type guard functions don't violate the constraints + // of strict type guards. 
+ this._validateStrictTypeGuardFunction( + node, + functionTypeResult.functionType, + containingClassNode !== undefined + ); } // If we're at the module level within a stub file, report a diagnostic @@ -578,9 +674,11 @@ export class Checker extends ParseTreeWalker { this._fileInfo.diagnosticRuleSet.reportUnusedCoroutine !== 'none' ) { if (node.parent?.nodeType === ParseNodeType.StatementList) { + const isRevealTypeCall = + node.leftExpression.nodeType === ParseNodeType.Name && node.leftExpression.value === 'reveal_type'; const returnType = this._evaluator.getType(node); - if (returnType && this._isTypeValidForUnusedValueTest(returnType)) { + if (!isRevealTypeCall && returnType && this._isTypeValidForUnusedValueTest(returnType)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportUnusedCallResult, DiagnosticRule.reportUnusedCallResult, @@ -687,9 +785,9 @@ export class Checker extends ParseTreeWalker { returnType = NoneType.createInstance(); } - if (this._evaluator.isNodeReachable(node) && enclosingFunctionNode) { + if (this._evaluator.isNodeReachable(node, /* sourceNode */ undefined) && enclosingFunctionNode) { if (declaredReturnType) { - if (isNoReturnType(declaredReturnType)) { + if (isNever(declaredReturnType)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -698,16 +796,58 @@ export class Checker extends ParseTreeWalker { ); } else { const diagAddendum = new DiagnosticAddendum(); + let returnTypeMatches = false; if ( - !this._evaluator.canAssignType( + this._evaluator.canAssignType( declaredReturnType, returnType, diagAddendum, - /* typeVarMap */ undefined, + new TypeVarMap(), CanAssignFlags.AllowBoolTypeGuard ) ) { + returnTypeMatches = true; + } else { + // See if the declared return type includes one or more constrained TypeVars. If so, + // try to narrow these TypeVars to a single type. 
+ const uniqueTypeVars = getTypeVarArgumentsRecursive(declaredReturnType); + + if ( + uniqueTypeVars && + uniqueTypeVars.some((typeVar) => typeVar.details.constraints.length > 0) + ) { + const typeVarMap = new TypeVarMap(); + + for (const typeVar of uniqueTypeVars) { + if (typeVar.details.constraints.length > 0) { + const narrowedType = this._evaluator.narrowConstrainedTypeVar(node, typeVar); + if (narrowedType) { + typeVarMap.setTypeVarType(typeVar, narrowedType); + typeVarMap.addSolveForScope(getTypeVarScopeId(typeVar)); + } + } + } + + if (!typeVarMap.isEmpty()) { + const adjustedReturnType = applySolvedTypeVars(declaredReturnType, typeVarMap); + + if ( + this._evaluator.canAssignType( + adjustedReturnType, + returnType, + diagAddendum, + /* typeVarMap */ undefined, + CanAssignFlags.AllowBoolTypeGuard + ) + ) { + returnTypeMatches = true; + } + } + } + } + + if (!returnTypeMatches) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -751,18 +891,26 @@ export class Checker extends ParseTreeWalker { override visitYieldFrom(node: YieldFromNode) { const yieldFromType = this._evaluator.getType(node.expression) || UnknownType.create(); - let yieldType = - this._evaluator.getTypeFromIterable(yieldFromType, /* isAsync */ false, node) || UnknownType.create(); - - // Does the iterator return a Generator? If so, get the yield type from it. - // If the iterator doesn't return a Generator, use the iterator return type - // directly. - const generatorTypeArgs = getGeneratorTypeArgs(yieldType); - if (generatorTypeArgs) { - yieldType = generatorTypeArgs.length >= 1 ? generatorTypeArgs[0] : UnknownType.create(); + let yieldType: Type | undefined; + + if (isClassInstance(yieldFromType) && ClassType.isBuiltIn(yieldFromType, 'Coroutine')) { + // Handle the case of old-style (pre-await) coroutines. 
+ yieldType = UnknownType.create(); } else { yieldType = - this._evaluator.getTypeFromIterator(yieldFromType, /* isAsync */ false, node) || UnknownType.create(); + this._evaluator.getTypeFromIterable(yieldFromType, /* isAsync */ false, node) || UnknownType.create(); + + // Does the iterator return a Generator? If so, get the yield type from it. + // If the iterator doesn't return a Generator, use the iterator return type + // directly. + const generatorTypeArgs = getGeneratorTypeArgs(yieldType); + if (generatorTypeArgs) { + yieldType = generatorTypeArgs.length >= 1 ? generatorTypeArgs[0] : UnknownType.create(); + } else { + yieldType = + this._evaluator.getTypeFromIterator(yieldFromType, /* isAsync */ false, node) || + UnknownType.create(); + } } this._validateYieldType(node, yieldType); @@ -784,7 +932,7 @@ export class Checker extends ParseTreeWalker { doForEachSubtype(exceptionType, (subtype) => { subtype = this._evaluator.makeTopLevelTypeVarsConcrete(subtype); - if (!isAnyOrUnknown(subtype) && !isNone(subtype)) { + if (!isAnyOrUnknown(subtype) && !isNoneInstance(subtype)) { if (isClass(subtype)) { if (!derivesFromClassRecursive(subtype, baseExceptionType, /* ignoreUnknown */ false)) { diagAddendum.addMessage( @@ -840,7 +988,7 @@ export class Checker extends ParseTreeWalker { if (type && isClassInstance(type)) { if (isTupleClass(type) && type.tupleTypeArguments) { if (type.tupleTypeArguments.length > 0) { - if (!isOpenEndedTupleClass(type)) { + if (!isUnboundedTupleClass(type)) { this._evaluator.addDiagnosticForTextRange( this._fileInfo, this._fileInfo.diagnosticRuleSet.reportAssertAlwaysTrue, @@ -881,40 +1029,43 @@ export class Checker extends ParseTreeWalker { // If the index is a literal integer, see if this is a tuple with // a known length and the integer value exceeds the length. 
const baseType = this._evaluator.getType(node.baseExpression); - if (baseType && isClassInstance(baseType) && baseType.tupleTypeArguments && !isOpenEndedTupleClass(baseType)) { - const tupleLength = baseType.tupleTypeArguments.length; - - if ( - node.items.length === 1 && - !node.trailingComma && - node.items[0].argumentCategory === ArgumentCategory.Simple && - !node.items[0].name - ) { - const subscriptType = this._evaluator.getType(node.items[0].valueExpression); - if ( - subscriptType && - isClassInstance(subscriptType) && - ClassType.isBuiltIn(subscriptType, 'int') && - isLiteralType(subscriptType) - ) { - const subscriptValue = subscriptType.literalValue as number; + if (baseType) { + doForEachSubtype(baseType, (subtype) => { + if (isClassInstance(subtype) && subtype.tupleTypeArguments && !isUnboundedTupleClass(subtype)) { + const tupleLength = subtype.tupleTypeArguments.length; if ( - (subscriptValue >= 0 && subscriptValue >= tupleLength) || - (subscriptValue < 0 && subscriptValue + tupleLength < 0) + node.items.length === 1 && + !node.trailingComma && + node.items[0].argumentCategory === ArgumentCategory.Simple && + !node.items[0].name ) { - this._evaluator.addDiagnostic( - this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.tupleIndexOutOfRange().format({ - length: tupleLength, - index: subscriptValue, - }), - node - ); + const subscriptType = this._evaluator.getType(node.items[0].valueExpression); + if ( + subscriptType && + isClassInstance(subscriptType) && + ClassType.isBuiltIn(subscriptType, 'int') && + isLiteralType(subscriptType) && + typeof subscriptType.literalValue === 'number' + ) { + if ( + (subscriptType.literalValue >= 0 && subscriptType.literalValue >= tupleLength) || + (subscriptType.literalValue < 0 && subscriptType.literalValue + tupleLength < 0) + ) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + 
DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.tupleIndexOutOfRange().format({ + index: subscriptType.literalValue, + type: this._evaluator.printType(subtype), + }), + node + ); + } + } } } - } + }); } return true; @@ -1011,7 +1162,7 @@ export class Checker extends ParseTreeWalker { this._evaluator.getType(node); } - if (node.strings.length > 1) { + if (node.strings.length > 1 && !node.isParenthesized) { this._evaluator.addDiagnosticForTextRange( this._fileInfo, this._fileInfo.diagnosticRuleSet.reportImplicitStringConcatenation, @@ -1033,31 +1184,55 @@ export class Checker extends ParseTreeWalker { } override visitGlobal(node: GlobalNode): boolean { - node.nameList.forEach((name) => { - this._evaluator.getType(name); + this._suppressUnboundCheck(() => { + node.nameList.forEach((name) => { + this._evaluator.getType(name); + + this.walk(name); + }); }); - return true; + + return false; } override visitNonlocal(node: NonlocalNode): boolean { - node.nameList.forEach((name) => { - this._evaluator.getType(name); + this._suppressUnboundCheck(() => { + node.nameList.forEach((name) => { + this._evaluator.getType(name); + + this.walk(name); + }); }); - return true; + + return false; } override visitName(node: NameNode) { // Determine if we should log information about private usage. this._conditionallyReportPrivateUsage(node); + + // Determine if the name is possibly unbound. + if (!this._isUnboundCheckSuppressed) { + this._reportUnboundName(node); + } + + // Report the use of a deprecated symbol. For now, this functionality + // is disabled. We'll leave it in place for the future. 
+ // this._reportDeprecatedUse(node); + return true; } override visitDel(node: DelNode) { - node.expressions.forEach((expr) => { - this._evaluator.verifyDeleteExpression(expr); + this._suppressUnboundCheck(() => { + node.expressions.forEach((expr) => { + this._evaluator.verifyDeleteExpression(expr); + + this.walk(expr); + }); }); - return true; + return false; } override visitMemberAccess(node: MemberAccessNode) { @@ -1108,6 +1283,7 @@ export class Checker extends ParseTreeWalker { override visitMatch(node: MatchNode): boolean { this._evaluator.getType(node.subjectExpression); + this._validateExhaustiveMatch(node); return true; } @@ -1120,6 +1296,16 @@ export class Checker extends ParseTreeWalker { return true; } + override visitPatternClass(node: PatternClassNode): boolean { + validateClassPattern(this._evaluator, node); + return true; + } + + override visitTry(node: TryNode): boolean { + this._reportUnusedExceptStatements(node); + return true; + } + override visitError(node: ErrorNode) { // Get the type of the child so it's available to // the completion provider. @@ -1131,6 +1317,45 @@ export class Checker extends ParseTreeWalker { return false; } + private _validateExhaustiveMatch(node: MatchNode) { + // This check can be expensive, so skip it if it's disabled. 
+ if (this._fileInfo.diagnosticRuleSet.reportMatchNotExhaustive === 'none') { + return; + } + + const narrowedTypeResult = this._evaluator.evaluateTypeForSubnode(node, () => { + this._evaluator.evaluateTypesForMatchNode(node); + }); + + if (narrowedTypeResult && !isNever(narrowedTypeResult.type)) { + const diagAddendum = new DiagnosticAddendum(); + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.matchIsNotExhaustiveType().format({ + type: this._evaluator.printType(narrowedTypeResult.type), + }) + ); + diagAddendum.addMessage(Localizer.DiagnosticAddendum.matchIsNotExhaustiveHint()); + + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportMatchNotExhaustive, + DiagnosticRule.reportMatchNotExhaustive, + Localizer.Diagnostic.matchIsNotExhaustive() + diagAddendum.getString(), + node.subjectExpression + ); + } + } + + private _suppressUnboundCheck(callback: () => void) { + const wasSuppressed = this._isUnboundCheckSuppressed; + this._isUnboundCheckSuppressed = true; + + try { + callback(); + } finally { + this._isUnboundCheckSuppressed = wasSuppressed; + } + } + private _validateIllegalDefaultParamInitializer(node: ParseNode) { if (this._fileInfo.diagnosticRuleSet.reportCallInDefaultInitializer !== 'none') { if (ParseTreeUtils.isWithinDefaultParamInitializer(node) && !this._fileInfo.isStubFile) { @@ -1160,7 +1385,7 @@ export class Checker extends ParseTreeWalker { let isPossiblyTrue = false; doForEachSubtype(leftType, (leftSubtype) => { - if (this._evaluator.canAssignType(rightType, leftSubtype, new DiagnosticAddendum())) { + if (this._evaluator.canAssignType(rightType, leftSubtype)) { isPossiblyTrue = true; } }); @@ -1237,12 +1462,15 @@ export class Checker extends ParseTreeWalker { return !isTypeSame(leftType, rightType); } - if (isNone(leftType) || isNone(rightType)) { + if (isNoneInstance(leftType) || isNoneInstance(rightType)) { return !isTypeSame(leftType, rightType); } - if (isInstantiableClass(leftType)) { - if 
(isInstantiableClass(rightType)) { + if (isInstantiableClass(leftType) || (isClassInstance(leftType) && ClassType.isBuiltIn(leftType, 'type'))) { + if ( + isInstantiableClass(rightType) || + (isClassInstance(rightType) && ClassType.isBuiltIn(rightType, 'type')) + ) { const genericLeftType = ClassType.cloneForSpecialization( leftType, /* typeArguments */ undefined, @@ -1255,8 +1483,8 @@ export class Checker extends ParseTreeWalker { ); if ( - this._evaluator.canAssignType(genericLeftType, genericRightType, new DiagnosticAddendum()) || - this._evaluator.canAssignType(genericRightType, genericLeftType, new DiagnosticAddendum()) + this._evaluator.canAssignType(genericLeftType, genericRightType) || + this._evaluator.canAssignType(genericRightType, genericLeftType) ) { return true; } @@ -1287,8 +1515,8 @@ export class Checker extends ParseTreeWalker { ); if ( - this._evaluator.canAssignType(genericLeftType, genericRightType, new DiagnosticAddendum()) || - this._evaluator.canAssignType(genericRightType, genericLeftType, new DiagnosticAddendum()) + this._evaluator.canAssignType(genericLeftType, genericRightType) || + this._evaluator.canAssignType(genericRightType, genericLeftType) ) { return true; } @@ -1314,7 +1542,7 @@ export class Checker extends ParseTreeWalker { // Determines whether the specified type is one that should trigger // an "unused" value diagnostic. private _isTypeValidForUnusedValueTest(type: Type) { - return !isNone(type) && !isNoReturnType(type) && !isNever(type) && !isAnyOrUnknown(type); + return !isNoneInstance(type) && !isNever(type) && !isAnyOrUnknown(type); } // Verifies that each local type variable is used more than once. 
@@ -1325,6 +1553,7 @@ export class Checker extends ParseTreeWalker { } const localTypeVarUsage = new Map(); + let exemptBoundTypeVar = true; const nameWalker = new ParseTreeUtils.NameNodeWalker((nameNode, subscriptIndex, baseExpression) => { const nameType = this._evaluator.getType(nameNode); @@ -1336,7 +1565,9 @@ export class Checker extends ParseTreeWalker { // instances in these particular cases. let isExempt = nameType.details.constraints.length > 0 || - (nameType.details.boundType !== undefined && subscriptIndex !== undefined) || + (exemptBoundTypeVar && + nameType.details.boundType !== undefined && + subscriptIndex !== undefined) || isParamSpec(nameType); if (!isExempt && baseExpression && subscriptIndex !== undefined) { @@ -1374,6 +1605,10 @@ export class Checker extends ParseTreeWalker { }); if (node.returnTypeAnnotation) { + // Don't exempt the use of a bound TypeVar when used as a type argument + // within a return type. This exemption applies only to input parameter + // annotations. 
+ exemptBoundTypeVar = false; nameWalker.walk(node.returnTypeAnnotation); } @@ -1434,7 +1669,7 @@ export class Checker extends ParseTreeWalker { !this._evaluator.canAssignType( returnType, prevReturnType, - new DiagnosticAddendum(), + /* diag */ undefined, new TypeVarMap(), CanAssignFlags.SkipSolveTypeVars ) @@ -1490,18 +1725,18 @@ export class Checker extends ParseTreeWalker { return this._evaluator.canAssignType( functionType, prevOverload, - new DiagnosticAddendum(), - /* typeVarMap */ undefined, + /* diag */ undefined, + /* typeVarMap */ new TypeVarMap(getTypeVarScopeId(functionType)), CanAssignFlags.SkipSolveTypeVars | CanAssignFlags.SkipFunctionReturnTypeCheck | - CanAssignFlags.DisallowAssignFromAny + CanAssignFlags.OverloadOverlapCheck ); } private _isLegalOverloadImplementation( overload: FunctionType, implementation: FunctionType, - diag: DiagnosticAddendum + diag: DiagnosticAddendum | undefined ): boolean { const typeVarMap = new TypeVarMap(getTypeVarScopeId(implementation)); @@ -1511,12 +1746,14 @@ export class Checker extends ParseTreeWalker { implementation, diag, typeVarMap, - CanAssignFlags.SkipFunctionReturnTypeCheck | CanAssignFlags.ReverseTypeVarMatching + CanAssignFlags.SkipFunctionReturnTypeCheck | + CanAssignFlags.ReverseTypeVarMatching | + CanAssignFlags.SkipSelfClsTypeCheck ); // Now check the return types. const overloadReturnType = - overload.details.declaredReturnType || this._evaluator.getFunctionInferredReturnType(overload); + overload.details.declaredReturnType ?? 
this._evaluator.getFunctionInferredReturnType(overload); const implementationReturnType = applySolvedTypeVars( implementation.details.declaredReturnType || this._evaluator.getFunctionInferredReturnType(implementation), typeVarMap @@ -1524,6 +1761,7 @@ export class Checker extends ParseTreeWalker { const returnDiag = new DiagnosticAddendum(); if ( + !isNever(overloadReturnType) && !this._evaluator.canAssignType( implementationReturnType, overloadReturnType, @@ -1538,7 +1776,9 @@ export class Checker extends ParseTreeWalker { destType: this._evaluator.printType(implementationReturnType, /* expandTypeAlias */ false), }) ); - diag.addAddendum(returnDiag); + if (diag) { + diag.addAddendum(returnDiag); + } isLegal = false; } @@ -1547,12 +1787,13 @@ export class Checker extends ParseTreeWalker { private _walkStatementsAndReportUnreachable(statements: StatementNode[]) { let reportedUnreachable = false; + let prevStatement: StatementNode | undefined; for (const statement of statements) { // No need to report unreachable more than once since the first time // covers all remaining statements in the statement list. if (!reportedUnreachable) { - if (!this._evaluator.isNodeReachable(statement)) { + if (!this._evaluator.isNodeReachable(statement, prevStatement)) { // Create a text range that covers the next statement through // the end of the statement list. const start = statement.start; @@ -1569,6 +1810,8 @@ export class Checker extends ParseTreeWalker { } this.walk(statement); + + prevStatement = statement; } } @@ -1900,7 +2143,28 @@ export class Checker extends ParseTreeWalker { if (!sawAssignment && !this._fileInfo.isStubFile) { const firstDecl = decls.find((decl) => decl.type === DeclarationType.Variable && decl.isFinal); if (firstDecl) { - this._evaluator.addError(Localizer.Diagnostic.finalUnassigned().format({ name }), firstDecl.node); + // Is this an instance variable declared within a dataclass? 
If so, it + // is implicitly initialized by the synthesized `__init__` method and + // therefore has an implied assignment. + let isImplicitlyAssigned = false; + + if (symbol.isClassMember() && !symbol.isClassVar()) { + const containingClass = ParseTreeUtils.getEnclosingClass(firstDecl.node, /* stopAtFunction */ true); + if (containingClass) { + const classType = this._evaluator.getTypeOfClass(containingClass); + if ( + classType && + isClass(classType.decoratedType) && + ClassType.isDataClass(classType.decoratedType) + ) { + isImplicitlyAssigned = true; + } + } + } + + if (!isImplicitlyAssigned) { + this._evaluator.addError(Localizer.Diagnostic.finalUnassigned().format({ name }), firstDecl.node); + } } } } @@ -1953,20 +2217,21 @@ export class Checker extends ParseTreeWalker { return true; } + const decoratedType = primaryDeclTypeInfo + ? this._evaluator.makeTopLevelTypeVarsConcrete(primaryDeclTypeInfo.decoratedType) + : undefined; + // We need to handle properties in a careful manner because of // the way that setters and deleters are often defined using multiple // methods with the same name. 
if ( - primaryDeclTypeInfo && - isClassInstance(primaryDeclTypeInfo.decoratedType) && - ClassType.isPropertyClass(primaryDeclTypeInfo.decoratedType) && + decoratedType && + isClassInstance(decoratedType) && + ClassType.isPropertyClass(decoratedType) && isClassInstance(funcTypeInfo.decoratedType) && ClassType.isPropertyClass(funcTypeInfo.decoratedType) ) { - return ( - funcTypeInfo.decoratedType.details.typeSourceId !== - primaryDeclTypeInfo!.decoratedType.details.typeSourceId - ); + return funcTypeInfo.decoratedType.details.typeSourceId !== decoratedType.details.typeSourceId; } return !FunctionType.isOverloaded(funcTypeInfo.functionType); @@ -2026,15 +2291,43 @@ export class Checker extends ParseTreeWalker { ); addPrimaryDeclInfo(diag); } else if (otherDecl.type === DeclarationType.Function) { - const diag = this._evaluator.addDiagnostic( - this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - otherDecl.isMethod - ? Localizer.Diagnostic.obscuredMethodDeclaration().format({ name }) - : Localizer.Diagnostic.obscuredFunctionDeclaration().format({ name }), - otherDecl.node.name - ); - addPrimaryDeclInfo(diag); + const primaryType = this._evaluator.getTypeForDeclaration(primaryDecl); + + // If the return type has not yet been inferred, do so now. + if (primaryType && isFunction(primaryType)) { + this._evaluator.getFunctionInferredReturnType(primaryType); + } + + let duplicateIsOk = false; + const otherType = this._evaluator.getTypeForDeclaration(otherDecl); + + const suite1 = ParseTreeUtils.getEnclosingSuite(primaryDecl.node); + const suite2 = ParseTreeUtils.getEnclosingSuite(otherDecl.node); + + const isInSameStatementList = suite1 === suite2; + + // If the return type has not yet been inferred, do so now. + if (otherType && isFunction(otherType)) { + this._evaluator.getFunctionInferredReturnType(otherType); + } + + // If both declarations are functions, it's OK if they + // both have the same signatures. 
+ if (primaryType && otherType && isTypeSame(primaryType, otherType)) { + duplicateIsOk = true; + } + + if (!duplicateIsOk || isInSameStatementList) { + const diag = this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + otherDecl.isMethod + ? Localizer.Diagnostic.obscuredMethodDeclaration().format({ name }) + : Localizer.Diagnostic.obscuredFunctionDeclaration().format({ name }), + otherDecl.node.name + ); + addPrimaryDeclInfo(diag); + } } else if (otherDecl.type === DeclarationType.Parameter) { if (otherDecl.node.name) { const diag = this._evaluator.addDiagnostic( @@ -2052,13 +2345,10 @@ export class Checker extends ParseTreeWalker { if (otherDecl.node.nodeType === ParseNodeType.Name) { let duplicateIsOk = false; - // If both declarations are variables, it's OK if they - // both have the same declared type. - if (primaryDecl.type === DeclarationType.Variable) { - const otherType = this._evaluator.getTypeForDeclaration(otherDecl); - if (primaryType && otherType && isTypeSame(primaryType, otherType)) { - duplicateIsOk = true; - } + // It's OK if they both have the same declared type. + const otherType = this._evaluator.getTypeForDeclaration(otherDecl); + if (primaryType && otherType && isTypeSame(primaryType, otherType)) { + duplicateIsOk = true; } if (!duplicateIsOk) { @@ -2185,6 +2475,12 @@ export class Checker extends ParseTreeWalker { if (decl.node.nodeType === ParseNodeType.Name) { nameNode = decl.node; + + // Don't emit a diagnostic if the name starts with an underscore. + // This indicates that the variable is unused. 
+ if (nameNode.value.startsWith('_')) { + diagnosticLevel = 'none'; + } } else if (decl.node.nodeType === ParseNodeType.Parameter) { nameNode = decl.node.name; @@ -2280,7 +2576,7 @@ export class Checker extends ParseTreeWalker { if (isClassInstance(arg1Subtype) && ClassType.isTupleClass(arg1Subtype) && arg1Subtype.tupleTypeArguments) { if ( arg1Subtype.tupleTypeArguments.some( - (typeArg) => !this._isTypeSupportedTypeForIsInstance(typeArg, isInstanceCheck) + (typeArg) => !this._isTypeSupportedTypeForIsInstance(typeArg.type, isInstanceCheck) ) ) { isValidType = false; @@ -2333,28 +2629,47 @@ export class Checker extends ParseTreeWalker { ]; const classTypeList: ClassType[] = []; + let arg1IncludesSubclasses = false; + doForEachSubtype(arg1Type, (arg1Subtype) => { if (isClass(arg1Subtype)) { if (TypeBase.isInstantiable(arg1Subtype)) { - classTypeList.push(arg1Subtype); - if ( - ClassType.isBuiltIn(arg1Subtype) && - nonstandardClassTypes.some((name) => name === arg1Subtype.details.name) - ) { - isValidType = false; + if (arg1Subtype.literalValue === undefined) { + classTypeList.push(arg1Subtype); + if ( + ClassType.isBuiltIn(arg1Subtype) && + nonstandardClassTypes.some((name) => name === arg1Subtype.details.name) + ) { + isValidType = false; + } + + if (arg1Subtype.includeSubclasses) { + arg1IncludesSubclasses = true; + } } } else { // The isinstance and issubclass call supports a variation where the second // parameter is a tuple of classes. 
- if (isTupleClass(arg1Subtype) && arg1Subtype.tupleTypeArguments) { - arg1Subtype.tupleTypeArguments.forEach((typeArg) => { - if (isInstantiableClass(typeArg)) { - classTypeList.push(typeArg); - } else { - isValidType = false; - } - }); + if (isTupleClass(arg1Subtype)) { + if (arg1Subtype.tupleTypeArguments) { + arg1Subtype.tupleTypeArguments.forEach((typeArg) => { + if (isInstantiableClass(typeArg.type)) { + classTypeList.push(typeArg.type); + + if (typeArg.type.includeSubclasses) { + arg1IncludesSubclasses = true; + } + } else { + isValidType = false; + } + }); + } + } else { + if (arg1Subtype.includeSubclasses) { + arg1IncludesSubclasses = true; + } } + if ( ClassType.isBuiltIn(arg1Subtype) && nonstandardClassTypes.some((name) => name === arg1Subtype.details.name) @@ -2400,13 +2715,13 @@ export class Checker extends ParseTreeWalker { ClassType.isDerivedFrom(varType, filterType) || (isInstanceCheck && ClassType.isProtocolClass(filterType) && - this._evaluator.canAssignType(filterType, varType, new DiagnosticAddendum())) || + this._evaluator.canAssignType(filterType, varType)) || (ClassType.isBuiltIn(filterType, 'dict') && ClassType.isTypedDictClass(varType)); const filterIsSubclass = ClassType.isDerivedFrom(filterType, varType) || (isInstanceCheck && ClassType.isProtocolClass(varType) && - this._evaluator.canAssignType(varType, filterType, new DiagnosticAddendum())); + this._evaluator.canAssignType(varType, filterType)); // Normally, a class should never be both a subclass and a // superclass. However, this can happen if one of the classes @@ -2476,7 +2791,9 @@ export class Checker extends ParseTreeWalker { return combineTypes(objTypeList); }; - if (isTypeSame(filteredType, arg0Type, /* ignorePseudoGeneric */ true)) { + // If arg1IncludesSubclasses is true, it contains a Type[X] class rather than X. A Type[X] + // could be a subclass of X, so the "unnecessary isinstance check" may be legit. 
+ if (!arg1IncludesSubclasses && isTypeSame(filteredType, arg0Type, /* ignorePseudoGeneric */ true)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportUnnecessaryIsInstance, DiagnosticRule.reportUnnecessaryIsInstance, @@ -2562,6 +2879,62 @@ export class Checker extends ParseTreeWalker { return false; } + private _reportDeprecatedUse(node: NameNode) { + const deprecatedForm = deprecatedAliases.get(node.value) ?? deprecatedSpecialForms.get(node.value); + + if (!deprecatedForm) { + return; + } + + const type = this._evaluator.getType(node); + + if (!type) { + return; + } + + if (!isInstantiableClass(type) || type.details.fullName !== deprecatedForm.fullName) { + return; + } + + if (this._fileInfo.executionEnvironment.pythonVersion >= deprecatedForm.version) { + this._evaluator.addDeprecated( + Localizer.Diagnostic.deprecatedType().format({ + version: versionToString(deprecatedForm.version), + replacement: deprecatedForm.replacementText, + }), + node + ); + } + } + + private _reportUnboundName(node: NameNode) { + if (this._fileInfo.diagnosticRuleSet.reportUnboundVariable === 'none') { + return; + } + + if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { + const type = this._evaluator.getType(node); + + if (type) { + if (isUnbound(type)) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportUnboundVariable, + DiagnosticRule.reportUnboundVariable, + Localizer.Diagnostic.symbolIsUnbound().format({ name: node.value }), + node + ); + } else if (isPossiblyUnbound(type)) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportUnboundVariable, + DiagnosticRule.reportUnboundVariable, + Localizer.Diagnostic.symbolIsPossiblyUnbound().format({ name: node.value }), + node + ); + } + } + } + } + private _conditionallyReportPrivateUsage(node: NameNode) { if (this._fileInfo.diagnosticRuleSet.reportPrivateUsage === 'none') { return; @@ -2621,7 +2994,7 @@ export class Checker extends ParseTreeWalker { } } - if 
(primaryDeclaration.node === node) { + if (!primaryDeclaration || primaryDeclaration.node === node) { return; } @@ -2692,6 +3065,33 @@ export class Checker extends ParseTreeWalker { } } + // Validates that an enum class does not attempt to override another + // enum class that has already defined values. + private _validateEnumClassOverride(node: ClassNode, classType: ClassType) { + classType.details.baseClasses.forEach((baseClass, index) => { + if (isClass(baseClass) && ClassType.isEnumClass(baseClass)) { + // Determine whether the base enum class defines an enumerated value. + let baseEnumDefinesValue = false; + + baseClass.details.fields.forEach((symbol) => { + const symbolType = this._evaluator.getEffectiveTypeOfSymbol(symbol); + if (isClassInstance(symbolType) && ClassType.isSameGenericClass(symbolType, baseClass)) { + baseEnumDefinesValue = true; + } + }); + + if (baseEnumDefinesValue) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.enumClassOverride().format({ name: baseClass.details.name }), + node.arguments[index] + ); + } + } + }); + } + // Verifies the rules specified in PEP 589 about TypedDict classes. // They cannot have statements other than type annotations, doc // strings, and "pass" statements or ellipses. @@ -2720,6 +3120,48 @@ export class Checker extends ParseTreeWalker { }); } + private _validateStrictTypeGuardFunction(node: FunctionNode, functionType: FunctionType, isMethod: boolean) { + // Is this a strict type guard function? 
+ if (!functionType.details.declaredReturnType) { + return; + } + + if ( + !isClassInstance(functionType.details.declaredReturnType) || + !ClassType.isBuiltIn(functionType.details.declaredReturnType, 'StrictTypeGuard') || + !functionType.details.declaredReturnType.typeArguments || + functionType.details.declaredReturnType.typeArguments.length < 1 + ) { + return; + } + + const typeGuardType = functionType.details.declaredReturnType.typeArguments[0]; + + // Determine the type of the first parameter. + const paramIndex = isMethod && !FunctionType.isStaticMethod(functionType) ? 1 : 0; + if (paramIndex >= functionType.details.parameters.length) { + return; + } + + const paramType = FunctionType.getEffectiveParameterType(functionType, paramIndex); + + // Verify that the typeGuardType is a narrower type than the paramType. + if (!this._evaluator.canAssignType(paramType, typeGuardType)) { + const returnAnnotation = node.returnTypeAnnotation || node.functionAnnotationComment?.returnTypeAnnotation; + if (returnAnnotation) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.strictTypeGuardReturnType().format({ + type: this._evaluator.printType(paramType), + returnType: this._evaluator.printType(typeGuardType), + }), + returnAnnotation + ); + } + } + } + private _validateDunderSignatures(node: FunctionNode, functionType: FunctionType, isMethod: boolean) { const functionName = functionType.details.name; @@ -2729,7 +3171,7 @@ export class Checker extends ParseTreeWalker { const declaredReturnType = functionType.details.declaredReturnType; if (returnAnnotation && declaredReturnType) { - if (!isNone(declaredReturnType) && !isNoReturnType(declaredReturnType)) { + if (!isNoneInstance(declaredReturnType) && !isNever(declaredReturnType)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -2740,8 
+3182,8 @@ export class Checker extends ParseTreeWalker { } else { const inferredReturnType = this._evaluator.getFunctionInferredReturnType(functionType); if ( - !isNoReturnType(inferredReturnType) && - !isNone(inferredReturnType) && + !isNever(inferredReturnType) && + !isNoneInstance(inferredReturnType) && !isAnyOrUnknown(inferredReturnType) ) { this._evaluator.addDiagnostic( @@ -2813,11 +3255,11 @@ export class Checker extends ParseTreeWalker { // against the declared type, but we need to verify the implicit None // at the end of the function. if (declaredReturnType && !functionNeverReturns && implicitlyReturnsNone) { - if (isNoReturnType(declaredReturnType)) { + if (isNever(declaredReturnType)) { // If the function consists entirely of "...", assume that it's // an abstract method or a protocol method and don't require that - // the return type matches. - if (!ParseTreeUtils.isSuiteEmpty(node.suite)) { + // the return type matches. This check can also be skipped for an overload. + if (!ParseTreeUtils.isSuiteEmpty(node.suite) && !FunctionType.isOverloaded(functionType)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -2834,8 +3276,8 @@ export class Checker extends ParseTreeWalker { if (!this._evaluator.canAssignType(declaredReturnType, NoneType.createInstance(), diagAddendum)) { // If the function consists entirely of "...", assume that it's // an abstract method or a protocol method and don't require that - // the return type matches. - if (!ParseTreeUtils.isSuiteEmpty(node.suite)) { + // the return type matches. This check can also be skipped for an overload. 
+ if (!ParseTreeUtils.isSuiteEmpty(node.suite) && !FunctionType.isOverloaded(functionType)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -2896,19 +3338,226 @@ export class Checker extends ParseTreeWalker { }); } - // If a class is marked final, it must implement all abstract methods, - // otherwise it is of no use. - private _validateFinalClassNotAbstract(classType: ClassType, errorNode: ClassNode) { - if (!ClassType.isFinal(classType)) { + private _reportDuplicateEnumMembers(classType: ClassType) { + if (!ClassType.isEnumClass(classType) || ClassType.isBuiltIn(classType)) { return; } - if (!ClassType.supportsAbstractMethods(classType)) { + classType.details.fields.forEach((symbol, name) => { + // Enum members don't have type annotations. + if (symbol.getTypedDeclarations().length > 0) { + return; + } + + const decls = symbol.getDeclarations(); + if (decls.length >= 2 && decls[0].type === DeclarationType.Variable) { + const symbolType = this._evaluator.getEffectiveTypeOfSymbol(symbol); + + // Is this symbol a literal instance of the enum class? + if ( + isClassInstance(symbolType) && + ClassType.isSameGenericClass(symbolType, classType) && + symbolType.literalValue !== undefined + ) { + this._evaluator.addError( + Localizer.Diagnostic.duplicateEnumMember().format({ name }), + decls[1].node + ); + } + } + }); + } + + // If a non-protocol class explicitly inherits from a protocol class, this method + // verifies that any class or instance variables declared but not assigned + // in the protocol class are implemented in the subclass. It also checks that any + // empty functions declared in the protocol are implemented in the subclass. 
+ private _validateProtocolCompatibility(classType: ClassType, errorNode: ClassNode) { + if (ClassType.isProtocolClass(classType)) { return; } - const abstractMethods = this._evaluator.getAbstractMethods(classType); - if (abstractMethods.length === 0) { + const diagAddendum = new DiagnosticAddendum(); + + const isSymbolImplemented = (name: string) => { + return classType.details.mro.some((mroClass) => { + return isClass(mroClass) && !ClassType.isProtocolClass(mroClass) && mroClass.details.fields.has(name); + }); + }; + + classType.details.baseClasses.forEach((baseClass) => { + if (!isClass(baseClass) || !ClassType.isProtocolClass(baseClass)) { + return; + } + + const protocolSymbols = getProtocolSymbols(baseClass); + + protocolSymbols.forEach((member, name) => { + const decls = member.symbol.getDeclarations(); + + if (decls.length === 0 || !isClass(member.classType)) { + return; + } + + if (decls[0].type === DeclarationType.Variable) { + // If none of the declarations involve assignments, assume it's + // not implemented in the protocol. + if (!decls.some((decl) => decl.type === DeclarationType.Variable && !!decl.inferredTypeSource)) { + // This is a variable declaration that is not implemented in the + // protocol base class. Make sure it's implemented in the derived class. 
+ if (!isSymbolImplemented(name)) { + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.missingProtocolMember().format({ + name, + classType: member.classType.details.name, + }) + ); + } + } + } else if (decls[0].type === DeclarationType.Function) { + if (ParseTreeUtils.isSuiteEmpty(decls[0].node.suite) && decls[0]) { + if (getFileExtension(decls[0].path).toLowerCase() !== '.pyi') { + if (!isSymbolImplemented(name)) { + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.missingProtocolMember().format({ + name, + classType: member.classType.details.name, + }) + ); + } + } + } + } + }); + }); + + if (!diagAddendum.isEmpty()) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.missingProtocolMembers() + diagAddendum.getString(), + errorNode.name + ); + } + } + + // If a class is a dataclass with a `__post_init__` method, verify that its + // signature is correct. + private _validateDataClassPostInit(classType: ClassType, errorNode: ClassNode) { + if (!ClassType.isDataClass(classType)) { + return; + } + + const postInitMember = lookUpClassMember( + classType, + '__post_init__', + ClassMemberLookupFlags.SkipBaseClasses | ClassMemberLookupFlags.DeclaredTypesOnly + ); + + // If there's no __post_init__ method, there's nothing to check. + if (!postInitMember) { + return; + } + + // Collect the list of init-only variables in the order they were declared. 
+ const initOnlySymbolMap = new Map(); + classType.details.fields.forEach((symbol, name) => { + if (symbol.isInitVar()) { + initOnlySymbolMap.set(name, symbol); + } + }); + + const postInitType = this._evaluator.getTypeOfMember(postInitMember); + if ( + !isFunction(postInitType) || + !FunctionType.isInstanceMethod(postInitType) || + !postInitType.details.declaration + ) { + return; + } + + const paramListDetails = getParameterListDetails(postInitType); + // If there is an *args or **kwargs parameter or a keyword-only separator, + // don't bother checking. + if ( + paramListDetails.argsIndex !== undefined || + paramListDetails.kwargsIndex !== undefined || + paramListDetails.firstKeywordOnlyIndex !== undefined + ) { + return; + } + + // Verify that the parameter count matches. + const nonDefaultParams = paramListDetails.params.filter((paramInfo) => !paramInfo.param.hasDefault); + + // We expect to see one param for "self" plus one for each of the InitVars. + const expectedParamCount = initOnlySymbolMap.size + 1; + + if (expectedParamCount < nonDefaultParams.length || expectedParamCount > paramListDetails.params.length) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.dataClassPostInitParamCount().format({ expected: initOnlySymbolMap.size }), + postInitType.details.declaration.node.name + ); + } + + // Verify that the parameter types match. 
+ let paramIndex = 1; + + initOnlySymbolMap.forEach((symbol, fieldName) => { + if (paramIndex >= paramListDetails.params.length) { + return; + } + + const param = paramListDetails.params[paramIndex].param; + + if (param.hasDeclaredType && param.typeAnnotation) { + const fieldType = this._evaluator.getDeclaredTypeOfSymbol(symbol); + const paramType = FunctionType.getEffectiveParameterType( + postInitType, + paramListDetails.params[paramIndex].index + ); + const canAssignDiag = new DiagnosticAddendum(); + + if (fieldType && !this._evaluator.canAssignType(paramType, fieldType, canAssignDiag)) { + const diagnostic = this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.dataClassPostInitType().format({ fieldName }) + canAssignDiag.getString(), + param.typeAnnotation + ); + + if (diagnostic) { + const fieldDecls = symbol.getTypedDeclarations(); + if (fieldDecls.length > 0) { + diagnostic.addRelatedInfo( + Localizer.DiagnosticAddendum.dataClassFieldLocation(), + fieldDecls[0].path, + fieldDecls[0].range + ); + } + } + } + } + + paramIndex++; + }); + } + + // If a class is marked final, it must implement all abstract methods, + // otherwise it is of no use. + private _validateFinalClassNotAbstract(classType: ClassType, errorNode: ClassNode) { + if (!ClassType.isFinal(classType)) { + return; + } + + if (!ClassType.supportsAbstractMethods(classType)) { + return; + } + + const abstractMethods = this._evaluator.getAbstractMethods(classType); + if (abstractMethods.length === 0) { return; } @@ -3002,7 +3651,7 @@ export class Checker extends ParseTreeWalker { // the proper variance (invariant, covariant, contravariant). See PEP 544 // for an explanation for why this is important to enforce. 
private _validateProtocolTypeParamVariance(errorNode: ClassNode, classType: ClassType) { - const origTypeParams = classType.details.typeParameters; + const origTypeParams = classType.details.typeParameters.filter((typeParam) => !isParamSpec(typeParam)); // If this isn't a generic protocol, there's nothing to do here. if (origTypeParams.length === 0) { @@ -3015,24 +3664,42 @@ export class Checker extends ParseTreeWalker { } // Replace all of the type parameters with invariant TypeVars. - const updatedTypeParams = origTypeParams - .filter((typeParam) => !isParamSpec(typeParam) && !isVariadicTypeVar(typeParam)) - .map((typeParam) => TypeVarType.cloneAsInvariant(typeParam)); + const updatedTypeParams = origTypeParams.map((typeParam) => + isVariadicTypeVar(typeParam) ? typeParam : TypeVarType.cloneAsInvariant(typeParam) + ); const updatedClassType = ClassType.cloneWithNewTypeParameters(classType, updatedTypeParams); const objectObject = ClassType.cloneAsInstance(objectType); + const dummyTypeObject = ClassType.createInstantiable( + '__protocolVarianceDummy', + '', + '', + '', + 0, + 0, + undefined, + undefined + ); updatedTypeParams.forEach((param, paramIndex) => { - // Replace all type arguments with Any except for the + // Skip variadics. + if (param.details.isVariadic) { + return; + } + + // Replace all type arguments with a dummy type except for the // TypeVar of interest, which is replaced with an object instance. - const srcTypeArgs = updatedTypeParams.map((_, i) => { - return i === paramIndex ? objectObject : AnyType.create(); + const srcTypeArgs = updatedTypeParams.map((p, i) => { + if (p.details.isVariadic) { + return p; + } + return i === paramIndex ? objectObject : dummyTypeObject; }); - // Replace all type arguments with Any except for the + // Replace all type arguments with a dummy type except for the // TypeVar of interest, which is replaced with itself. const destTypeArgs = updatedTypeParams.map((p, i) => { - return i === paramIndex ? 
p : AnyType.create(); + return i === paramIndex || p.details.isVariadic ? p : dummyTypeObject; }); const srcType = ClassType.cloneForSpecialization( @@ -3124,6 +3791,338 @@ export class Checker extends ParseTreeWalker { }); } + // Validates that the __init__ and __new__ method signatures are consistent. + private _validateConstructorConsistency(classType: ClassType) { + const initMember = lookUpClassMember( + classType, + '__init__', + ClassMemberLookupFlags.SkipObjectBaseClass | ClassMemberLookupFlags.SkipInstanceVariables + ); + const newMember = lookUpClassMember( + classType, + '__new__', + ClassMemberLookupFlags.SkipObjectBaseClass | ClassMemberLookupFlags.SkipInstanceVariables + ); + + if (!initMember || !newMember || !isClass(initMember.classType) || !isClass(newMember.classType)) { + return; + } + + // If both the __new__ and __init__ come from subclasses, don't bother + // checking for this class. + if ( + !ClassType.isSameGenericClass(newMember.classType, classType) && + !ClassType.isSameGenericClass(initMember.classType, classType) + ) { + return; + } + + // If the class that provides the __new__ method has a custom metaclass with a + // __call__ method, skip this check. 
+ const metaclass = newMember.classType.details.effectiveMetaclass; + if (metaclass && isClass(metaclass) && !ClassType.isBuiltIn(metaclass, 'type')) { + const callMethod = lookUpClassMember( + metaclass, + '__call__', + ClassMemberLookupFlags.SkipTypeBaseClass | ClassMemberLookupFlags.SkipInstanceVariables + ); + if (callMethod) { + return; + } + } + + let newMemberType: Type | undefined = this._evaluator.getTypeOfMember(newMember); + if (!isFunction(newMemberType) && !isOverloadedFunction(newMemberType)) { + return; + } + newMemberType = this._evaluator.bindFunctionToClassOrObject( + classType, + newMemberType, + /* memberClass */ undefined, + /* errorNode */ undefined, + /* recursionCount */ undefined, + /* treatConstructorAsClassMember */ true + ); + if (!newMemberType) { + return; + } + + if (isOverloadedFunction(newMemberType)) { + // Find the implementation, not the overloaded signatures. + newMemberType = newMemberType.overloads.find((func) => !FunctionType.isOverloaded(func)); + + if (!newMemberType) { + return; + } + } + + let initMemberType: Type | undefined = this._evaluator.getTypeOfMember(initMember); + if (!isFunction(initMemberType) && !isOverloadedFunction(initMemberType)) { + return; + } + initMemberType = this._evaluator.bindFunctionToClassOrObject( + ClassType.cloneAsInstance(classType), + initMemberType + ); + + if (!initMemberType) { + return; + } + + if (isOverloadedFunction(initMemberType)) { + // Find the implementation, not the overloaded signatures. + initMemberType = initMemberType.overloads.find((func) => !FunctionType.isOverloaded(func)); + + if (!initMemberType) { + return; + } + } + + if (!isFunction(initMemberType) || !isFunction(newMemberType)) { + return; + } + + // If either of the functions has a default parameter signature + // (* args: Any, ** kwargs: Any), don't proceed with the check. 
+ if (FunctionType.hasDefaultParameters(initMemberType) || FunctionType.hasDefaultParameters(newMemberType)) { + return; + } + + // We'll set the "SkipArgsKwargs" flag for pragmatic reasons since __new__ + // often has an *args and/or **kwargs. We'll also set the ParamSpecValue + // because we don't care about the return type for this check. + initMemberType = FunctionType.cloneWithNewFlags( + initMemberType, + initMemberType.details.flags | + FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck | + FunctionTypeFlags.ParamSpecValue + ); + newMemberType = FunctionType.cloneWithNewFlags( + newMemberType, + newMemberType.details.flags | + FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck | + FunctionTypeFlags.ParamSpecValue + ); + + if ( + !this._evaluator.canAssignType( + newMemberType, + initMemberType, + /* diag */ undefined, + /* typeVarMap */ undefined, + CanAssignFlags.SkipFunctionReturnTypeCheck + ) || + !this._evaluator.canAssignType( + initMemberType, + newMemberType, + /* diag */ undefined, + /* typeVarMap */ undefined, + CanAssignFlags.SkipFunctionReturnTypeCheck + ) + ) { + const displayOnInit = ClassType.isSameGenericClass(initMember.classType, classType); + const initDecl = getLastTypedDeclaredForSymbol(initMember.symbol); + const newDecl = getLastTypedDeclaredForSymbol(newMember.symbol); + + if (initDecl && newDecl) { + const mainDecl = displayOnInit ? initDecl : newDecl; + const mainDeclNode = + mainDecl.node.nodeType === ParseNodeType.Function ? 
mainDecl.node.name : mainDecl.node; + + const diagAddendum = new DiagnosticAddendum(); + const initSignature = this._evaluator.printType(initMemberType); + const newSignature = this._evaluator.printType(newMemberType); + + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.initMethodSignature().format({ + type: initSignature, + }) + ); + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.newMethodSignature().format({ + type: newSignature, + }) + ); + + const diagnostic = this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportInconsistentConstructor, + DiagnosticRule.reportInconsistentConstructor, + Localizer.Diagnostic.constructorParametersMismatch().format({ + classType: this._evaluator.printType( + ClassType.cloneAsInstance(displayOnInit ? initMember.classType : newMember.classType) + ), + }) + diagAddendum.getString(), + mainDeclNode + ); + + if (diagnostic) { + const secondaryDecl = displayOnInit ? newDecl : initDecl; + + diagnostic.addRelatedInfo( + (displayOnInit + ? Localizer.DiagnosticAddendum.newMethodLocation() + : Localizer.DiagnosticAddendum.initMethodLocation() + ).format({ + type: this._evaluator.printType( + ClassType.cloneAsInstance(displayOnInit ? newMember.classType : initMember.classType) + ), + }), + secondaryDecl.path, + secondaryDecl.range + ); + } + } + } + } + + // Validates that any methods in multiple base classes are compatible with each other. + private _validateMultipleInheritanceCompatibility(classType: ClassType, errorNode: ParseNode) { + // Skip this check if reportIncompatibleMethodOverride is disabled because it's + // a relatively expensive check. + if (this._fileInfo.diagnosticRuleSet.reportIncompatibleMethodOverride === 'none') { + return; + } + + const baseClasses: ClassType[] = []; + + // Filter any unknown base classes. Also remove Generic and Protocol + // base classes. 
+ classType.details.baseClasses.forEach((baseClass) => { + if ( + isClass(baseClass) && + !ClassType.isBuiltIn(baseClass, 'Generic') && + !ClassType.isBuiltIn(baseClass, 'Protocol') + ) { + baseClasses.push(baseClass); + } + }); + + // If there is only one base class, there's nothing to do. + if (baseClasses.length < 2) { + return; + } + + // Build maps of symbols for each of the base classes. + const symbolMaps = baseClasses.map((baseClass) => { + const specializedBaseClass = classType.details.mro.find( + (c) => isClass(c) && ClassType.isSameGenericClass(c, baseClass) + ); + if (!specializedBaseClass || !isClass(specializedBaseClass)) { + return new Map(); + } + + // Retrieve all of the specialized symbols from the base class and its ancestors. + return getClassFieldsRecursive(specializedBaseClass); + }); + + for (let symbolMapBaseIndex = 1; symbolMapBaseIndex < symbolMaps.length; symbolMapBaseIndex++) { + const baseSymbolMap = symbolMaps[symbolMapBaseIndex]; + + for (const [name, baseClassAndSymbol] of baseSymbolMap) { + // Special-case dundered methods, which can differ in signature. Also + // exempt private symbols. 
+ if (SymbolNameUtils.isDunderName(name) || SymbolNameUtils.isPrivateName(name)) { + continue; + } + + const baseClassType = baseClassAndSymbol.classType; + if (!isClass(baseClassType)) { + continue; + } + + for ( + let symbolMapOverrideIndex = 0; + symbolMapOverrideIndex < symbolMapBaseIndex; + symbolMapOverrideIndex++ + ) { + const overrideSymbolMap = symbolMaps[symbolMapOverrideIndex]; + const overrideClassAndSymbol = overrideSymbolMap.get(name); + + if (overrideClassAndSymbol) { + let baseType = this._evaluator.getEffectiveTypeOfSymbol(baseClassAndSymbol.symbol); + if (isClass(baseClassAndSymbol.classType)) { + baseType = partiallySpecializeType(baseType, baseClassAndSymbol.classType); + } + let overrideType = this._evaluator.getEffectiveTypeOfSymbol(overrideClassAndSymbol.symbol); + if (isClass(overrideClassAndSymbol.classType)) { + overrideType = partiallySpecializeType(overrideType, overrideClassAndSymbol.classType); + } + + if (isFunction(baseType) || isOverloadedFunction(baseType)) { + const diagAddendum = new DiagnosticAddendum(); + let overrideFunction: FunctionType | undefined; + + if (isFunction(overrideType)) { + overrideFunction = overrideType; + } else if (isOverloadedFunction(overrideType)) { + // Use the last overload. + overrideFunction = overrideType.overloads[overrideType.overloads.length - 1]; + + // If the last overload isn't an implementation, skip the check for this symbol. 
+ if (FunctionType.isOverloaded(overrideFunction)) { + continue; + } + } + + if (overrideFunction) { + if ( + !this._evaluator.canOverrideMethod( + baseType, + overrideFunction, + diagAddendum, + /* enforceParamNameMatch */ true + ) + ) { + const decl = overrideFunction.details.declaration; + if (decl && decl.type === DeclarationType.Function) { + const diag = this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportIncompatibleMethodOverride, + DiagnosticRule.reportIncompatibleMethodOverride, + Localizer.Diagnostic.baseClassMethodTypeIncompatible().format({ + classType: classType.details.name, + name, + }) + diagAddendum.getString(), + errorNode + ); + + const overrideDecl = getLastTypedDeclaredForSymbol( + overrideClassAndSymbol.symbol + ); + const baseDecl = getLastTypedDeclaredForSymbol(baseClassAndSymbol.symbol); + + if (diag && overrideDecl && baseDecl) { + diag.addRelatedInfo( + Localizer.DiagnosticAddendum.baseClassProvidesType().format({ + baseClass: this._evaluator.printType( + convertToInstance(baseClasses[symbolMapOverrideIndex]) + ), + type: this._evaluator.printType(overrideType), + }), + overrideDecl.path, + overrideDecl.range + ); + + diag.addRelatedInfo( + Localizer.DiagnosticAddendum.baseClassProvidesType().format({ + baseClass: this._evaluator.printType( + convertToInstance(baseClasses[symbolMapBaseIndex]) + ), + type: this._evaluator.printType(baseType), + }), + baseDecl.path, + baseDecl.range + ); + } + } + } + } + } + } + } + } + } + } + // Validates that any overridden methods or variables contain the same // types as the original method. Also marks the class as abstract if one // or more abstract methods are not overridden. @@ -3211,8 +4210,9 @@ export class Checker extends ParseTreeWalker { enforceParamNameMatch ) ) { - const decl = overrideFunction.details.declaration; - if (decl && decl.type === DeclarationType.Function) { + const decl = + overrideFunction.details.declaration ?? 
getLastTypedDeclaredForSymbol(symbol); + if (decl) { const diag = this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportIncompatibleMethodOverride, DiagnosticRule.reportIncompatibleMethodOverride, @@ -3220,7 +4220,7 @@ export class Checker extends ParseTreeWalker { name, className: baseClassAndSymbol.classType.details.name, }) + diagAddendum.getString(), - decl.node.name + decl.type === DeclarationType.Function ? decl.node.name : decl.node ); const origDecl = getLastTypedDeclaredForSymbol(baseClassAndSymbol.symbol); @@ -3432,13 +4432,21 @@ export class Checker extends ParseTreeWalker { const isBaseClassVar = baseClassAndSymbol.symbol.isClassVar(); let isClassVar = symbol.isClassVar(); - // If the subclass doesn't redeclare the type but simply assigns - // it without declaring its type, we won't consider it an instance - // variable. if (isBaseClassVar && !isClassVar) { + // If the subclass doesn't redeclare the type but simply assigns + // it without declaring its type, we won't consider it an instance + // variable. if (!symbol.hasTypedDeclarations()) { isClassVar = true; } + + // If the subclass is declaring an inner class, we'll consider that + // to be a ClassVar. 
+ if ( + symbol.getTypedDeclarations().every((decl) => decl.type === DeclarationType.Class) + ) { + isClassVar = true; + } } if (isBaseClassVar !== isClassVar) { @@ -3478,6 +4486,19 @@ export class Checker extends ParseTreeWalker { const classTypeInfo = this._evaluator.getTypeOfClass(classNode); const classType = classTypeInfo?.classType; + if (node.name && classType) { + const superCheckMethods = ['__init__', '__init_subclass__', '__enter__', '__exit__']; + if (superCheckMethods.some((name) => name === node.name.value)) { + if ( + !FunctionType.isAbstractMethod(functionType) && + !FunctionType.isOverloaded(functionType) && + !this._fileInfo.isStubFile + ) { + this._validateSuperCallForMethod(node, functionType, classType); + } + } + } + if (node.name && node.name.value === '__new__') { // __new__ overrides should have a "cls" parameter. if ( @@ -3617,6 +4638,72 @@ export class Checker extends ParseTreeWalker { } } + // Determines whether the method properly calls through to the same method in all + // parent classes that expose a same-named method. + private _validateSuperCallForMethod(node: FunctionNode, methodType: FunctionType, classType: ClassType) { + // This is an expensive test, so if it's not enabled, don't do any work. + if (this._fileInfo.diagnosticRuleSet.reportMissingSuperCall === 'none') { + return; + } + + // If the class is marked final, we can skip the "object" base class + // because we know that the `__init__` method in `object` doesn't do + // anything. It's not safe to do this if the class isn't final because + // it could be combined with other classes in a multi-inheritance + // situation that effectively adds new superclasses that we don't know + // about statically. 
+ let effectiveFlags = ClassMemberLookupFlags.SkipInstanceVariables | ClassMemberLookupFlags.SkipOriginalClass; + if (ClassType.isFinal(classType)) { + effectiveFlags |= ClassMemberLookupFlags.SkipObjectBaseClass; + } + + const methodMember = lookUpClassMember(classType, methodType.details.name, effectiveFlags); + if (!methodMember) { + return; + } + + let foundCallOfMember = false; + + // Now scan the implementation of the method to determine whether + // super(). has been called for all of the required base classes. + const callNodeWalker = new ParseTreeUtils.CallNodeWalker((node) => { + if (node.leftExpression.nodeType === ParseNodeType.MemberAccess) { + // Is it accessing the method by the same name? + if (node.leftExpression.memberName.value === methodType.details.name) { + const memberBaseExpr = node.leftExpression.leftExpression; + + // Is it a "super" call? + if ( + memberBaseExpr.nodeType === ParseNodeType.Call && + memberBaseExpr.leftExpression.nodeType === ParseNodeType.Name && + memberBaseExpr.leftExpression.value === 'super' + ) { + foundCallOfMember = true; + } else { + // Is it an X. direct call? + const baseType = this._evaluator.getType(memberBaseExpr); + if (baseType && isInstantiableClass(baseType)) { + foundCallOfMember = true; + } + } + } + } + }); + callNodeWalker.walk(node.suite); + + // If we didn't find a call to at least one base class, report the problem. + if (!foundCallOfMember) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportMissingSuperCall, + DiagnosticRule.reportMissingSuperCall, + Localizer.Diagnostic.missingSuperCall().format({ + methodName: methodType.details.name, + }), + node.name + ); + } + } + // Validates that the annotated type of a "self" or "cls" parameter is // compatible with the type of the class that contains it. 
private _validateClsSelfParameterType(functionType: FunctionType, classType: ClassType, isCls: boolean) { @@ -3639,7 +4726,6 @@ export class Checker extends ParseTreeWalker { const paramType = this._evaluator.makeTopLevelTypeVarsConcrete(paramInfo.type); const expectedType = isCls ? classType : convertToInstance(classType); - const diag = new DiagnosticAddendum(); // If the declared type is a protocol class or instance, skip // the check. This has legitimate uses for mix-in classes. @@ -3656,7 +4742,7 @@ export class Checker extends ParseTreeWalker { return; } - if (!this._evaluator.canAssignType(paramType, expectedType, diag)) { + if (!this._evaluator.canAssignType(paramType, expectedType)) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -3680,14 +4766,10 @@ export class Checker extends ParseTreeWalker { assert(isFunction(functionTypeResult.functionType)); declaredReturnType = FunctionType.getSpecializedReturnType(functionTypeResult.functionType); if (declaredReturnType) { - declaredYieldType = this._evaluator.getTypeFromIterator( - declaredReturnType, - !!enclosingFunctionNode.isAsync, - /* errorNode */ undefined - ); + declaredYieldType = getGeneratorYieldType(declaredReturnType, !!enclosingFunctionNode.isAsync); } - if (declaredYieldType && !declaredYieldType && enclosingFunctionNode.returnTypeAnnotation) { + if (declaredReturnType && !declaredYieldType && enclosingFunctionNode.returnTypeAnnotation) { this._evaluator.addDiagnostic( this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -3700,8 +4782,8 @@ export class Checker extends ParseTreeWalker { } } - if (this._evaluator.isNodeReachable(node)) { - if (declaredReturnType && isNoReturnType(declaredReturnType)) { + if (this._evaluator.isNodeReachable(node, /* sourceNode */ undefined)) { + if (declaredReturnType && isNever(declaredReturnType)) { this._evaluator.addDiagnostic( 
this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -3725,6 +4807,93 @@ export class Checker extends ParseTreeWalker { } } + // Determines whether any of the except statements are unreachable because + // they are redundant. + private _reportUnusedExceptStatements(node: TryNode) { + let sawUnknownOrAny = false; + const exceptionTypesSoFar: ClassType[] = []; + + node.exceptClauses.forEach((except) => { + if (sawUnknownOrAny || except.isExceptGroup || !except.typeExpression) { + return; + } + + const exceptionType = this._evaluator.getType(except.typeExpression); + if (!exceptionType || isAnyOrUnknown(exceptionType)) { + sawUnknownOrAny = true; + return; + } + + const typesForThisExcept: ClassType[] = []; + + if (isInstantiableClass(exceptionType)) { + // If the exception type is a variable whose type could represent + // subclasses, the actual exception type is statically unknown. + if (exceptionType.includeSubclasses) { + sawUnknownOrAny = true; + } + + typesForThisExcept.push(exceptionType); + } else if (isClassInstance(exceptionType)) { + const iterableType = + this._evaluator.getTypeFromIterator( + exceptionType, + /* isAsync */ false, + /* errorNode */ undefined + ) || UnknownType.create(); + + doForEachSubtype(iterableType, (subtype) => { + if (isAnyOrUnknown(subtype)) { + sawUnknownOrAny = true; + } + + if (isInstantiableClass(subtype)) { + // If the exception type is a variable whose type could represent + // subclasses, the actual exception type is statically unknown. 
+ if (subtype.includeSubclasses) { + sawUnknownOrAny = true; + } + typesForThisExcept.push(subtype); + } + }); + } + + if (exceptionTypesSoFar.length > 0) { + const diagAddendum = new DiagnosticAddendum(); + let overriddenExceptionCount = 0; + + typesForThisExcept.forEach((thisExceptType) => { + const subtype = exceptionTypesSoFar.find((previousExceptType) => { + return derivesFromClassRecursive(thisExceptType, previousExceptType, /* ignoreUnknown */ true); + }); + + if (subtype) { + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.unreachableExcept().format({ + exceptionType: this._evaluator.printType(convertToInstance(thisExceptType)), + parentType: this._evaluator.printType(convertToInstance(subtype)), + }) + ); + overriddenExceptionCount++; + } + }); + + // Were all of the exception types overridden? + if (typesForThisExcept.length === overriddenExceptionCount) { + this._evaluator.addDiagnostic( + this._fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.unreachableExcept() + diagAddendum.getString(), + except.typeExpression + ); + this._evaluator.addUnusedCode(except, except.exceptSuite); + } + } + + exceptionTypesSoFar.push(...typesForThisExcept); + }); + } + private _reportDuplicateImports() { const importStatements = getTopLevelImports(this._moduleNode); diff --git a/packages/pyright-internal/src/analyzer/codeFlowEngine.ts b/packages/pyright-internal/src/analyzer/codeFlowEngine.ts new file mode 100644 index 000000000000..71ee12d4f13f --- /dev/null +++ b/packages/pyright-internal/src/analyzer/codeFlowEngine.ts @@ -0,0 +1,1630 @@ +/* + * codeFlowEngine.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that traverses the code flow graph to determine the (narrowed) + * type of a variable or expression or the reachability of a statement. + * + * This is largely based on the code flow engine in the + * TypeScript compiler. 
+ */ + +import { assert, fail } from '../common/debug'; +import { CallNode, ExpressionNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { getImportInfo } from './analyzerNodeInfo'; +import { + CodeFlowReferenceExpressionNode, + createKeyForReference, + createKeysForReferenceSubexpressions, + FlowAssignment, + FlowBranchLabel, + FlowCall, + FlowCondition, + FlowExhaustedMatch, + FlowFlags, + FlowLabel, + FlowNarrowForPattern, + FlowNode, + FlowPostContextManagerLabel, + FlowPostFinally, + FlowPreFinallyGate, + FlowVariableAnnotation, + FlowWildcardImport, +} from './codeFlowTypes'; +import { DeclarationType } from './declaration'; +import { isMatchingExpression, isPartialMatchingExpression } from './parseTreeUtils'; +import { Symbol } from './symbol'; +import { + CachedType, + IncompleteSubtypeInfo, + IncompleteType, + isIncompleteType, + SpeculativeTypeTracker, + TypeCache, +} from './typeCache'; +import { EvaluatorFlags, TypeEvaluator, TypeResult } from './typeEvaluatorTypes'; +import { getTypeNarrowingCallback } from './typeGuards'; +import { + ClassType, + combineTypes, + FunctionType, + isClass, + isClassInstance, + isFunction, + isInstantiableClass, + isModule, + isNever, + isOverloadedFunction, + isTypeSame, + isTypeVar, + ModuleType, + removeUnknownFromUnion, + Type, + TypeVarType, + UnboundType, + UnknownType, +} from './types'; +import { + ClassMemberLookupFlags, + convertToInstance, + doForEachSubtype, + isTypeAliasPlaceholder, + lookUpClassMember, + mapSubtypes, +} from './typeUtils'; + +export interface FlowNodeTypeResult { + type: Type | undefined; + isIncomplete: boolean; + generationCount?: number | undefined; + incompleteType?: Type | undefined; + incompleteSubtypes?: IncompleteSubtypeInfo[] | undefined; + recursiveVisitCount?: number; +} + +export interface CodeFlowAnalyzer { + getTypeFromCodeFlow: ( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + targetSymbolId: number | undefined, + 
initialType: Type | undefined, + isInitialTypeIncomplete: boolean + ) => FlowNodeTypeResult; +} + +export interface CodeFlowEngine { + createCodeFlowAnalyzer: () => CodeFlowAnalyzer; + isFlowNodeReachable: (flowNode: FlowNode, sourceFlowNode?: FlowNode) => boolean; + narrowConstrainedTypeVar: (flowNode: FlowNode, typeVar: TypeVarType) => Type | undefined; +} + +// Maximum number of times a loop flow node will be evaluated +// with incomplete results before we give up. +const maxFlowNodeLoopVisitCount = 64; + +// Maximum number of times getTypeFromFlowNode can be called +// recursively within loop or branch processing before we give up. +const maxCodeFlowInvocationsPerLoop = 16 * 1024; + +export function getCodeFlowEngine( + evaluator: TypeEvaluator, + speculativeTypeTracker: SpeculativeTypeTracker +): CodeFlowEngine { + const isReachableRecursionMap = new Map(); + const callIsNoReturnCache = new Map(); + const isExceptionContextManagerCache = new Map(); + let codeFlowInvocations = 0; + let flowIncompleteGeneration = 1; + + // Creates a new code flow analyzer that can be used to narrow the types + // of the expressions within an execution context. Each code flow analyzer + // instance maintains a cache of types it has already determined. + function createCodeFlowAnalyzer(): CodeFlowAnalyzer { + const flowNodeTypeCacheSet = new Map(); + + function getTypeFromCodeFlow( + flowNode: FlowNode, + reference: CodeFlowReferenceExpressionNode | undefined, + targetSymbolId: number | undefined, + initialType: Type | undefined, + isInitialTypeIncomplete: boolean + ): FlowNodeTypeResult { + const referenceKey = reference !== undefined ? createKeyForReference(reference) : undefined; + let subexpressionReferenceKeys: string[] | undefined; + const referenceKeyWithSymbolId = + referenceKey !== undefined && targetSymbolId !== undefined + ? 
referenceKey + `.${targetSymbolId.toString()}` : '.';

// Look up (or lazily create) the per-reference flow node type cache for this
// reference/symbol-ID combination.
let flowNodeTypeCache = flowNodeTypeCacheSet.get(referenceKeyWithSymbolId);
if (!flowNodeTypeCache) {
    flowNodeTypeCache = new Map();
    flowNodeTypeCacheSet.set(referenceKeyWithSymbolId, flowNodeTypeCache);
}

// Caches the type of the flow node in our local cache, keyed by the flow node ID.
// Bumps `flowIncompleteGeneration` whenever the newly-cached information could
// invalidate a previously-reported incomplete result.
function setCacheEntry(
    flowNode: FlowNode,
    type: Type | undefined,
    isIncomplete: boolean
): FlowNodeTypeResult {
    if (!isIncomplete) {
        flowIncompleteGeneration++;
    } else {
        const prevEntry = flowNodeTypeCache!.get(flowNode.id);
        if (prevEntry === undefined) {
            flowIncompleteGeneration++;
        } else if (type && (prevEntry as IncompleteType).isIncompleteType) {
            const prevIncompleteType = prevEntry as IncompleteType;
            if (prevIncompleteType.type && !isTypeSame(prevIncompleteType.type, type)) {
                flowIncompleteGeneration++;
            }
        }
    }

    // For speculative or incomplete types, we'll create a separate
    // object. For non-speculative and complete types, we'll store
    // the type directly.
    const entry: CachedType | undefined = isIncomplete
        ? {
              isIncompleteType: true,
              type,
              incompleteSubtypes: [],
              generationCount: flowIncompleteGeneration,
          }
        : type;

    flowNodeTypeCache!.set(flowNode.id, entry);
    speculativeTypeTracker.trackEntry(flowNodeTypeCache!, flowNode.id);

    return {
        type,
        isIncomplete,
        generationCount: flowIncompleteGeneration,
        incompleteSubtypes: isIncomplete ? [] : undefined,
    };
}

// Records the (possibly still-incomplete) type computed for one antecedent
// (`index`) of a loop label's cache entry. Bumps the generation count whenever
// the recorded type for that slot actually changes. Returns the recomputed
// aggregate cache entry for the flow node.
function setIncompleteSubtype(
    flowNode: FlowNode,
    index: number,
    type: Type | undefined,
    isIncomplete: boolean,
    isPending: boolean
) {
    const cachedEntry = flowNodeTypeCache!.get(flowNode.id);
    if (cachedEntry === undefined || !isIncompleteType(cachedEntry)) {
        fail('setIncompleteSubtype can be called only on a valid incomplete cache entry');
    }

    const incompleteEntries = cachedEntry.incompleteSubtypes;
    if (index < incompleteEntries.length) {
        const oldEntry = incompleteEntries[index];
        if (
            oldEntry.isIncomplete !== isIncomplete ||
            oldEntry.type === undefined ||
            type === undefined ||
            !isTypeSame(oldEntry.type, type)
        ) {
            incompleteEntries[index] = { type, isIncomplete, isPending };
            flowIncompleteGeneration++;
        } else if (oldEntry.isPending !== isPending) {
            // Only the pending flag changed; no need to invalidate the generation.
            incompleteEntries[index] = { type, isIncomplete, isPending };
        }
    } else {
        // Entries are appended strictly in order; anything else indicates a logic error.
        assert(incompleteEntries.length === index);
        incompleteEntries.push({ type, isIncomplete, isPending });
        flowIncompleteGeneration++;
    }

    return getCacheEntry(flowNode);
}

// Increments and returns the number of times the (incomplete) cache entry for
// this flow node has been visited during recursive evaluation.
function incrementFlowNodeVisitCount(flowNode: FlowNode) {
    const cachedEntry = flowNodeTypeCache!.get(flowNode.id);
    if (cachedEntry === undefined || !isIncompleteType(cachedEntry)) {
        fail('incrementFlowNodeVisitCount can be called only on a valid incomplete cache entry');
    }

    cachedEntry.recursiveVisitCount = (cachedEntry.recursiveVisitCount ?? 0) + 1;

    return cachedEntry.recursiveVisitCount;
}

// Tracks how many consecutive times the computed type for this flow node has
// remained unchanged ("converged"). With `reset`, the count is zeroed instead.
// Returns 0 if there is no incomplete cache entry to track against.
function incrementFlowNodeConvergenceCount(flowNode: FlowNode, reset = false) {
    const cachedEntry = flowNodeTypeCache!.get(flowNode.id);
    if (cachedEntry === undefined || !isIncompleteType(cachedEntry)) {
        return 0;
    }

    if (reset) {
        cachedEntry.recursiveConvergenceCount = 0;
    } else {
        cachedEntry.recursiveConvergenceCount = (cachedEntry.recursiveConvergenceCount ?? 0) + 1;
    }

    return cachedEntry.recursiveConvergenceCount;
}

// Returns the cached result for a flow node, or undefined if nothing has been
// cached. A cached value of `undefined` is itself meaningful (a complete
// "no type" result), which is why `has` is checked before `get`.
function getCacheEntry(flowNode: FlowNode): FlowNodeTypeResult | undefined {
    if (!flowNodeTypeCache!.has(flowNode.id)) {
        return undefined;
    }

    const cachedEntry = flowNodeTypeCache!.get(flowNode.id);
    if (cachedEntry === undefined) {
        // A complete entry whose type is undefined.
        return {
            type: cachedEntry,
            isIncomplete: false,
        };
    }

    if (!isIncompleteType(cachedEntry)) {
        return {
            type: cachedEntry,
            isIncomplete: false,
        };
    }

    let type = cachedEntry.type;

    if (cachedEntry.incompleteSubtypes.length > 0) {
        // Recompute the effective type based on all of the incomplete
        // types we've accumulated so far.
        const typesToCombine: Type[] = [];
        cachedEntry.incompleteSubtypes.forEach((t) => {
            if (t.type) {
                typesToCombine.push(t.type);
            }
        });
        type = typesToCombine.length > 0 ? combineTypes(typesToCombine) : undefined;
    }

    return {
        type,
        isIncomplete: true,
        incompleteSubtypes: cachedEntry.incompleteSubtypes,
        generationCount: cachedEntry.generationCount,
    };
}

// Removes any cached result for the flow node.
function deleteCacheEntry(flowNode: FlowNode) {
    flowNodeTypeCache!.delete(flowNode.id);
}

// Evaluates the type produced by an assignment flow node by re-evaluating the
// statement that contains it.
function evaluateAssignmentFlowNode(flowNode: FlowAssignment): TypeResult | undefined {
    // For function and class nodes, the reference node is the name
    // node, but we need to use the parent node (the FunctionNode or ClassNode)
    // to access the decorated type in the type cache.
    let nodeForCacheLookup: ParseNode = flowNode.node;
    const parentNode = flowNode.node.parent;
    if (parentNode) {
        if (parentNode.nodeType === ParseNodeType.Function || parentNode.nodeType === ParseNodeType.Class) {
            nodeForCacheLookup = parentNode;
        }
    }

    return evaluator.evaluateTypeForSubnode(nodeForCacheLookup, () => {
        evaluator.evaluateTypesForStatement(flowNode.node);
    });
}

// If this flow has no knowledge of the target expression, it returns undefined.
// If the start flow node for this scope is reachable, the typeAtStart value is
// returned.
//
// Parameters:
//   flowNode - the code flow graph node at which evaluation starts
//   reference - the expression being narrowed (undefined when no specific
//       reference is targeted)
//   targetSymbolId - disambiguates same-named symbols in different scopes
//   initialType - the type assumed at the start of the scope
//   isInitialTypeIncomplete - whether initialType is itself provisional
function getTypeFromFlowNode(
    flowNode: FlowNode,
    reference: CodeFlowReferenceExpressionNode | undefined,
    targetSymbolId: number | undefined,
    initialType: Type | undefined,
    isInitialTypeIncomplete: boolean
): FlowNodeTypeResult {
    let curFlowNode = flowNode;

    // Record how many times this function has been called.
    const codeFlowInvocationsAtStart = codeFlowInvocations;
    codeFlowInvocations++;

    // This is a frequently-called routine, so it's a good place to call
    // the cancellation check. If the operation is canceled, an exception
    // will be thrown at this point.
    evaluator.checkForCancellation();

    while (true) {
        // Have we already been here? If so, use the cached value.
        const cachedEntry = getCacheEntry(curFlowNode);
        if (cachedEntry) {
            if (!cachedEntry.isIncomplete) {
                return cachedEntry;
            }

            // If the cached entry is incomplete, we can use it only if nothing
            // has changed that may cause the previously-reported incomplete type to change.
            if (cachedEntry.generationCount === flowIncompleteGeneration) {
                return {
                    type: cachedEntry?.type ? removeUnknownFromUnion(cachedEntry.type) : undefined,
                    isIncomplete: true,
                };
            }
        }

        if (curFlowNode.flags & FlowFlags.Unreachable) {
            // We can get here if there are nodes in a compound logical expression
            // (e.g. "False and x") that are never executed but are evaluated.
            // The type doesn't matter in this case.
            return setCacheEntry(curFlowNode, undefined, /* isIncomplete */ false);
        }

        if (curFlowNode.flags & FlowFlags.VariableAnnotation) {
            const varAnnotationNode = curFlowNode as FlowVariableAnnotation;
            curFlowNode = varAnnotationNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & FlowFlags.Call) {
            const callFlowNode = curFlowNode as FlowCall;

            // If this function returns a "NoReturn" type, that means
            // it always raises an exception or otherwise doesn't return,
            // so we can assume that the code before this is unreachable.
            if (isCallNoReturn(callFlowNode.node)) {
                return setCacheEntry(curFlowNode, undefined, /* isIncomplete */ false);
            }

            curFlowNode = callFlowNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & FlowFlags.Assignment) {
            const assignmentFlowNode = curFlowNode as FlowAssignment;
            // Are we targeting the same symbol? We need to do this extra check because the same
            // symbol name might refer to different symbols in different scopes (e.g. a list
            // comprehension introduces a new scope).
            if (reference) {
                if (
                    targetSymbolId === assignmentFlowNode.targetSymbolId &&
                    isMatchingExpression(reference, assignmentFlowNode.node)
                ) {
                    // Is this a special "unbind" assignment? If so,
                    // we can handle it immediately without any further evaluation.
                    if (curFlowNode.flags & FlowFlags.Unbind) {
                        return setCacheEntry(curFlowNode, UnboundType.create(), /* isIncomplete */ false);
                    }

                    // If there was a cache entry already, that means we hit a recursive
                    // case (something like "int: int = 4"). Avoid infinite recursion
                    // by returning an undefined type.
                    if (cachedEntry && cachedEntry.type === undefined) {
                        return { type: undefined, isIncomplete: true };
                    }

                    // Set the cache entry to undefined before evaluating the
                    // expression in case it depends on itself.
                    // NOTE(review): `reference` is always truthy in this branch, so
                    // this always stores undefined; the conditional is redundant here.
                    setCacheEntry(
                        curFlowNode,
                        reference ? undefined : initialType,
                        /* isIncomplete */ true
                    );
                    let flowTypeResult = evaluateAssignmentFlowNode(assignmentFlowNode);
                    if (flowTypeResult) {
                        if (isTypeAliasPlaceholder(flowTypeResult.type)) {
                            flowTypeResult = undefined;
                        } else if (
                            reference.nodeType === ParseNodeType.MemberAccess &&
                            evaluator.isAsymmetricDescriptorAssignment(assignmentFlowNode.node)
                        ) {
                            flowTypeResult = undefined;
                        }
                    }
                    return setCacheEntry(curFlowNode, flowTypeResult?.type, !!flowTypeResult?.isIncomplete);
                } else if (isPartialMatchingExpression(reference, assignmentFlowNode.node)) {
                    // If the node partially matches the reference, we need to "kill" any narrowed
                    // types further above this point. For example, if we see the sequence
                    //    a.b = 3
                    //    a = Foo()
                    //    x = a.b
                    // The type of "a.b" can no longer be assumed to be Literal[3].
                    return {
                        type: initialType,
                        isIncomplete: isInitialTypeIncomplete,
                    };
                }
            }

            curFlowNode = assignmentFlowNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & FlowFlags.BranchLabel) {
            const branchFlowNode = curFlowNode as FlowBranchLabel;
            if (curFlowNode.flags & FlowFlags.PostContextManager) {
                // Determine whether any of the context managers support exception
                // suppression. If not, none of its antecedents are reachable.
                const contextMgrNode = curFlowNode as FlowPostContextManagerLabel;
                const contextManagerSwallowsExceptions = contextMgrNode.expressions.some((expr) =>
                    isExceptionContextManager(expr, contextMgrNode.isAsync)
                );

                if (contextManagerSwallowsExceptions === contextMgrNode.blockIfSwallowsExceptions) {
                    // Do not explore any further along this code flow path.
                    return setCacheEntry(curFlowNode, undefined, /* isIncomplete */ false);
                }
            }

            // Is the current symbol modified in any way within the scope of the branch?
            // If not, we can skip all processing within the branch scope.
            if (reference && branchFlowNode.preBranchAntecedent && branchFlowNode.affectedExpressions) {
                if (!subexpressionReferenceKeys) {
                    subexpressionReferenceKeys = createKeysForReferenceSubexpressions(reference);
                }

                if (
                    !subexpressionReferenceKeys.some((key) =>
                        branchFlowNode.affectedExpressions!.has(key)
                    ) &&
                    isFlowNodeReachable(curFlowNode, branchFlowNode.preBranchAntecedent)
                ) {
                    curFlowNode = branchFlowNode.preBranchAntecedent;
                    continue;
                }
            }

            const labelNode = curFlowNode as FlowLabel;
            const typesToCombine: Type[] = [];

            let sawIncomplete = false;

            // Set the cache entry to undefined before evaluating the
            // expression in case it depends on itself.
            setCacheEntry(curFlowNode, reference ? undefined : initialType, /* isIncomplete */ true);

            labelNode.antecedents.forEach((antecedent) => {
                const flowTypeResult = getTypeFromFlowNode(
                    antecedent,
                    reference,
                    targetSymbolId,
                    initialType,
                    isInitialTypeIncomplete
                );

                if (flowTypeResult.isIncomplete) {
                    sawIncomplete = true;
                }

                if (flowTypeResult.type) {
                    typesToCombine.push(flowTypeResult.type);
                }
            });

            const effectiveType =
                !!reference || typesToCombine.length > 0 ? combineTypes(typesToCombine) : undefined;

            // Limit the number of recursive calls before we give up and call the type
            // complete. This can theoretically result in incorrect type information in
            // very complex code flows, but it's preferable to extremely long analysis times.
            if (codeFlowInvocations - codeFlowInvocationsAtStart > maxCodeFlowInvocationsPerLoop) {
                sawIncomplete = false;
            }

            return setCacheEntry(curFlowNode, effectiveType, sawIncomplete);
        }

        if (curFlowNode.flags & FlowFlags.LoopLabel) {
            const loopNode = curFlowNode as FlowLabel;

            // Is the current symbol modified in any way within the loop? If not, we can skip all
            // processing within the loop and assume that the type comes from the first antecedent,
            // which feeds the loop.
            if (reference) {
                if (!subexpressionReferenceKeys) {
                    subexpressionReferenceKeys = createKeysForReferenceSubexpressions(reference);
                }

                if (!subexpressionReferenceKeys.some((key) => loopNode.affectedExpressions!.has(key))) {
                    curFlowNode = loopNode.antecedents[0];
                    continue;
                }
            }

            let sawIncomplete = false;

            // See if we've been here before. If so, there will be an incomplete cache entry.
            let cacheEntry = getCacheEntry(curFlowNode);
            let typeAtStart: Type | undefined;

            if (cacheEntry === undefined) {
                // We haven't been here before, so create a new incomplete cache entry.
                cacheEntry = setCacheEntry(
                    curFlowNode,
                    reference ? undefined : initialType,
                    /* isIncomplete */ true
                );
            } else {
                typeAtStart = cacheEntry.type;
            }

            const isRecursive =
                cacheEntry.incompleteSubtypes !== undefined &&
                cacheEntry.incompleteSubtypes.some((subtype) => subtype.isPending);
            const visitCount = incrementFlowNodeVisitCount(curFlowNode);

            // If every subtype is already pending evaluation, do not bother
            // trying to further evaluate. Instead, unwind the stack and allow
            // the existing evaluations to complete.
            if (isRecursive && cacheEntry.incompleteSubtypes?.every((subtype) => subtype.isPending)) {
                return {
                    type: cacheEntry.type,
                    isIncomplete: true,
                };
            }

            loopNode.antecedents.forEach((antecedent, index) => {
                // Re-fetch; recursive evaluation may have updated the entry.
                cacheEntry = getCacheEntry(curFlowNode)!;

                // Have we already been here (i.e. does the entry exist and is
                // not marked "pending")? If so, we can use the type that was already
                // computed if it is complete.
                const subtypeEntry =
                    cacheEntry.incompleteSubtypes !== undefined &&
                    index < cacheEntry.incompleteSubtypes.length
                        ? cacheEntry.incompleteSubtypes[index]
                        : undefined;
                if (
                    subtypeEntry === undefined ||
                    (!subtypeEntry?.isPending && subtypeEntry?.isIncomplete)
                ) {
                    // Set this entry to "pending" to prevent infinite recursion.
                    // We'll mark it "not pending" below.
                    cacheEntry = setIncompleteSubtype(
                        curFlowNode,
                        index,
                        subtypeEntry?.type ?? (reference ? undefined : initialType),
                        /* isIncomplete */ true,
                        /* isPending */ true
                    );

                    try {
                        const flowTypeResult = getTypeFromFlowNode(
                            antecedent,
                            reference,
                            targetSymbolId,
                            initialType,
                            isInitialTypeIncomplete
                        );

                        if (flowTypeResult.isIncomplete) {
                            sawIncomplete = true;
                        }

                        cacheEntry = setIncompleteSubtype(
                            curFlowNode,
                            index,
                            flowTypeResult.type,
                            flowTypeResult.isIncomplete,
                            /* isPending */ false
                        );
                    } catch (e) {
                        // Clear the pending flag so a later evaluation can retry.
                        setIncompleteSubtype(
                            curFlowNode,
                            index,
                            undefined,
                            /* isIncomplete */ true,
                            /* isPending */ false
                        );
                        throw e;
                    }
                }
            });

            if (isRecursive) {
                // This was not the first time through the loop, so we are recursively trying
                // to resolve other parts of the incomplete type. It will be marked complete
                // once the stack pops back up to the first caller.

                // If we have visited the loop node maxFlowNodeLoopVisitCount times already
                // and some of the subtypes are still incomplete, bail and base the
                // isIncomplete flag on the first subtype, which is the one that feeds
                // the top of the loop.
                let isIncomplete =
                    visitCount >= maxFlowNodeLoopVisitCount
                        ? cacheEntry.incompleteSubtypes![0].isIncomplete
                        : reference !== undefined;

                // Limit the number of recursive calls before we give up and call the type
                // complete. This can theoretically result in incorrect type information in
                // very complex code flows, but it's preferable to extremely long analysis times.
                if (codeFlowInvocations - codeFlowInvocationsAtStart > maxCodeFlowInvocationsPerLoop) {
                    isIncomplete = false;
                }

                return {
                    type: cacheEntry.type,
                    isIncomplete,
                };
            }

            // If we've been here more than once and the type has converged (didn't change
            // since last time), assume that the type is complete.
            if (sawIncomplete && typeAtStart && cacheEntry.type) {
                if (isTypeSame(typeAtStart, cacheEntry.type)) {
                    // The type was the same more than two times, so it is not oscillating
                    // or changing. It's safe to conclude that additional times through
                    // the loop won't cause it to change further.
                    // NOTE(review): convergence is tracked on the outer `flowNode`
                    // argument, not `curFlowNode` — confirm this is intended.
                    if (incrementFlowNodeConvergenceCount(flowNode) > 2) {
                        sawIncomplete = false;
                    }
                } else {
                    // The type changed since last time, so reset the convergence count.
                    incrementFlowNodeConvergenceCount(flowNode, /* reset */ true);
                }
            }

            // The result is incomplete if one or more entries were incomplete.
            if (sawIncomplete) {
                // If there is an "Unknown" type within a union type, remove
                // it. Otherwise we might end up resolving the cycle with a type
                // that includes an undesirable unknown.
                // Note that we return isIncomplete = false here but do not
                // save the cached entry for next time.
                return {
                    type: cacheEntry?.type ? removeUnknownFromUnion(cacheEntry.type) : undefined,
                    isIncomplete: false,
                };
            }

            // We have made it all the way through all the antecedents, and we can
            // mark the type as complete.
            return setCacheEntry(curFlowNode, cacheEntry!.type, /* isIncomplete */ false);
        }

        if (curFlowNode.flags & (FlowFlags.TrueCondition | FlowFlags.FalseCondition)) {
            const conditionalFlowNode = curFlowNode as FlowCondition;

            if (reference) {
                // Before calling getTypeNarrowingCallback, set the type
                // of this flow node in the cache to prevent recursion.
                setCacheEntry(curFlowNode, reference ? undefined : initialType, /* isIncomplete */ true);

                try {
                    const typeNarrowingCallback = getTypeNarrowingCallback(
                        evaluator,
                        reference,
                        conditionalFlowNode.expression,
                        !!(
                            conditionalFlowNode.flags &
                            (FlowFlags.TrueCondition | FlowFlags.TrueNeverCondition)
                        )
                    );

                    if (typeNarrowingCallback) {
                        const flowTypeResult = getTypeFromFlowNode(
                            conditionalFlowNode.antecedent,
                            reference,
                            targetSymbolId,
                            initialType,
                            isInitialTypeIncomplete
                        );
                        let flowType = flowTypeResult.type;
                        if (flowType) {
                            flowType = typeNarrowingCallback(flowType);
                        }

                        return setCacheEntry(curFlowNode, flowType, flowTypeResult.isIncomplete);
                    }

                    deleteCacheEntry(curFlowNode);
                } catch (e) {
                    // We don't use finally here because the debugger
                    // doesn't handle it well during single stepping.
                    deleteCacheEntry(curFlowNode);
                    throw e;
                }
            }

            curFlowNode = conditionalFlowNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & (FlowFlags.TrueNeverCondition | FlowFlags.FalseNeverCondition)) {
            const conditionalFlowNode = curFlowNode as FlowCondition;
            if (conditionalFlowNode.reference) {
                // Don't allow apply if the conditional expression references the expression
                // we're already narrowing. This case will be handled by the TrueCondition
                // or FalseCondition node.
                if (createKeyForReference(conditionalFlowNode.reference) !== referenceKey) {
                    // Make sure the reference type has a declared type. If not,
                    // don't bother trying to infer its type because that would be
                    // too expensive.
                    const symbolWithScope = evaluator.lookUpSymbolRecursive(
                        conditionalFlowNode.reference,
                        conditionalFlowNode.reference.value,
                        /* honorCodeFlow */ false
                    );
                    if (symbolWithScope && symbolWithScope.symbol.getTypedDeclarations().length > 0) {
                        // Before calling getTypeNarrowingCallback, set the type
                        // of this flow node in the cache to prevent recursion.
                        setCacheEntry(
                            curFlowNode,
                            reference ? undefined : initialType,
                            /* isIncomplete */ true
                        );

                        try {
                            const typeNarrowingCallback = getTypeNarrowingCallback(
                                evaluator,
                                conditionalFlowNode.reference,
                                conditionalFlowNode.expression,
                                !!(
                                    conditionalFlowNode.flags &
                                    (FlowFlags.TrueCondition | FlowFlags.TrueNeverCondition)
                                )
                            );

                            if (typeNarrowingCallback) {
                                const refTypeInfo = evaluator.getTypeOfExpression(
                                    conditionalFlowNode.reference!
                                );
                                const narrowedType =
                                    typeNarrowingCallback(refTypeInfo.type) || refTypeInfo.type;

                                // If the narrowed type is "never", don't allow further exploration.
                                if (isNever(narrowedType)) {
                                    return setCacheEntry(
                                        curFlowNode,
                                        undefined,
                                        !!refTypeInfo.isIncomplete
                                    );
                                }
                            }

                            deleteCacheEntry(curFlowNode);
                        } catch (e) {
                            // We don't use finally here because the debugger
                            // doesn't handle it well during single stepping.
                            deleteCacheEntry(curFlowNode);
                            throw e;
                        }
                    }
                }
            }
            curFlowNode = conditionalFlowNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & FlowFlags.ExhaustedMatch) {
            const exhaustedMatchFlowNode = curFlowNode as FlowExhaustedMatch;
            const narrowedTypeResult = evaluator.evaluateTypeForSubnode(exhaustedMatchFlowNode.node, () => {
                evaluator.evaluateTypesForMatchNode(exhaustedMatchFlowNode.node);
            });

            // If the narrowed type is "never", don't allow further exploration.
            if (narrowedTypeResult && isNever(narrowedTypeResult.type)) {
                return setCacheEntry(curFlowNode, undefined, !!narrowedTypeResult.isIncomplete);
            }

            curFlowNode = exhaustedMatchFlowNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & FlowFlags.NarrowForPattern) {
            const patternFlowNode = curFlowNode as FlowNarrowForPattern;
            if (!reference || isMatchingExpression(reference, patternFlowNode.subjectExpression)) {
                const typeResult = evaluator.evaluateTypeForSubnode(patternFlowNode.statement, () => {
                    if (patternFlowNode.statement.nodeType === ParseNodeType.Case) {
                        evaluator.evaluateTypesForCaseNode(patternFlowNode.statement);
                    } else {
                        evaluator.evaluateTypesForMatchNode(patternFlowNode.statement);
                    }
                });
                if (typeResult) {
                    if (!reference) {
                        if (isNever(typeResult.type)) {
                            return setCacheEntry(curFlowNode, undefined, !!typeResult.isIncomplete);
                        }
                    } else {
                        return setCacheEntry(curFlowNode, typeResult.type, !!typeResult.isIncomplete);
                    }
                }
            }
            curFlowNode = patternFlowNode.antecedent;
            continue;
        }

        if (curFlowNode.flags & FlowFlags.PreFinallyGate) {
            const preFinallyFlowNode = curFlowNode as FlowPreFinallyGate;
            if (preFinallyFlowNode.isGateClosed) {
                return { type: undefined, isIncomplete: false };
            }

            // Before recursively calling, set the cache entry to prevent infinite recursion.
            setCacheEntry(curFlowNode, reference ? undefined : initialType, /* isIncomplete */ true);

            try {
                const flowTypeResult = getTypeFromFlowNode(
                    preFinallyFlowNode.antecedent,
                    reference,
                    targetSymbolId,
                    initialType,
                    isInitialTypeIncomplete
                );

                // We want to cache the type only if we're evaluating the "gate closed" path.
                deleteCacheEntry(curFlowNode);

                return {
                    type: flowTypeResult.type,
                    isIncomplete: flowTypeResult.isIncomplete,
                };
            } catch (e) {
                deleteCacheEntry(curFlowNode);
                throw e;
            }
        }

        if (curFlowNode.flags & FlowFlags.PostFinally) {
            const postFinallyFlowNode = curFlowNode as FlowPostFinally;
            const wasGateClosed = postFinallyFlowNode.preFinallyGate.isGateClosed;
            try {
                postFinallyFlowNode.preFinallyGate.isGateClosed = true;
                let flowTypeResult: FlowNodeTypeResult | undefined;

                // Use speculative mode for the remainder of the finally suite
                // because the final types within this parse node block should be
                // evaluated when the gate is open.
                evaluator.useSpeculativeMode(postFinallyFlowNode.finallyNode, () => {
                    flowTypeResult = getTypeFromFlowNode(
                        postFinallyFlowNode.antecedent,
                        reference,
                        targetSymbolId,
                        initialType,
                        isInitialTypeIncomplete
                    );
                });

                // If the type is incomplete, don't write back to the cache.
                return flowTypeResult!.isIncomplete
                    ? flowTypeResult!
                    : setCacheEntry(curFlowNode, flowTypeResult!.type, /* isIncomplete */ false);
            } finally {
                postFinallyFlowNode.preFinallyGate.isGateClosed = wasGateClosed;
            }
        }

        if (curFlowNode.flags & FlowFlags.Start) {
            return setCacheEntry(curFlowNode, initialType, isInitialTypeIncomplete);
        }

        if (curFlowNode.flags & FlowFlags.WildcardImport) {
            const wildcardImportFlowNode = curFlowNode as FlowWildcardImport;
            if (reference && reference.nodeType === ParseNodeType.Name) {
                const nameValue = reference.value;
                if (wildcardImportFlowNode.names.some((name) => name === nameValue)) {
                    // Before calling getTypeFromWildcardImport, set the cache entry to prevent infinite recursion.
                    setCacheEntry(
                        curFlowNode,
                        reference ? undefined : initialType,
                        /* isIncomplete */ true
                    );

                    try {
                        const type = getTypeFromWildcardImport(wildcardImportFlowNode, nameValue);
                        return setCacheEntry(curFlowNode, type, /* isIncomplete */ false);
                    } catch (e) {
                        deleteCacheEntry(curFlowNode);
                        throw e;
                    }
                }
            }

            curFlowNode = wildcardImportFlowNode.antecedent;
            continue;
        }

        // We shouldn't get here.
        fail('Unexpected flow node flags');
        return setCacheEntry(curFlowNode, undefined, /* isIncomplete */ false);
    }
}

if (!flowNode) {
    // This should happen only in cases where we're evaluating
    // parse nodes that are created after the initial parse
    // (namely, string literals that are used for forward
    // referenced types).
    return {
        type: initialType,
        isIncomplete: isInitialTypeIncomplete,
    };
}

return getTypeFromFlowNode(flowNode, reference, targetSymbolId, initialType, isInitialTypeIncomplete);
}

return {
    getTypeFromCodeFlow,
};
}

// Determines whether the specified flowNode can be reached by any
// control flow path within the execution context. If sourceFlowNode
// is specified, it returns true only if at least one control flow
// path passes through sourceFlowNode.
function isFlowNodeReachable(flowNode: FlowNode, sourceFlowNode?: FlowNode): boolean {
    // Tracks flow node IDs already explored within this reachability query.
    const visitedFlowNodeMap = new Set();

    function isFlowNodeReachableRecursive(
        flowNode: FlowNode,
        sourceFlowNode: FlowNode | undefined,
        recursionCount = 0
    ): boolean {
        // Cut off the recursion at some point to prevent a stack overflow.
        const maxFlowNodeReachableRecursionCount = 64;
        if (recursionCount > maxFlowNodeReachableRecursionCount) {
            // Assume reachable when the graph is too deep to fully explore.
            return true;
        }
        recursionCount++;

        let curFlowNode = flowNode;

        while (true) {
            // If we've already visited this node, we can assume
            // it wasn't reachable.
            if (visitedFlowNodeMap.has(curFlowNode.id)) {
                return false;
            }

            // Note that we've been here before.
            visitedFlowNodeMap.add(curFlowNode.id);

            if (curFlowNode.flags & FlowFlags.Unreachable) {
                return false;
            }

            if (curFlowNode === sourceFlowNode) {
                return true;
            }

            // Pass-through nodes: simply walk to the antecedent.
            if (
                curFlowNode.flags &
                (FlowFlags.VariableAnnotation |
                    FlowFlags.Assignment |
                    FlowFlags.TrueCondition |
                    FlowFlags.FalseCondition |
                    FlowFlags.WildcardImport |
                    FlowFlags.TrueNeverCondition |
                    FlowFlags.FalseNeverCondition |
                    FlowFlags.NarrowForPattern |
                    FlowFlags.ExhaustedMatch)
            ) {
                const typedFlowNode = curFlowNode as
                    | FlowVariableAnnotation
                    | FlowAssignment
                    | FlowCondition
                    | FlowWildcardImport
                    | FlowCondition
                    | FlowExhaustedMatch;
                curFlowNode = typedFlowNode.antecedent;
                continue;
            }

            if (curFlowNode.flags & FlowFlags.Call) {
                const callFlowNode = curFlowNode as FlowCall;

                // If this function returns a "NoReturn" type, that means
                // it always raises an exception or otherwise doesn't return,
                // so we can assume that the code before this is unreachable.
                if (isCallNoReturn(callFlowNode.node)) {
                    return false;
                }

                curFlowNode = callFlowNode.antecedent;
                continue;
            }

            if (curFlowNode.flags & (FlowFlags.BranchLabel | FlowFlags.LoopLabel)) {
                if (curFlowNode.flags & FlowFlags.PostContextManager) {
                    // Determine whether any of the context managers support exception
                    // suppression. If not, none of its antecedents are reachable.
                    const contextMgrNode = curFlowNode as FlowPostContextManagerLabel;
                    if (
                        !contextMgrNode.expressions.some((expr) =>
                            isExceptionContextManager(expr, contextMgrNode.isAsync)
                        )
                    ) {
                        return false;
                    }
                }

                // A label is reachable if any of its antecedents is reachable.
                const labelNode = curFlowNode as FlowLabel;
                for (const antecedent of labelNode.antecedents) {
                    if (isFlowNodeReachableRecursive(antecedent, sourceFlowNode, recursionCount)) {
                        return true;
                    }
                }
                return false;
            }

            if (curFlowNode.flags & FlowFlags.Start) {
                // If we hit the start but were looking for a particular source flow
                // node, return false. Otherwise, the start is what we're looking for.
                return sourceFlowNode ? false : true;
            }

            if (curFlowNode.flags & FlowFlags.PreFinallyGate) {
                const preFinallyFlowNode = curFlowNode as FlowPreFinallyGate;
                return !preFinallyFlowNode.isGateClosed;
            }

            if (curFlowNode.flags & FlowFlags.PostFinally) {
                const postFinallyFlowNode = curFlowNode as FlowPostFinally;
                const wasGateClosed = postFinallyFlowNode.preFinallyGate.isGateClosed;

                // Temporarily close the gate while exploring the finally path,
                // restoring it afterward.
                try {
                    postFinallyFlowNode.preFinallyGate.isGateClosed = true;
                    return isFlowNodeReachableRecursive(
                        postFinallyFlowNode.antecedent,
                        sourceFlowNode,
                        recursionCount
                    );
                } finally {
                    postFinallyFlowNode.preFinallyGate.isGateClosed = wasGateClosed;
                }
            }

            // We shouldn't get here.
            fail('Unexpected flow node flags');
            return false;
        }
    }

    // Protect against infinite recursion.
    if (isReachableRecursionMap.has(flowNode.id)) {
        return true;
    }
    isReachableRecursionMap.set(flowNode.id, true);

    try {
        return isFlowNodeReachableRecursive(flowNode, sourceFlowNode);
    } finally {
        isReachableRecursionMap.delete(flowNode.id);
    }
}

// Determines whether the specified typeVar, which is assumed to be constrained,
// can be narrowed to one of its constrained types based on isinstance type
// guard checks.
function narrowConstrainedTypeVar(flowNode: FlowNode, typeVar: TypeVarType): ClassType | undefined {
    assert(!typeVar.details.isParamSpec);
    assert(!typeVar.details.isVariadic);
    assert(!typeVar.details.boundType);
    assert(typeVar.details.constraints.length > 0);

    const visitedFlowNodeMap = new Set();
    const startingConstraints: ClassType[] = [];

    for (const constraint of typeVar.details.constraints) {
        if (isClassInstance(constraint)) {
            startingConstraints.push(constraint);
        } else {
            // If one or more constraints are Unknown, Any, union types, etc.,
            // we can't narrow them.
            return undefined;
        }
    }

    // Walks the code flow graph backward from `flowNode`, returning the subset
    // of the TypeVar's constraints that remain possible along the explored paths.
    function narrowConstrainedTypeVarRecursive(flowNode: FlowNode, typeVar: TypeVarType): ClassType[] {
        let curFlowNode = flowNode;

        while (true) {
            if (visitedFlowNodeMap.has(curFlowNode.id)) {
                return startingConstraints;
            }

            if (curFlowNode.flags & (FlowFlags.Unreachable | FlowFlags.Start)) {
                return startingConstraints;
            }

            // Pass-through nodes: simply walk to the antecedent.
            if (
                curFlowNode.flags &
                (FlowFlags.VariableAnnotation |
                    FlowFlags.Assignment |
                    FlowFlags.WildcardImport |
                    FlowFlags.TrueNeverCondition |
                    FlowFlags.FalseNeverCondition |
                    FlowFlags.NarrowForPattern |
                    FlowFlags.ExhaustedMatch |
                    FlowFlags.PostFinally |
                    FlowFlags.PreFinallyGate |
                    FlowFlags.Call)
            ) {
                const typedFlowNode = curFlowNode as
                    | FlowVariableAnnotation
                    | FlowAssignment
                    | FlowWildcardImport
                    | FlowExhaustedMatch
                    | FlowNarrowForPattern
                    | FlowPostFinally
                    | FlowPreFinallyGate
                    | FlowCall;
                curFlowNode = typedFlowNode.antecedent;
                continue;
            }

            if (curFlowNode.flags & (FlowFlags.TrueCondition | FlowFlags.FalseCondition)) {
                const conditionFlowNode = curFlowNode as FlowCondition;
                const testExpression = conditionFlowNode.expression;
                const isPositiveTest = (curFlowNode.flags & FlowFlags.TrueCondition) !== 0;

                // Only a two-argument isinstance(...) call can narrow a
                // constrained TypeVar here.
                if (
                    testExpression.nodeType === ParseNodeType.Call &&
                    testExpression.leftExpression.nodeType === ParseNodeType.Name &&
                    testExpression.leftExpression.value === 'isinstance' &&
                    testExpression.arguments.length === 2
                ) {
                    const arg0Expr = testExpression.arguments[0].valueExpression;

                    const arg0Type = evaluator.getTypeOfExpression(arg0Expr).type;

                    if (isCompatibleWithConstrainedTypeVar(arg0Type, typeVar)) {
                        // Prevent infinite recursion by noting that we've been here before.
                        visitedFlowNodeMap.add(curFlowNode.id);
                        const priorRemainingConstraints = narrowConstrainedTypeVarRecursive(
                            conditionFlowNode.antecedent,
                            typeVar
                        );
                        visitedFlowNodeMap.delete(curFlowNode.id);

                        const arg1Expr = testExpression.arguments[1].valueExpression;
                        const arg1Type = evaluator.getTypeOfExpression(
                            arg1Expr,
                            undefined,
                            EvaluatorFlags.EvaluateStringLiteralAsType |
                                EvaluatorFlags.ParamSpecDisallowed |
                                EvaluatorFlags.TypeVarTupleDisallowed
                        ).type;

                        if (isInstantiableClass(arg1Type)) {
                            // Keep (or remove) constraints matching the isinstance
                            // class, depending on the branch polarity.
                            return priorRemainingConstraints.filter((subtype) => {
                                if (ClassType.isSameGenericClass(subtype, arg1Type)) {
                                    return isPositiveTest;
                                } else {
                                    return !isPositiveTest;
                                }
                            });
                        }
                    }
                }

                curFlowNode = conditionFlowNode.antecedent;
                continue;
            }

            if (curFlowNode.flags & (FlowFlags.BranchLabel | FlowFlags.LoopLabel)) {
                const labelNode = curFlowNode as FlowLabel;
                const newConstraints: ClassType[] = [];

                // Prevent infinite recursion by noting that we've been here before.
                visitedFlowNodeMap.add(curFlowNode.id);
                for (const antecedent of labelNode.antecedents) {
                    const constraintsToAdd = narrowConstrainedTypeVarRecursive(antecedent, typeVar);

                    // Union the constraints seen along each antecedent, de-duplicated.
                    for (const constraint of constraintsToAdd) {
                        if (!newConstraints.some((t) => isTypeSame(t, constraint))) {
                            newConstraints.push(constraint);
                        }
                    }
                }
                visitedFlowNodeMap.delete(curFlowNode.id);

                return newConstraints;
            }

            // We shouldn't get here.
            fail('Unexpected flow node flags');
            return startingConstraints;
        }
    }

    const narrowedConstrainedType = narrowConstrainedTypeVarRecursive(flowNode, typeVar);

    // Have we narrowed the typeVar to a single constraint?
    return narrowedConstrainedType.length === 1 ? narrowedConstrainedType[0] : undefined;
}

// Determines whether a specified type is the same as a constrained
// TypeVar or is conditioned on that same TypeVar or is some union of
// the above.
+ function isCompatibleWithConstrainedTypeVar(type: Type, typeVar: TypeVarType) { + let isCompatible = true; + doForEachSubtype(type, (subtype) => { + if (isTypeVar(subtype)) { + if (!isTypeSame(subtype, typeVar)) { + isCompatible = false; + } + } else if (subtype.condition) { + if ( + !subtype.condition.some( + (condition) => condition.isConstrainedTypeVar && condition.typeVarName === typeVar.nameWithScope + ) + ) { + isCompatible = false; + } + } else { + isCompatible = false; + } + }); + + return isCompatible; + } + + // Performs a cursory analysis to determine whether a call never returns + // without fully evaluating its type. This is done during code flow, + // so it can't rely on full type analysis. It makes some simplifying + // assumptions that work fine in practice. + function isCallNoReturn(node: CallNode) { + // See if this information is cached already. + if (callIsNoReturnCache.has(node.id)) { + return callIsNoReturnCache.get(node.id); + } + + // Initially set to false to avoid infinite recursion. + callIsNoReturnCache.set(node.id, false); + + let noReturnTypeCount = 0; + let subtypeCount = 0; + + // Evaluate the call base type. + const callType = getDeclaredCallBaseType(node.leftExpression); + if (callType) { + doForEachSubtype(callType, (callSubtype) => { + // Track the number of subtypes we've examined. + subtypeCount++; + + let functionType: FunctionType | undefined; + if (isInstantiableClass(callSubtype)) { + // Does the class have a custom metaclass that implements a `__call__` method? + // If so, it will be called instead of `__init__` or `__new__`. We'll assume + // in this case that the __call__ method is not a NoReturn type. 
+ if ( + callSubtype.details.effectiveMetaclass && + isClass(callSubtype.details.effectiveMetaclass) && + !ClassType.isBuiltIn(callSubtype.details.effectiveMetaclass, 'type') + ) { + const metaclassCallMember = lookUpClassMember( + callSubtype.details.effectiveMetaclass, + '__call__', + ClassMemberLookupFlags.SkipInstanceVariables | ClassMemberLookupFlags.SkipObjectBaseClass + ); + if (metaclassCallMember) { + return; + } + } + + let constructorMember = lookUpClassMember( + callSubtype, + '__init__', + ClassMemberLookupFlags.SkipInstanceVariables | ClassMemberLookupFlags.SkipObjectBaseClass + ); + + if (constructorMember === undefined) { + constructorMember = lookUpClassMember( + callSubtype, + '__new__', + ClassMemberLookupFlags.SkipInstanceVariables | ClassMemberLookupFlags.SkipObjectBaseClass + ); + } + + if (constructorMember) { + const constructorType = evaluator.getTypeOfMember(constructorMember); + if (constructorType) { + if (isFunction(constructorType) || isOverloadedFunction(constructorType)) { + const boundConstructorType = evaluator.bindFunctionToClassOrObject( + undefined, + constructorType + ); + if (boundConstructorType) { + callSubtype = boundConstructorType; + } + } + } + } + } else if (isClassInstance(callSubtype)) { + const callMember = lookUpClassMember( + callSubtype, + '__call__', + ClassMemberLookupFlags.SkipInstanceVariables + ); + if (callMember) { + const callMemberType = evaluator.getTypeOfMember(callMember); + if (callMemberType) { + if (isFunction(callMemberType) || isOverloadedFunction(callMemberType)) { + const boundCallType = evaluator.bindFunctionToClassOrObject(undefined, callMemberType); + if (boundCallType) { + callSubtype = boundCallType; + } + } + } + } + } + + if (isFunction(callSubtype)) { + functionType = callSubtype; + } else if (isOverloadedFunction(callSubtype)) { + // Use the last overload, which should be the most general. 
+ const overloadedFunction = callSubtype; + functionType = overloadedFunction.overloads[overloadedFunction.overloads.length - 1]; + } + + if (functionType) { + const returnType = functionType.details.declaredReturnType; + if (FunctionType.isAsync(functionType)) { + if ( + returnType && + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, 'Coroutine') && + returnType.typeArguments && + returnType.typeArguments.length >= 3 + ) { + if (isNever(returnType.typeArguments[2])) { + if (node.parent?.nodeType === ParseNodeType.Await) { + noReturnTypeCount++; + } + } + } + } else if (returnType) { + if (isNever(returnType)) { + noReturnTypeCount++; + } + } else if (functionType.details.declaration) { + // If the function has yield expressions, it's a generator, and + // we'll assume the yield statements are reachable. Also, don't + // infer a "no return" type for abstract methods. + if ( + !functionType.details.declaration.yieldStatements && + !FunctionType.isAbstractMethod(functionType) && + !FunctionType.isStubDefinition(functionType) && + !FunctionType.isPyTypedDefinition(functionType) + ) { + // Check specifically for a common idiom where the only statement + // (other than a possible docstring) is a "raise NotImplementedError". 
+ const functionStatements = functionType.details.declaration.node.suite.statements; + + let foundRaiseNotImplemented = false; + for (const statement of functionStatements) { + if ( + statement.nodeType !== ParseNodeType.StatementList || + statement.statements.length !== 1 + ) { + break; + } + + const simpleStatement = statement.statements[0]; + if (simpleStatement.nodeType === ParseNodeType.StringList) { + continue; + } + + if ( + simpleStatement.nodeType === ParseNodeType.Raise && + simpleStatement.typeExpression + ) { + // Check for "raise NotImplementedError" or "raise NotImplementedError()" + const isNotImplementedName = (node: ParseNode) => { + return ( + node?.nodeType === ParseNodeType.Name && + node.value === 'NotImplementedError' + ); + }; + + if (isNotImplementedName(simpleStatement.typeExpression)) { + foundRaiseNotImplemented = true; + } else if ( + simpleStatement.typeExpression.nodeType === ParseNodeType.Call && + isNotImplementedName(simpleStatement.typeExpression.leftExpression) + ) { + foundRaiseNotImplemented = true; + } + } + + break; + } + + if ( + !foundRaiseNotImplemented && + !evaluator.isAfterNodeReachable(functionType.details.declaration.node) + ) { + noReturnTypeCount++; + } + } + } + } + }); + } + + // The call is considered NoReturn if all subtypes evaluate to NoReturn. + const callIsNoReturn = subtypeCount > 0 && noReturnTypeCount === subtypeCount; + + // Cache the value for next time. + callIsNoReturnCache.set(node.id, callIsNoReturn); + + return callIsNoReturn; + } + + // Performs a cursory analysis to determine whether the expression + // corresponds to a context manager object that supports the swallowing + // of exceptions. By convention, these objects have an "__exit__" method + // that returns a bool response (as opposed to a None). This function is + // called during code flow, so it can't rely on full type evaluation. It + // makes some simplifying assumptions that work in most cases. 
+    function isExceptionContextManager(node: ExpressionNode, isAsync: boolean) {
+        // See if this information is cached already.
+        if (isExceptionContextManagerCache.has(node.id)) {
+            return isExceptionContextManagerCache.get(node.id);
+        }
+
+        // Initially set to false to avoid infinite recursion.
+        isExceptionContextManagerCache.set(node.id, false);
+
+        let cmSwallowsExceptions = false;
+        let cmType: Type | undefined;
+
+        if (node.nodeType === ParseNodeType.Call) {
+            const callType = getDeclaredCallBaseType(node.leftExpression);
+            if (callType) {
+                if (isInstantiableClass(callType)) {
+                    cmType = convertToInstance(callType);
+                } else if (isFunction(callType)) {
+                    cmType = callType.details.declaredReturnType;
+                } else if (isOverloadedFunction(callType)) {
+                    // Handle the overloaded case. As a simple heuristic, we'll simply
+                    // look at the first overloaded signature and ignore the remainder.
+                    // This works for pytest.raises, which is a common case.
+                    const firstOverload = callType.overloads.find((overload) => FunctionType.isOverloaded(overload));
+                    if (firstOverload) {
+                        cmType = firstOverload.details.declaredReturnType;
+                    }
+                }
+            }
+        } else if (node.nodeType === ParseNodeType.Name) {
+            cmType = evaluator.getDeclaredTypeForExpression(node);
+        }
+
+        if (cmType && isClassInstance(cmType)) {
+            const exitMethodName = isAsync ? '__aexit__' : '__exit__';
+            const exitType = evaluator.getTypeFromObjectMember(node, cmType, exitMethodName)?.type;
+
+            if (exitType && isFunction(exitType) && exitType.details.declaredReturnType) {
+                const returnType = exitType.details.declaredReturnType;
+                cmSwallowsExceptions = isClassInstance(returnType) && ClassType.isBuiltIn(returnType, 'bool');
+            }
+        }
+
+        // Cache the value for next time.
+ isExceptionContextManagerCache.set(node.id, cmSwallowsExceptions); + + return cmSwallowsExceptions; + } + + function getTypeFromWildcardImport(flowNode: FlowWildcardImport, name: string): Type { + const importInfo = getImportInfo(flowNode.node.module); + assert(importInfo !== undefined && importInfo.isImportFound); + assert(flowNode.node.isWildcardImport); + + const symbolWithScope = evaluator.lookUpSymbolRecursive(flowNode.node, name, /* honorCodeFlow */ false); + assert(symbolWithScope !== undefined); + const decls = symbolWithScope!.symbol.getDeclarations(); + const wildcardDecl = decls.find((decl) => decl.node === flowNode.node); + + if (!wildcardDecl) { + return UnknownType.create(); + } + + return evaluator.getInferredTypeOfDeclaration(symbolWithScope!.symbol, wildcardDecl) || UnknownType.create(); + } + + function getDeclaredTypeOfSymbol(symbol: Symbol, isBeyondExecutionScope: boolean): Type | undefined { + const type = evaluator.getDeclaredTypeOfSymbol(symbol); + if (type) { + return type; + } + + // There was no declared type. Before we give up, see if the + // symbol is a function parameter whose value can be inferred + // or an imported symbol. + // Use the last declaration that is not within an except suite. + const declarations = symbol.getDeclarations().filter((decl) => !decl.isInExceptSuite); + if (declarations.length === 0) { + return undefined; + } + + const decl = declarations[declarations.length - 1]; + if (decl.type === DeclarationType.Parameter) { + return evaluator.evaluateTypeForSubnode(decl.node.name!, () => { + evaluator.evaluateTypeOfParameter(decl.node); + })?.type; + } + + // If it is a symbol from an outer execution scope or an alias, it + // is safe to infer its type. Calling this under other circumstances + // can result in extreme performance degradation and stack overflows. 
+ if (decl.type === DeclarationType.Alias || isBeyondExecutionScope) { + return evaluator.getInferredTypeOfDeclaration(symbol, decl); + } + + return undefined; + } + + // When we're evaluating a call to determine whether it returns NoReturn, + // we don't want to do a full type evaluation, which would be expensive + // and create circular dependencies in type evaluation. Instead, we do + // a best-effort evaluation using only declared types (functions, parameters, + // etc.). + function getDeclaredCallBaseType(node: ExpressionNode): Type | undefined { + if (node.nodeType === ParseNodeType.Name) { + const symbolWithScope = evaluator.lookUpSymbolRecursive(node, node.value, /* honorCodeFlow */ false); + if (!symbolWithScope) { + return undefined; + } + + return getDeclaredTypeOfSymbol(symbolWithScope.symbol, symbolWithScope.isBeyondExecutionScope); + } + + if (node.nodeType === ParseNodeType.MemberAccess) { + const memberName = node.memberName.value; + let baseType = getDeclaredCallBaseType(node.leftExpression); + if (!baseType) { + return undefined; + } + + baseType = evaluator.makeTopLevelTypeVarsConcrete(baseType); + + const declaredTypeOfSymbol = mapSubtypes(baseType, (subtype) => { + let symbol: Symbol | undefined; + if (isModule(subtype)) { + symbol = ModuleType.getField(subtype, memberName); + } else if (isClass(subtype)) { + const classMemberInfo = lookUpClassMember(subtype, memberName); + symbol = classMemberInfo ? classMemberInfo.symbol : undefined; + } + + if (!symbol) { + return UnknownType.create(); + } + + // We want to limit the evaluation to declared types only, so + // we use getDeclaredTypeOfSymbol rather than getEffectiveTypeOfSymbol. + // Set isBeyondExecutionScope to false so we don't attempt to infer + // the symbol type. This can lead to very bad performance. + return getDeclaredTypeOfSymbol(symbol, /* isBeyondExecutionScope */ false) ?? 
UnknownType.create(); + }); + + if (!isNever(declaredTypeOfSymbol)) { + return declaredTypeOfSymbol; + } + } + + if (node.nodeType === ParseNodeType.Call) { + const baseType = getDeclaredCallBaseType(node.leftExpression); + if (!baseType) { + return undefined; + } + + if (baseType && isInstantiableClass(baseType)) { + const inst = convertToInstance(baseType); + return inst; + } + + if (isFunction(baseType)) { + return baseType.details.declaredReturnType; + } + } + + return undefined; + } + + return { + createCodeFlowAnalyzer, + isFlowNodeReachable, + narrowConstrainedTypeVar, + }; +} diff --git a/packages/pyright-internal/src/analyzer/codeFlow.ts b/packages/pyright-internal/src/analyzer/codeFlowTypes.ts similarity index 94% rename from packages/pyright-internal/src/analyzer/codeFlow.ts rename to packages/pyright-internal/src/analyzer/codeFlowTypes.ts index 9fda741cf59f..424aabb3f146 100644 --- a/packages/pyright-internal/src/analyzer/codeFlow.ts +++ b/packages/pyright-internal/src/analyzer/codeFlowTypes.ts @@ -1,5 +1,5 @@ /* - * codeFlow.ts + * codeFlowTypes.ts * Copyright (c) Microsoft Corporation. * Licensed under the MIT license. 
* Author: Eric Traut @@ -43,7 +43,6 @@ export enum FlowFlags { Call = 1 << 10, // Call node PreFinallyGate = 1 << 11, // Injected edge that links pre-finally label and pre-try flow PostFinally = 1 << 12, // Injected edge that links post-finally flow with the rest of the graph - AssignmentAlias = 1 << 13, // Assigned symbol is aliased to another symbol with the same name VariableAnnotation = 1 << 14, // Separates a variable annotation from its name node PostContextManager = 1 << 15, // Label that's used for context managers that suppress exceptions TrueNeverCondition = 1 << 16, // Condition whose type evaluates to never when narrowed in positive test @@ -93,18 +92,6 @@ export interface FlowAssignment extends FlowNode { targetSymbolId: number; } -// FlowAssignmentAlias handles a case where a symbol -// takes on the value of a symbol with the same name -// but within an outer scope, such as when a variable -// is references within a list comprehension iteration -// expression before the result is assigned to a -// local variable of the same name. -export interface FlowAssignmentAlias extends FlowNode { - antecedent: FlowNode; - targetSymbolId: number; - aliasSymbolId: number; -} - // FlowVariableAnnotation separates a variable annotation // node from its type annotation. For example, the declaration // "foo: bar", the "bar" needs to be associated with a flow @@ -169,6 +156,12 @@ export interface FlowPostFinally extends FlowNode { export interface FlowPostContextManagerLabel extends FlowLabel { expressions: ExpressionNode[]; isAsync: boolean; + + // If the context manager swallows exceptions and this value + // is true, block any code flow analysis along this path. Conversely, + // if the context manager does not swallow exceptions and this + // value is false, block any code flow analysis along this path. 
+ blockIfSwallowsExceptions: boolean; } export function isCodeFlowSupportedForReference(reference: ExpressionNode): boolean { diff --git a/packages/pyright-internal/src/analyzer/commentUtils.ts b/packages/pyright-internal/src/analyzer/commentUtils.ts index 3e0fab5cfa85..2541fe15afe2 100644 --- a/packages/pyright-internal/src/analyzer/commentUtils.ts +++ b/packages/pyright-internal/src/analyzer/commentUtils.ts @@ -12,11 +12,13 @@ import { cloneDiagnosticRuleSet, DiagnosticLevel, DiagnosticRuleSet, + getBasicDiagnosticRuleSet, getBooleanDiagnosticRules, getDiagLevelDiagnosticRules, getStrictDiagnosticRuleSet, getStrictModeNotOverriddenRules, } from '../common/configOptions'; +import { DiagnosticRule } from '../common/diagnosticRules'; import { TextRangeCollection } from '../common/textRangeCollection'; import { Token } from '../parser/tokenizerTypes'; @@ -46,10 +48,20 @@ export function getFileLevelDirectives( } function _applyStrictRules(ruleSet: DiagnosticRuleSet) { - const strictRuleSet = getStrictDiagnosticRuleSet(); + _overrideRules(ruleSet, getStrictDiagnosticRuleSet(), getStrictModeNotOverriddenRules()); +} + +function _applyBasicRules(ruleSet: DiagnosticRuleSet) { + _overrideRules(ruleSet, getBasicDiagnosticRuleSet(), []); +} + +function _overrideRules( + ruleSet: DiagnosticRuleSet, + overrideRuleSet: DiagnosticRuleSet, + skipRuleNames: DiagnosticRule[] +) { const boolRuleNames = getBooleanDiagnosticRules(); const diagRuleNames = getDiagLevelDiagnosticRules(); - const skipRuleNames = getStrictModeNotOverriddenRules(); // Enable the strict rules as appropriate. 
for (const ruleName of boolRuleNames) { @@ -57,7 +69,7 @@ function _applyStrictRules(ruleSet: DiagnosticRuleSet) { continue; } - if ((strictRuleSet as any)[ruleName]) { + if ((overrideRuleSet as any)[ruleName]) { (ruleSet as any)[ruleName] = true; } } @@ -67,15 +79,16 @@ function _applyStrictRules(ruleSet: DiagnosticRuleSet) { continue; } - const strictValue: DiagnosticLevel = (strictRuleSet as any)[ruleName]; + const overrideValue: DiagnosticLevel = (overrideRuleSet as any)[ruleName]; const prevValue: DiagnosticLevel = (ruleSet as any)[ruleName]; + // Override only if the new value is more strict than the existing value. if ( - strictValue === 'error' || - (strictValue === 'warning' && prevValue !== 'error') || - (strictValue === 'information' && prevValue !== 'error' && prevValue !== 'warning') + overrideValue === 'error' || + (overrideValue === 'warning' && prevValue !== 'error') || + (overrideValue === 'information' && prevValue !== 'error' && prevValue !== 'warning') ) { - (ruleSet as any)[ruleName] = strictValue; + (ruleSet as any)[ruleName] = overrideValue; } } } @@ -92,6 +105,8 @@ function _parsePyrightComment(commentValue: string, ruleSet: DiagnosticRuleSet) // diagnostic rules with their strict counterparts. if (operandList.some((s) => s === 'strict')) { _applyStrictRules(ruleSet); + } else if (operandList.some((s) => s === 'basic')) { + _applyBasicRules(ruleSet); } for (const operand of operandList) { diff --git a/packages/pyright-internal/src/analyzer/constructorTransform.ts b/packages/pyright-internal/src/analyzer/constructorTransform.ts new file mode 100644 index 000000000000..a3f45add78a9 --- /dev/null +++ b/packages/pyright-internal/src/analyzer/constructorTransform.ts @@ -0,0 +1,317 @@ +/* + * constructorTransform.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that transforms a newly-created object after a call to the + * constructor is evaluated. 
It allows for special-case behavior that + * cannot otherwise be described in the Python type system. + * + */ + +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { Localizer } from '../localization/localize'; +import { ArgumentCategory, ExpressionNode, ParameterCategory } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { Symbol, SymbolFlags } from './symbol'; +import { FunctionArgument, FunctionResult, TypeEvaluator } from './typeEvaluatorTypes'; +import { ClassType, FunctionParameter, FunctionType, isClassInstance, isFunction, isTypeSame } from './types'; +import { + applySolvedTypeVars, + convertToInstance, + getParameterListDetails, + getTypeVarScopeId, + lookUpObjectMember, + ParameterSource, +} from './typeUtils'; +import { TypeVarMap } from './typeVarMap'; + +export function applyConstructorTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: FunctionArgument[], + classType: ClassType, + result: FunctionResult +): FunctionResult { + if (classType.details.fullName === 'functools.partial') { + return applyPartialTransform(evaluator, errorNode, argList, result); + } + + // By default, return the result unmodified. + return result; +} + +// Applies a transform for the functools.partial class constructor. +function applyPartialTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: FunctionArgument[], + result: FunctionResult +): FunctionResult { + // We assume that the normal return result is a functools.partial class instance. 
+ if (!isClassInstance(result.returnType) || result.returnType.details.fullName !== 'functools.partial') { + return result; + } + + const callMemberResult = lookUpObjectMember(result.returnType, '__call__'); + if (!callMemberResult || !isTypeSame(convertToInstance(callMemberResult.classType), result.returnType)) { + return result; + } + + const callMemberType = evaluator.getTypeOfMember(callMemberResult); + if (!isFunction(callMemberType) || callMemberType.details.parameters.length < 1) { + return result; + } + + if (argList.length < 1) { + return result; + } + + const origFunctionTypeResult = evaluator.getTypeForArgument(argList[0]); + const origFunctionType = origFunctionTypeResult.type; + + // Evaluate the inferred return type if necessary. + evaluator.inferReturnTypeIfNecessary(origFunctionType); + + // Make sure the first argument is a simple function. + // We don't currently handle overloaded functions. + if (!isFunction(origFunctionType)) { + return result; + } + + // We don't currently handle unpacked arguments. + if (argList.some((arg) => arg.argumentCategory !== ArgumentCategory.Simple)) { + return result; + } + + // Create a map to track which parameters have supplied arguments. + const paramMap = new Map(); + + const paramListDetails = getParameterListDetails(origFunctionType); + + // Verify the types of the provided arguments. + let argumentErrors = false; + let reportedPositionalError = false; + const typeVarMap = new TypeVarMap(getTypeVarScopeId(origFunctionType)); + + const remainingArgsList = argList.slice(1); + remainingArgsList.forEach((arg, argIndex) => { + const argTypeResult = evaluator.getTypeForArgument(arg); + + // Is it a positional argument or a keyword argument? + if (!arg.name) { + // Does this positional argument map to a positional parameter? 
+ if ( + argIndex >= paramListDetails.params.length || + paramListDetails.params[argIndex].source === ParameterSource.KeywordOnly + ) { + if (paramListDetails.argsIndex !== undefined) { + const paramType = FunctionType.getEffectiveParameterType( + origFunctionType, + paramListDetails.params[paramListDetails.argsIndex].index + ); + const diag = new DiagnosticAddendum(); + + if (!evaluator.canAssignType(paramType, argTypeResult.type, diag, typeVarMap)) { + evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.details.name, + paramName: paramListDetails.params[paramListDetails.argsIndex].param.name ?? '', + }), + arg.valueExpression ?? errorNode + ); + + argumentErrors = true; + } + } else { + // Don't report multiple positional errors. + if (!reportedPositionalError) { + evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + paramListDetails.positionParamCount === 1 + ? Localizer.Diagnostic.argPositionalExpectedOne() + : Localizer.Diagnostic.argPositionalExpectedCount().format({ + expected: paramListDetails.positionParamCount, + }), + arg.valueExpression ?? errorNode + ); + } + + reportedPositionalError = true; + argumentErrors = true; + } + } else { + const paramType = FunctionType.getEffectiveParameterType(origFunctionType, argIndex); + const diag = new DiagnosticAddendum(); + const paramName = paramListDetails.params[argIndex].param.name ?? 
''; + + if (!evaluator.canAssignType(paramType, argTypeResult.type, diag, typeVarMap)) { + evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.details.name, + paramName, + }), + arg.valueExpression ?? errorNode + ); + + argumentErrors = true; + } + + // Mark the parameter as assigned. + paramMap.set(paramName, false); + } + } else { + const matchingParam = paramListDetails.params.find( + (paramInfo) => + paramInfo.param.name === arg.name?.value && paramInfo.source !== ParameterSource.PositionOnly + ); + + if (!matchingParam) { + // Is there a kwargs parameter? + if (paramListDetails.kwargsIndex === undefined) { + evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.paramNameMissing().format({ name: arg.name.value }), + arg.name + ); + argumentErrors = true; + } else { + const paramType = FunctionType.getEffectiveParameterType( + origFunctionType, + paramListDetails.params[paramListDetails.kwargsIndex].index + ); + const diag = new DiagnosticAddendum(); + + if (!evaluator.canAssignType(paramType, argTypeResult.type, diag, typeVarMap)) { + evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.argAssignmentParamFunction().format({ + argType: evaluator.printType(argTypeResult.type), + paramType: evaluator.printType(paramType), + functionName: origFunctionType.details.name, + paramName: paramListDetails.params[paramListDetails.kwargsIndex].param.name ?? '', + }), + arg.valueExpression ?? 
errorNode
+                        );
+
+                        argumentErrors = true;
+                    }
+                }
+            } else {
+                const paramName = matchingParam.param.name!;
+                const paramType = FunctionType.getEffectiveParameterType(origFunctionType, matchingParam.index);
+
+                if (paramMap.has(paramName)) {
+                    evaluator.addDiagnostic(
+                        getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues,
+                        DiagnosticRule.reportGeneralTypeIssues,
+                        Localizer.Diagnostic.paramAlreadyAssigned().format({ name: arg.name.value }),
+                        arg.name
+                    );
+
+                    argumentErrors = true;
+                } else {
+                    const diag = new DiagnosticAddendum();
+
+                    if (!evaluator.canAssignType(paramType, argTypeResult.type, diag, typeVarMap)) {
+                        evaluator.addDiagnostic(
+                            getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues,
+                            DiagnosticRule.reportGeneralTypeIssues,
+                            Localizer.Diagnostic.argAssignmentParamFunction().format({
+                                argType: evaluator.printType(argTypeResult.type),
+                                paramType: evaluator.printType(paramType),
+                                functionName: origFunctionType.details.name,
+                                paramName,
+                            }),
+                            arg.valueExpression ?? errorNode
+                        );
+
+                        argumentErrors = true;
+                    }
+                    paramMap.set(paramName, true);
+                }
+            }
+        }
+    });
+
+    const specializedFunctionType = applySolvedTypeVars(origFunctionType, typeVarMap);
+    if (!isFunction(specializedFunctionType)) {
+        return result;
+    }
+
+    // Create a new parameter list that omits parameters that have been
+    // populated already.
+    const updatedParamList: FunctionParameter[] = specializedFunctionType.details.parameters.map((param, index) => {
+        const specializedParam: FunctionParameter = { ...param };
+        specializedParam.type = FunctionType.getEffectiveParameterType(specializedFunctionType, index);
+
+        // If it's a keyword parameter that has been assigned a value through
+        // the "partial" mechanism, mark it as having a default value.
+ if (param.name && paramMap.get(param.name)) { + specializedParam.hasDefault = true; + } + return specializedParam; + }); + const unassignedParamList = updatedParamList.filter((param) => { + if (param.category === ParameterCategory.VarArgDictionary) { + return false; + } + if (param.category === ParameterCategory.VarArgList) { + return true; + } + return !param.name || !paramMap.has(param.name); + }); + const assignedKeywordParamList = updatedParamList.filter((param) => { + return param.name && paramMap.get(param.name); + }); + const kwargsParam = updatedParamList.filter((param) => { + return param.category === ParameterCategory.VarArgDictionary; + }); + + const newParamList = [...unassignedParamList, ...assignedKeywordParamList, ...kwargsParam]; + + // Create a new __call__ method that uses the remaining parameters. + const newCallMemberType = FunctionType.createInstance( + callMemberType.details.name, + callMemberType.details.fullName, + callMemberType.details.moduleName, + callMemberType.details.flags, + specializedFunctionType.details.docString + ); + + if (callMemberType.details.parameters.length > 0) { + FunctionType.addParameter(newCallMemberType, callMemberType.details.parameters[0]); + } + newParamList.forEach((param) => { + FunctionType.addParameter(newCallMemberType, param); + }); + + newCallMemberType.details.declaredReturnType = specializedFunctionType.details.declaredReturnType + ? FunctionType.getSpecializedReturnType(specializedFunctionType) + : specializedFunctionType.inferredReturnType; + newCallMemberType.details.declaration = callMemberType.details.declaration; + newCallMemberType.details.typeVarScopeId = specializedFunctionType.details.typeVarScopeId; + + // Create a new copy of the functools.partial class that overrides the __call__ method. 
+ const newPartialClass = ClassType.cloneForSymbolTableUpdate(result.returnType); + newPartialClass.details.fields.set('__call__', Symbol.createWithType(SymbolFlags.ClassMember, newCallMemberType)); + + return { + returnType: newPartialClass, + isTypeIncomplete: false, + argumentErrors, + }; +} diff --git a/packages/pyright-internal/src/analyzer/dataClasses.ts b/packages/pyright-internal/src/analyzer/dataClasses.ts index ff4e491c696f..8aee2af2e4ab 100644 --- a/packages/pyright-internal/src/analyzer/dataClasses.ts +++ b/packages/pyright-internal/src/analyzer/dataClasses.ts @@ -27,7 +27,7 @@ import { DeclarationType } from './declaration'; import { updateNamedTupleBaseClass } from './namedTuples'; import { evaluateStaticBoolExpression } from './staticExpressions'; import { Symbol, SymbolFlags } from './symbol'; -import { FunctionArgument, TypeEvaluator } from './typeEvaluatorTypes'; +import { EvaluatorFlags, FunctionArgument, TypeEvaluator } from './typeEvaluatorTypes'; import { AnyType, ClassType, @@ -43,6 +43,7 @@ import { isInstantiableClass, isOverloadedFunction, NoneType, + TupleTypeArgument, Type, UnknownType, } from './types'; @@ -50,10 +51,15 @@ import { applySolvedTypeVars, buildTypeVarMapFromSpecializedClass, convertToInstance, + getTypeVarScopeId, isLiteralType, + lookUpObjectMember, + populateTypeVarMapForSelfType, + requiresSpecialization, specializeTupleClass, synthesizeTypeVarForSelfCls, } from './typeUtils'; +import { TypeVarMap } from './typeVarMap'; // Validates fields for compatibility with a dataclass and synthesizes // an appropriate __new__ and __init__ methods plus __dataclass_fields__ @@ -62,7 +68,9 @@ export function synthesizeDataClassMethods( evaluator: TypeEvaluator, node: ClassNode, classType: ClassType, - skipSynthesizeInit: boolean + skipSynthesizeInit: boolean, + hasExistingInitMethod: boolean, + skipSynthesizeHash: boolean ) { assert(ClassType.isDataClass(classType)); @@ -146,7 +154,11 @@ export function synthesizeDataClassMethods( // 
If the RHS of the assignment is assigning a field instance where the // "init" parameter is set to false, do not include it in the init method. if (statement.rightExpression.nodeType === ParseNodeType.Call) { - const callType = evaluator.getTypeOfExpression(statement.rightExpression.leftExpression).type; + const callType = evaluator.getTypeOfExpression( + statement.rightExpression.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; if ( isDataclassFieldConstructor( callType, @@ -222,13 +234,18 @@ export function synthesizeDataClassMethods( } } - hasDefaultValue = statement.rightExpression.arguments.some( + const defaultArg = statement.rightExpression.arguments.find( (arg) => arg.name?.value === 'default' || arg.name?.value === 'default_factory' || arg.name?.value === 'factory' ); + hasDefaultValue = !!defaultArg; + if (defaultArg?.valueExpression) { + defaultValueExpression = defaultArg.valueExpression; + } + const aliasArg = statement.rightExpression.arguments.find( (arg) => arg.name?.value === 'alias' ); @@ -288,6 +305,7 @@ export function synthesizeDataClassMethods( } const dataClassEntry: DataClassEntry = { name: variableName, + classType, alias: aliasName, isKeywordOnly: false, hasDefault: hasDefaultValue, @@ -303,6 +321,7 @@ export function synthesizeDataClassMethods( // allows us to handle circular references in types. const dataClassEntry: DataClassEntry = { name: variableName, + classType, alias: aliasName, isKeywordOnly, hasDefault: hasDefaultValue, @@ -324,6 +343,16 @@ export function synthesizeDataClassMethods( // Add the new entry to the full entry list. insertIndex = fullDataClassEntries.findIndex((p) => p.name === variableName); if (insertIndex >= 0) { + const oldEntry = fullDataClassEntries[insertIndex]; + + // While this isn't documented behavior, it appears that the dataclass implementation + // causes overridden variables to "inherit" default values from parent classes. 
+ if (!dataClassEntry.hasDefault && oldEntry.hasDefault) { + dataClassEntry.hasDefault = true; + dataClassEntry.defaultValueExpression = oldEntry.defaultValueExpression; + hasDefaultValue = true; + } + fullDataClassEntries[insertIndex] = dataClassEntry; } else { fullDataClassEntries.push(dataClassEntry); @@ -360,15 +389,28 @@ export function synthesizeDataClassMethods( const symbolTable = classType.details.fields; const keywordOnlyParams: FunctionParameter[] = []; - if (!skipSynthesizeInit && allAncestorsKnown) { + if (!skipSynthesizeInit && !hasExistingInitMethod && allAncestorsKnown) { fullDataClassEntries.forEach((entry) => { if (entry.includeInInit) { + // If the type refers to Self of the parent class, we need to + // transform it to refer to the Self of this subclass. + let effectiveType = entry.type; + if (entry.classType !== classType && requiresSpecialization(effectiveType)) { + const typeVarMap = new TypeVarMap(getTypeVarScopeId(entry.classType)); + populateTypeVarMapForSelfType(typeVarMap, entry.classType, classType); + effectiveType = applySolvedTypeVars(effectiveType, typeVarMap); + } + + if (classType.details.dataClassBehaviors?.transformDescriptorTypes) { + effectiveType = transformDescriptorType(evaluator, effectiveType); + } + const functionParam: FunctionParameter = { category: ParameterCategory.Simple, name: entry.alias || entry.name, hasDefault: entry.hasDefault, defaultValueExpression: entry.defaultValueExpression, - type: entry.type, + type: effectiveType, hasDeclaredType: true, }; @@ -406,13 +448,13 @@ export function synthesizeDataClassMethods( ) { const matchArgsNames: string[] = []; fullDataClassEntries.forEach((entry) => { - if (entry.includeInInit) { + if (entry.includeInInit && !entry.isKeywordOnly) { // Use the field name, not its alias (if it has one). 
matchArgsNames.push(entry.name); } }); - const literalTypes = matchArgsNames.map((name) => { - return ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, name)); + const literalTypes: TupleTypeArgument[] = matchArgsNames.map((name) => { + return { type: ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, name)), isUnbounded: false }; }); const matchArgsType = ClassType.cloneAsInstance(specializeTupleClass(tupleClassType, literalTypes)); symbolTable.set('__match_args__', Symbol.createWithType(SymbolFlags.ClassMember, matchArgsType)); @@ -423,7 +465,7 @@ export function synthesizeDataClassMethods( FunctionType.addParameter(operatorMethod, selfParam); FunctionType.addParameter(operatorMethod, { category: ParameterCategory.Simple, - name: 'x', + name: 'other', type: paramType, hasDeclaredType: true, }); @@ -443,6 +485,30 @@ export function synthesizeDataClassMethods( }); } + let synthesizeHashFunction = + !ClassType.isSkipSynthesizedDataClassEq(classType) && ClassType.isFrozenDataClass(classType); + const synthesizeHashNone = + !ClassType.isSkipSynthesizedDataClassEq(classType) && !ClassType.isFrozenDataClass(classType); + + if (skipSynthesizeHash) { + synthesizeHashFunction = false; + } + + // If the user has indicated that a hash function should be generated even if it's unsafe + // to do so or there is already a hash function present, override the default logic. 
+ if (ClassType.isSynthesizeDataClassUnsafeHash(classType)) { + synthesizeHashFunction = true; + } + + if (synthesizeHashFunction) { + const hashMethod = FunctionType.createInstance('__hash__', '', '', FunctionTypeFlags.SynthesizedMethod); + FunctionType.addParameter(hashMethod, selfParam); + hashMethod.details.declaredReturnType = evaluator.getBuiltInObject(node, 'int'); + symbolTable.set('__hash__', Symbol.createWithType(SymbolFlags.ClassMember, hashMethod)); + } else if (synthesizeHashNone && !skipSynthesizeHash) { + symbolTable.set('__hash__', Symbol.createWithType(SymbolFlags.ClassMember, NoneType.createInstance())); + } + let dictType = evaluator.getBuiltInType(node, 'dict'); if (isInstantiableClass(dictType)) { dictType = ClassType.cloneAsInstance( @@ -455,6 +521,10 @@ export function synthesizeDataClassMethods( } symbolTable.set('__dataclass_fields__', Symbol.createWithType(SymbolFlags.ClassMember, dictType)); + if (ClassType.isGeneratedDataClassSlots(classType) && classType.details.localSlotsNames === undefined) { + classType.details.localSlotsNames = localDataClassEntries.map((entry) => entry.name); + } + // If this dataclass derived from a NamedTuple, update the NamedTuple with // the specialized entry types. updateNamedTupleBaseClass( @@ -464,6 +534,33 @@ export function synthesizeDataClassMethods( ); } +// If the specified type is a descriptor — in particular, if it implements a +// __set__ method, this method transforms the type into the input parameter +// for the set method. 
+function transformDescriptorType(evaluator: TypeEvaluator, type: Type): Type { + if (!isClassInstance(type)) { + return type; + } + + const setMethodInfo = lookUpObjectMember(type, '__set__'); + if (!setMethodInfo) { + return type; + } + + const setMethodType = evaluator.getTypeOfMember(setMethodInfo); + if (!isFunction(setMethodType)) { + return type; + } + + const boundSetMethod = evaluator.bindFunctionToClassOrObject(type, setMethodType); + if (!boundSetMethod || !isFunction(boundSetMethod) || boundSetMethod.details.parameters.length < 2) { + return type; + } + + // The value parameter for a bound __set__ method is parameter index 1. + return FunctionType.getEffectiveParameterType(boundSetMethod, 1); +} + // Builds a sorted list of dataclass entries that are inherited by // the specified class. These entries must be unique and in reverse-MRO // order. Returns true if all of the class types in the hierarchy are @@ -528,17 +625,13 @@ function isDataclassFieldConstructor(type: Type, fieldDescriptorNames: string[]) export function validateDataClassTransformDecorator( evaluator: TypeEvaluator, - node: ExpressionNode + node: CallNode ): DataClassBehaviors | undefined { - if (node.nodeType !== ParseNodeType.Call) { - // TODO - emit diagnostic - return undefined; - } - const behaviors: DataClassBehaviors = { keywordOnlyParams: false, generateEq: true, generateOrder: false, + transformDescriptorTypes: false, fieldDescriptorNames: [], }; @@ -546,13 +639,8 @@ export function validateDataClassTransformDecorator( // Parse the arguments to the call. 
node.arguments.forEach((arg) => { - if (!arg.name) { - // TODO - emit diagnostic - return; - } - - if (arg.argumentCategory !== ArgumentCategory.Simple) { - // TODO - emit diagnostic + if (!arg.name || arg.argumentCategory !== ArgumentCategory.Simple) { + evaluator.addError(Localizer.Diagnostic.dataClassTransformPositionalParam(), arg); return; } @@ -560,7 +648,10 @@ export function validateDataClassTransformDecorator( case 'kw_only_default': { const value = evaluateStaticBoolExpression(arg.valueExpression, fileInfo.executionEnvironment); if (value === undefined) { - // TODO - emit diagnostic + evaluator.addError( + Localizer.Diagnostic.dataClassTransformExpectedBoolLiteral(), + arg.valueExpression + ); return; } @@ -571,7 +662,10 @@ export function validateDataClassTransformDecorator( case 'eq_default': { const value = evaluateStaticBoolExpression(arg.valueExpression, fileInfo.executionEnvironment); if (value === undefined) { - // TODO - emit diagnostic + evaluator.addError( + Localizer.Diagnostic.dataClassTransformExpectedBoolLiteral(), + arg.valueExpression + ); return; } @@ -582,7 +676,10 @@ export function validateDataClassTransformDecorator( case 'order_default': { const value = evaluateStaticBoolExpression(arg.valueExpression, fileInfo.executionEnvironment); if (value === undefined) { - // TODO - emit diagnostic + evaluator.addError( + Localizer.Diagnostic.dataClassTransformExpectedBoolLiteral(), + arg.valueExpression + ); return; } @@ -590,6 +687,20 @@ export function validateDataClassTransformDecorator( break; } + case 'transform_descriptor_types': { + const value = evaluateStaticBoolExpression(arg.valueExpression, fileInfo.executionEnvironment); + if (value === undefined) { + evaluator.addError( + Localizer.Diagnostic.dataClassTransformExpectedBoolLiteral(), + arg.valueExpression + ); + return; + } + + behaviors.transformDescriptorTypes = value; + break; + } + case 'field_descriptors': { const valueType = 
evaluator.getTypeOfExpression(arg.valueExpression).type; if ( @@ -597,10 +708,18 @@ export function validateDataClassTransformDecorator( !ClassType.isBuiltIn(valueType, 'tuple') || !valueType.tupleTypeArguments || valueType.tupleTypeArguments.some( - (entry) => !isInstantiableClass(entry) && !isFunction(entry) && !isOverloadedFunction(entry) + (entry) => + !isInstantiableClass(entry.type) && + !isFunction(entry.type) && + !isOverloadedFunction(entry.type) ) ) { - // TODO - emit diagnostic + evaluator.addError( + Localizer.Diagnostic.dataClassTransformFieldDescriptor().format({ + type: evaluator.printType(valueType), + }), + arg.valueExpression + ); return; } @@ -608,17 +727,20 @@ export function validateDataClassTransformDecorator( behaviors.fieldDescriptorNames = []; } valueType.tupleTypeArguments.forEach((arg) => { - if (isInstantiableClass(arg) || isFunction(arg)) { - behaviors.fieldDescriptorNames.push(arg.details.fullName); - } else if (isOverloadedFunction(arg)) { - behaviors.fieldDescriptorNames.push(arg.overloads[0].details.fullName); + if (isInstantiableClass(arg.type) || isFunction(arg.type)) { + behaviors.fieldDescriptorNames.push(arg.type.details.fullName); + } else if (isOverloadedFunction(arg.type)) { + behaviors.fieldDescriptorNames.push(arg.type.overloads[0].details.fullName); } }); break; } default: - // TODO - emit diagnostic + evaluator.addError( + Localizer.Diagnostic.dataClassTransformUnknownArgument().format({ name: arg.name.value }), + arg.valueExpression + ); break; } }); @@ -648,6 +770,7 @@ export function getDataclassDecoratorBehaviors(type: Type): DataClassBehaviors | keywordOnlyParams: false, generateEq: true, generateOrder: false, + transformDescriptorTypes: false, fieldDescriptorNames: ['dataclasses.field', 'dataclasses.Field'], }; } @@ -691,9 +814,12 @@ function applyDataClassBehaviorOverride( if (ClassType.isFrozenDataClass(baseClass)) { hasFrozenBaseClass = true; } else if ( - !baseClass.details.declaredMetaclass || - 
!isInstantiableClass(baseClass.details.declaredMetaclass) || - !baseClass.details.declaredMetaclass.details.metaclassDataClassTransform + !baseClass.details.classDataClassTransform && + !( + baseClass.details.declaredMetaclass && + isInstantiableClass(baseClass.details.declaredMetaclass) && + !!baseClass.details.declaredMetaclass.details.classDataClassTransform + ) ) { // If this base class is unfrozen and isn't the class that directly // references the metaclass that provides dataclass-like behaviors, @@ -734,10 +860,34 @@ function applyDataClassBehaviorOverride( classType.details.flags &= ~ClassTypeFlags.SkipSynthesizedDataClassEq; } break; + + case 'slots': + if (value === true) { + classType.details.flags |= ClassTypeFlags.GenerateDataClassSlots; + + if (classType.details.localSlotsNames) { + evaluator.addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.dataClassSlotsOverwrite(), + errorNode + ); + } + } else if (value === false) { + classType.details.flags &= ~ClassTypeFlags.GenerateDataClassSlots; + } + break; + + case 'hash': + case 'unsafe_hash': + if (value === true) { + classType.details.flags |= ClassTypeFlags.SynthesizeDataClassUnsafeHash; + } + break; } } -export function applyDataClassMetaclassBehaviorOverrides( +export function applyDataClassClassBehaviorOverrides( evaluator: TypeEvaluator, classType: ClassType, args: FunctionArgument[] @@ -775,10 +925,6 @@ export function applyDataClassDecorator( applyDataClassDefaultBehaviors(classType, defaultBehaviors); if (callNode?.arguments) { - callNode.arguments.forEach((arg) => { - if (arg.name && arg.valueExpression) { - applyDataClassBehaviorOverride(evaluator, arg, classType, arg.name.value, arg.valueExpression); - } - }); + applyDataClassClassBehaviorOverrides(evaluator, classType, callNode.arguments); } } diff --git a/packages/pyright-internal/src/analyzer/declaration.ts 
b/packages/pyright-internal/src/analyzer/declaration.ts index d660e62dab20..1549e0669eb0 100644 --- a/packages/pyright-internal/src/analyzer/declaration.ts +++ b/packages/pyright-internal/src/analyzer/declaration.ts @@ -59,6 +59,10 @@ export interface DeclarationBase { // because a source file can be accessed via different // import names in some cases). moduleName: string; + + // The declaration is within an except clause of a try + // statement. We may want to ignore such declarations. + isInExceptSuite: boolean; } export interface IntrinsicDeclaration extends DeclarationBase { @@ -112,6 +116,9 @@ export interface VariableDeclaration extends DeclarationBase { // constant in that reassignment is not permitted)? isFinal?: boolean; + // Is the declaration a "ClassVar"? + isClassVar?: boolean; + // Is the declaration annotated with "Required"? isRequired?: boolean; @@ -121,6 +128,16 @@ export interface VariableDeclaration extends DeclarationBase { // Is the declaration an entry in __slots__? isDefinedBySlots?: boolean; + // For most symbols in a "py.typed" file, type inference is not + // allowed. But in certain cases (as with __match_args__ or __slots__), + // inference is permitted. + isInferenceAllowedInPyTyped?: boolean; + + // Is the declaration using a runtime-evaluated type expression + // rather than an annotation? This is used for TypedDicts, NamedTuples, + // and other complex (more dynamic) class definitions with typed variables. + isRuntimeTypeExpression?: boolean; + // Points to the "TypeAlias" annotation described in PEP 613. typeAliasAnnotation?: ExpressionNode | undefined; @@ -147,6 +164,9 @@ export interface AliasDeclaration extends DeclarationBase { // rename references. usesLocalName: boolean; + // Indicate whether symbols can be loaded from the path. + loadSymbolsFromPath: boolean; + // The name of the symbol being imported (used for "from X import Y" // statements, not applicable to "import X" statements). 
symbolName?: string | undefined; @@ -170,6 +190,10 @@ export interface AliasDeclaration extends DeclarationBase { // Is this a dummy entry for an unresolved import? isUnresolved?: boolean; + + // Is this a dummy entry for an import that cannot be resolved + // directly because it targets a native library? + isNativeLib?: boolean; } // This interface represents a set of actions that the python loader @@ -180,6 +204,9 @@ export interface ModuleLoaderActions { // a directory). path: string; + // Indicate whether symbols can be loaded from the path. + loadSymbolsFromPath: boolean; + // See comment for "implicitImports" field in AliasDeclaration. implicitImports?: Map; } @@ -213,6 +240,10 @@ export function isAliasDeclaration(decl: Declaration): decl is AliasDeclaration return decl.type === DeclarationType.Alias; } -export function isSpecialBuiltInClassDeclarations(decl: Declaration): decl is SpecialBuiltInClassDeclaration { +export function isSpecialBuiltInClassDeclaration(decl: Declaration): decl is SpecialBuiltInClassDeclaration { return decl.type === DeclarationType.SpecialBuiltInClass; } + +export function isIntrinsicDeclaration(decl: Declaration): decl is IntrinsicDeclaration { + return decl.type === DeclarationType.Intrinsic; +} diff --git a/packages/pyright-internal/src/analyzer/declarationUtils.ts b/packages/pyright-internal/src/analyzer/declarationUtils.ts index 40ce586c265b..04f208e5b3e2 100644 --- a/packages/pyright-internal/src/analyzer/declarationUtils.ts +++ b/packages/pyright-internal/src/analyzer/declarationUtils.ts @@ -7,8 +7,9 @@ * Collection of static methods that operate on declarations. 
*/ +import { getEmptyRange } from '../common/textRange'; import { ParseNodeType } from '../parser/parseNodes'; -import { Declaration, DeclarationType, isAliasDeclaration } from './declaration'; +import { AliasDeclaration, Declaration, DeclarationType, isAliasDeclaration, ModuleLoaderActions } from './declaration'; import { getFileInfoFromNode } from './parseTreeUtils'; export function hasTypeForDeclaration(declaration: Declaration): boolean { @@ -52,7 +53,11 @@ export function hasTypeForDeclaration(declaration: Declaration): boolean { } } -export function areDeclarationsSame(decl1: Declaration, decl2: Declaration): boolean { +export function areDeclarationsSame( + decl1: Declaration, + decl2: Declaration, + treatModuleInImportAndFromImportSame = false +): boolean { if (decl1.type !== decl2.type) { return false; } @@ -71,11 +76,22 @@ export function areDeclarationsSame(decl1: Declaration, decl2: Declaration): boo // Alias declarations refer to the entire import statement. // We need to further differentiate. if (decl1.type === DeclarationType.Alias && decl2.type === DeclarationType.Alias) { - if ( - decl1.symbolName !== decl2.symbolName || - decl1.firstNamePart !== decl2.firstNamePart || - decl1.usesLocalName !== decl2.usesLocalName - ) { + if (decl1.symbolName !== decl2.symbolName || decl1.usesLocalName !== decl2.usesLocalName) { + return false; + } + + if (treatModuleInImportAndFromImportSame) { + // Treat "module" in "import [|module|]", "from [|module|] import ..." + // or "from ... import [|module|]" same in IDE services. + // + // Some case such as "from [|module|] import ...", symbol for [|module|] doesn't even + // exist and it can't be referenced inside of a module, but nonetheless, IDE still + // needs these sometimes for things like hover tooltip, highlight references, + // find all references and etc. 
+ return true; + } + + if (decl1.firstNamePart !== decl2.firstNamePart) { return false; } } @@ -162,3 +178,32 @@ export function isDefinedInFile(decl: Declaration, filePath: string) { // Other decls, the path points to the file the symbol is defined in. return decl.path === filePath; } + +export function getDeclarationsWithUsesLocalNameRemoved(decls: Declaration[]) { + // Make a shallow copy and clear the "usesLocalName" field. + return decls.map((localDecl) => { + if (localDecl.type !== DeclarationType.Alias) { + return localDecl; + } + + const nonLocalDecl: AliasDeclaration = { ...localDecl }; + nonLocalDecl.usesLocalName = false; + return nonLocalDecl; + }); +} + +export function createSynthesizedAliasDeclaration(path: string): AliasDeclaration { + // The only time this decl is used is for IDE services such as + // the find all references, hover provider and etc. + return { + type: DeclarationType.Alias, + node: undefined!, + path, + loadSymbolsFromPath: false, + range: getEmptyRange(), + implicitImports: new Map(), + usesLocalName: false, + moduleName: '', + isInExceptSuite: false, + }; +} diff --git a/packages/pyright-internal/src/analyzer/docStringConversion.ts b/packages/pyright-internal/src/analyzer/docStringConversion.ts index 6f120533c0d4..6601ff1f2d55 100644 --- a/packages/pyright-internal/src/analyzer/docStringConversion.ts +++ b/packages/pyright-internal/src/analyzer/docStringConversion.ts @@ -248,13 +248,13 @@ class DocStringConverter { return; } - const line = this.formatPlainTextIndent(this._currentLine()); + const line = this._formatPlainTextIndent(this._currentLine()); this._appendTextLine(line); this._eatLine(); } - private formatPlainTextIndent(line: string) { + private _formatPlainTextIndent(line: string) { const prev = this._lineAt(this._lineNum - 1); const prevIndent = this._prevIndent(); const currIndent = this._currentIndent(); diff --git a/packages/pyright-internal/src/analyzer/functionTransform.ts 
b/packages/pyright-internal/src/analyzer/functionTransform.ts new file mode 100644 index 000000000000..6117e29ce3f7 --- /dev/null +++ b/packages/pyright-internal/src/analyzer/functionTransform.ts @@ -0,0 +1,115 @@ +/* + * functionTransform.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Code that transforms the return result of a function. + * + */ + +import { DiagnosticRule } from '../common/diagnosticRules'; +import { Localizer } from '../localization/localize'; +import { ExpressionNode, ParameterCategory } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { Symbol, SymbolFlags } from './symbol'; +import { FunctionArgument, FunctionResult, TypeEvaluator } from './typeEvaluatorTypes'; +import { + ClassType, + FunctionParameter, + FunctionType, + FunctionTypeFlags, + isClassInstance, + isFunction, + isInstantiableClass, + OverloadedFunctionType, +} from './types'; +import { ClassMemberLookupFlags, lookUpObjectMember, synthesizeTypeVarForSelfCls } from './typeUtils'; + +export function applyFunctionTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: FunctionArgument[], + functionType: FunctionType | OverloadedFunctionType, + result: FunctionResult +): FunctionResult { + if (isFunction(functionType)) { + if (functionType.details.fullName === 'functools.total_ordering') { + return applyTotalOrderingTransform(evaluator, errorNode, argList, result); + } + } + + // By default, return the result unmodified. + return result; +} + +function applyTotalOrderingTransform( + evaluator: TypeEvaluator, + errorNode: ExpressionNode, + argList: FunctionArgument[], + result: FunctionResult +) { + if (argList.length !== 1) { + return result; + } + + // This function is meant to apply to a concrete instantiable class. 
+ const classType = argList[0].type; + if (!classType || !isInstantiableClass(classType) || classType.includeSubclasses) { + return result; + } + + const orderingMethods = ['__lt__', '__le__', '__gt__', '__ge__']; + const instanceType = ClassType.cloneAsInstance(classType); + + // Verify that the class has at least one of the required functions. + const missingMethods = orderingMethods.filter((methodName) => { + return !lookUpObjectMember(instanceType, methodName, ClassMemberLookupFlags.SkipInstanceVariables); + }); + + if (missingMethods.length === orderingMethods.length) { + evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.totalOrderingMissingMethod(), + errorNode + ); + return result; + } + + const objectType = evaluator.getBuiltInObject(errorNode, 'object'); + if (!objectType || !isClassInstance(objectType)) { + return result; + } + + const boolType = evaluator.getBuiltInObject(errorNode, 'bool'); + if (!boolType || !isClassInstance(boolType)) { + return result; + } + + const selfParam: FunctionParameter = { + category: ParameterCategory.Simple, + name: 'self', + type: synthesizeTypeVarForSelfCls(classType, /* isClsParam */ false), + hasDeclaredType: true, + }; + + const objParam: FunctionParameter = { + category: ParameterCategory.Simple, + name: '__value', + type: objectType, + hasDeclaredType: true, + }; + + // Add the missing members to the class's symbol table. 
+ missingMethods.forEach((methodName) => { + const methodToAdd = FunctionType.createInstance(methodName, '', '', FunctionTypeFlags.SynthesizedMethod); + FunctionType.addParameter(methodToAdd, selfParam); + FunctionType.addParameter(methodToAdd, objParam); + methodToAdd.details.declaredReturnType = boolType; + + classType.details.fields.set(methodName, Symbol.createWithType(SymbolFlags.ClassMember, methodToAdd)); + }); + + return result; +} diff --git a/packages/pyright-internal/src/analyzer/importResolver.ts b/packages/pyright-internal/src/analyzer/importResolver.ts index 334668d5d658..cea34651eda4 100644 --- a/packages/pyright-internal/src/analyzer/importResolver.ts +++ b/packages/pyright-internal/src/analyzer/importResolver.ts @@ -10,7 +10,7 @@ import type { Dirent } from 'fs'; -import { getOrAdd } from '../common/collectionUtils'; +import { flatten, getMapValues, getOrAdd } from '../common/collectionUtils'; import { ConfigOptions, ExecutionEnvironment } from '../common/configOptions'; import { FileSystem } from '../common/fileSystem'; import { Host } from '../common/host'; @@ -28,7 +28,10 @@ import { getPathComponents, getRelativePathComponentsFromDirectory, isDirectory, + isDiskPathRoot, isFile, + normalizePath, + normalizePathCase, resolvePaths, stripFileExtension, stripTrailingDirectorySeparator, @@ -41,6 +44,8 @@ import * as StringUtils from '../common/stringUtils'; import { isIdentifierChar, isIdentifierStartChar } from '../parser/characters'; import { PyrightFileSystem } from '../pyrightFileSystem'; import { ImplicitImport, ImportResult, ImportType } from './importResult'; +import { getDirectoryLeadingDotsPointsTo } from './importStatementUtils'; +import { ImportPath, ParentDirectoryCache } from './parentDirectoryCache'; import * as PythonPathUtils from './pythonPathUtils'; import { getPyTypedInfo, PyTypedInfo } from './pyTypedUtils'; import { isDunderName } from './symbolNameUtils'; @@ -58,6 +63,14 @@ export interface ModuleNameAndType { 
isLocalTypingsFile: boolean; } +export function createImportedModuleDescriptor(moduleName: string): ImportedModuleDescriptor { + return { + leadingDots: 0, + nameParts: moduleName.split('.'), + importedSymbols: [], + }; +} + type CachedImportResults = Map; interface SupportedVersionRange { min: PythonVersion; @@ -86,15 +99,21 @@ export class ImportResolver { private _cachedTypeshedThirdPartyPackageRoots: string[] | undefined; private _cachedEntriesForPath = new Map(); + protected cachedParentImportResults: ParentDirectoryCache; + constructor( public readonly fileSystem: FileSystem, protected _configOptions: ConfigOptions, public readonly host: Host - ) {} + ) { + this.cachedParentImportResults = new ParentDirectoryCache(() => this.getPythonSearchPaths([])); + } invalidateCache() { this._cachedImportResults = new Map(); this._cachedModuleNameResults = new Map>(); + this.cachedParentImportResults.reset(); + this._invalidateFileSystemCache(); if (this.fileSystem instanceof PyrightFileSystem) { @@ -123,7 +142,86 @@ export class ImportResolver { ): ImportResult { const importName = this.formatImportName(moduleDescriptor); const importFailureInfo: string[] = []; + const importResult = this._resolveImportStrict( + importName, + sourceFilePath, + execEnv, + moduleDescriptor, + importFailureInfo + ); + + if (importResult.isImportFound || moduleDescriptor.leadingDots > 0) { + return importResult; + } + + // If the import is absolute and no other method works, try resolving the + // absolute in the importing file's directory, then the parent directory, + // and so on, until the import root is reached. + sourceFilePath = normalizePathCase(this.fileSystem, normalizePath(sourceFilePath)); + const origin = ensureTrailingDirectorySeparator(getDirectoryPath(sourceFilePath)); + + const result = this.cachedParentImportResults.getImportResult(origin, importName, importResult); + if (result) { + // Already ran the parent directory resolution for this import name on this location. 
+ return this.filterImplicitImports(result, moduleDescriptor.importedSymbols); + } + + // Check whether the given file is in the parent directory import resolution cache. + const root = this.getParentImportResolutionRoot(sourceFilePath, execEnv.root); + if (!this.cachedParentImportResults.checkValidPath(this.fileSystem, sourceFilePath, root)) { + return importResult; + } + const importPath: ImportPath = { importPath: undefined }; + + // Going up the given folder one by one until we can resolve the import. + let current = origin; + while (this._shouldWalkUp(current, root, execEnv)) { + const result = this.resolveAbsoluteImport( + current, + execEnv, + moduleDescriptor, + importName, + [], + /* allowPartial */ undefined, + /* allowNativeLib */ undefined, + /* useStubPackage */ false, + /* allowPyi */ true + ); + + this.cachedParentImportResults.checked(current, importName, importPath); + + if (result.isImportFound) { + // This will make cache to point to actual path that contains the module we found + importPath.importPath = current; + + this.cachedParentImportResults.add({ + importResult: result, + path: current, + importName, + }); + + return this.filterImplicitImports(result, moduleDescriptor.importedSymbols); + } + + let success; + [success, current] = this._tryWalkUp(current); + if (!success) { + break; + } + } + + this.cachedParentImportResults.checked(current, importName, importPath); + return importResult; + } + + private _resolveImportStrict( + importName: string, + sourceFilePath: string, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor, + importFailureInfo: string[] + ) { const notFoundResult: ImportResult = { importName, isRelative: false, @@ -194,57 +292,107 @@ export class ImportResolver { sourceFilePath: string, execEnv: ExecutionEnvironment, moduleDescriptor: ImportedModuleDescriptor + ) { + const suggestions = this._getCompletionSuggestionsStrict(sourceFilePath, execEnv, moduleDescriptor); + + // We only do parent import 
resolution for absolute path. + if (moduleDescriptor.leadingDots > 0) { + return suggestions; + } + + const root = this.getParentImportResolutionRoot(sourceFilePath, execEnv.root); + const origin = ensureTrailingDirectorySeparator( + getDirectoryPath(normalizePathCase(this.fileSystem, normalizePath(sourceFilePath))) + ); + + let current = origin; + while (this._shouldWalkUp(current, root, execEnv)) { + this._getCompletionSuggestionsAbsolute( + sourceFilePath, + execEnv, + current, + moduleDescriptor, + suggestions, + /*strictOnly*/ false + ); + + let success; + [success, current] = this._tryWalkUp(current); + if (!success) { + break; + } + } + + return suggestions; + } + + private _getCompletionSuggestionsStrict( + sourceFilePath: string, + execEnv: ExecutionEnvironment, + moduleDescriptor: ImportedModuleDescriptor ): Set { const importFailureInfo: string[] = []; const suggestions = new Set(); // Is it a relative import? if (moduleDescriptor.leadingDots > 0) { - this._getCompletionSuggestionsRelative(sourceFilePath, moduleDescriptor, suggestions); + this._getCompletionSuggestionsRelative(sourceFilePath, execEnv, moduleDescriptor, suggestions); } else { // First check for a typeshed file. if (moduleDescriptor.nameParts.length > 0) { - this._getCompletionSuggestionsTypeshedPath(execEnv, moduleDescriptor, true, suggestions); + this._getCompletionSuggestionsTypeshedPath( + sourceFilePath, + execEnv, + moduleDescriptor, + true, + suggestions + ); } // Look for it in the root directory of the execution environment. 
if (execEnv.root) { - this.getCompletionSuggestionsAbsolute( + this._getCompletionSuggestionsAbsolute( + sourceFilePath, + execEnv, execEnv.root, moduleDescriptor, - suggestions, - sourceFilePath, - execEnv + suggestions ); } for (const extraPath of execEnv.extraPaths) { - this.getCompletionSuggestionsAbsolute( + this._getCompletionSuggestionsAbsolute( + sourceFilePath, + execEnv, extraPath, moduleDescriptor, - suggestions, - sourceFilePath, - execEnv + suggestions ); } // Check for a typings file. if (this._configOptions.stubPath) { - this.getCompletionSuggestionsAbsolute(this._configOptions.stubPath, moduleDescriptor, suggestions); + this._getCompletionSuggestionsAbsolute( + sourceFilePath, + execEnv, + this._configOptions.stubPath, + moduleDescriptor, + suggestions + ); } // Check for a typeshed file. - this._getCompletionSuggestionsTypeshedPath(execEnv, moduleDescriptor, false, suggestions); + this._getCompletionSuggestionsTypeshedPath(sourceFilePath, execEnv, moduleDescriptor, false, suggestions); // Look for the import in the list of third-party packages. const pythonSearchPaths = this.getPythonSearchPaths(importFailureInfo); for (const searchPath of pythonSearchPaths) { - this.getCompletionSuggestionsAbsolute( + this._getCompletionSuggestionsAbsolute( + sourceFilePath, + execEnv, searchPath, moduleDescriptor, - suggestions, - sourceFilePath, - execEnv + suggestions ); } } @@ -269,7 +417,9 @@ export class ImportResolver { result.nonStubImportResult.resolvedPaths.length - 1 ]; - if (nonEmptyPath.endsWith('.py')) { + if (nonEmptyPath.endsWith('.py') || nonEmptyPath.endsWith('.pyi')) { + // We allow pyi in case there are multiple pyi for a compiled module such as + // numpy.random.mtrand sourceFilePaths.push(nonEmptyPath); } } @@ -390,6 +540,7 @@ export class ImportResolver { // Look for it in the root directory of the execution environment. 
if (execEnv.root) { moduleName = this.getModuleNameFromPath(execEnv.root, filePath); + importType = ImportType.Local; } for (const extraPath of execEnv.extraPaths) { @@ -669,7 +820,11 @@ export class ImportResolver { // We found fully typed stub packages. if (importResult.packageDirectory) { - return importResult; + // If this is a namespace package that wasn't resolved, assume that + // it's a partial stub package and continue looking for a real package. + if (!importResult.isNamespacePackage || importResult.isImportFound) { + return importResult; + } } } @@ -1038,44 +1193,27 @@ export class ImportResolver { const importName = this.formatImportName(moduleDescriptor); const importFailureInfo: string[] = []; - // First check for a stdlib typeshed file. - if (allowPyi && moduleDescriptor.nameParts.length > 0) { - const builtInImport = this._findTypeshedPath( + // Check for a local stub file using stubPath. + if (allowPyi && this._configOptions.stubPath) { + importFailureInfo.push(`Looking in stubPath '${this._configOptions.stubPath}'`); + const typingsImport = this.resolveAbsoluteImport( + this._configOptions.stubPath, execEnv, moduleDescriptor, importName, - /* isStdLib */ true, - importFailureInfo + importFailureInfo, + /* allowPartial */ undefined, + /* allowNativeLib */ false, + /* useStubPackage */ true, + allowPyi, + /* lookForPyTyped */ false ); - if (builtInImport) { - builtInImport.isTypeshedFile = true; - return builtInImport; - } - } - if (allowPyi) { - // Check for a local stub file using stubPath. 
- if (this._configOptions.stubPath) { - importFailureInfo.push(`Looking in stubPath '${this._configOptions.stubPath}'`); - const typingsImport = this.resolveAbsoluteImport( - this._configOptions.stubPath, - execEnv, - moduleDescriptor, - importName, - importFailureInfo, - /* allowPartial */ undefined, - /* allowNativeLib */ false, - /* useStubPackage */ true, - allowPyi, - /* lookForPyTyped */ false - ); - - if (typingsImport.isImportFound) { - // We will treat typings files as "local" rather than "third party". - typingsImport.importType = ImportType.Local; - typingsImport.isLocalTypingsFile = true; - return typingsImport; - } + if (typingsImport.isImportFound) { + // We will treat typings files as "local" rather than "third party". + typingsImport.importType = ImportType.Local; + typingsImport.isLocalTypingsFile = true; + return typingsImport; } } @@ -1153,13 +1291,14 @@ export class ImportResolver { // If a library is fully py.typed, then we have found the best match, // unless the execution environment is typeshed itself, in which case - // we don't want to favor py.typed libraries the typeshed lookup below. + // we don't want to favor py.typed libraries. Use the typeshed lookup below. if (execEnv.root !== this._getTypeshedRoot(execEnv, importFailureInfo)) { if (bestResultSoFar?.pyTypedInfo && !bestResultSoFar.isPartlyResolved) { return bestResultSoFar; } } + // Call the extensibility hook for subclasses. const extraResults = this.resolveImportEx( sourceFilePath, execEnv, @@ -1168,13 +1307,29 @@ export class ImportResolver { importFailureInfo, allowPyi ); - if (extraResults !== undefined) { + + if (extraResults) { return extraResults; } - if (allowPyi) { + if (allowPyi && moduleDescriptor.nameParts.length > 0) { + // Check for a stdlib typeshed file. 
+ importFailureInfo.push(`Looking for typeshed stdlib path`); + const typeshedStdlibImport = this._findTypeshedPath( + execEnv, + moduleDescriptor, + importName, + /* isStdLib */ true, + importFailureInfo + ); + + if (typeshedStdlibImport) { + typeshedStdlibImport.isTypeshedFile = true; + return typeshedStdlibImport; + } + // Check for a third-party typeshed file. - importFailureInfo.push(`Looking for typeshed path`); + importFailureInfo.push(`Looking for typeshed third-party path`); const typeshedImport = this._findTypeshedPath( execEnv, moduleDescriptor, @@ -1182,9 +1337,10 @@ export class ImportResolver { /* isStdLib */ false, importFailureInfo ); + if (typeshedImport) { typeshedImport.isTypeshedFile = true; - return typeshedImport; + bestResultSoFar = this._pickBestImport(bestResultSoFar, typeshedImport, moduleDescriptor); } } @@ -1217,6 +1373,11 @@ export class ImportResolver { return newImport; } + // Prefer local packages. + if (bestImportSoFar.importType === ImportType.Local && !bestImportSoFar.isNamespacePackage) { + return bestImportSoFar; + } + // If both are namespace imports, select the one that resolves the symbols. if ( bestImportSoFar.isNamespacePackage && @@ -1231,6 +1392,20 @@ export class ImportResolver { } } + // Prefer py.typed over non-py.typed. + if (bestImportSoFar.pyTypedInfo && !newImport.pyTypedInfo) { + return bestImportSoFar; + } else if (!bestImportSoFar.pyTypedInfo && newImport.pyTypedInfo) { + return newImport; + } + + // Prefer pyi over py. + if (bestImportSoFar.isStubFile && !newImport.isStubFile) { + return bestImportSoFar; + } else if (!bestImportSoFar.isStubFile && newImport.isStubFile) { + return newImport; + } + // All else equal, prefer shorter resolution paths. 
if (bestImportSoFar.resolvedPaths.length > newImport.resolvedPaths.length) { return newImport; @@ -1365,6 +1540,7 @@ export class ImportResolver { } private _getCompletionSuggestionsTypeshedPath( + sourceFilePath: string, execEnv: ExecutionEnvironment, moduleDescriptor: ImportedModuleDescriptor, isStdLib: boolean, @@ -1379,7 +1555,18 @@ export class ImportResolver { typeshedPaths = [path]; } } else { - typeshedPaths = this._getThirdPartyTypeshedPackagePaths(moduleDescriptor, execEnv, importFailureInfo); + typeshedPaths = this._getThirdPartyTypeshedPackagePaths( + moduleDescriptor, + execEnv, + importFailureInfo, + /*includeMatchOnly*/ false + ); + + const typeshedPathEx = this.getTypeshedPathEx(execEnv, importFailureInfo); + if (typeshedPathEx) { + typeshedPaths = typeshedPaths ?? []; + typeshedPaths.push(typeshedPathEx); + } } if (!typeshedPaths) { @@ -1388,7 +1575,13 @@ export class ImportResolver { typeshedPaths.forEach((typeshedPath) => { if (this.dirExistsCached(typeshedPath)) { - this.getCompletionSuggestionsAbsolute(typeshedPath, moduleDescriptor, suggestions); + this._getCompletionSuggestionsAbsolute( + sourceFilePath, + execEnv, + typeshedPath, + moduleDescriptor, + suggestions + ); } }); } @@ -1508,7 +1701,8 @@ export class ImportResolver { private _getThirdPartyTypeshedPackagePaths( moduleDescriptor: ImportedModuleDescriptor, execEnv: ExecutionEnvironment, - importFailureInfo: string[] + importFailureInfo: string[], + includeMatchOnly = true ): string[] | undefined { const typeshedPath = this._getThirdPartyTypeshedPath(execEnv, importFailureInfo); @@ -1517,7 +1711,17 @@ export class ImportResolver { } const firstNamePart = moduleDescriptor.nameParts.length > 0 ? 
moduleDescriptor.nameParts[0] : ''; - return this._cachedTypeshedThirdPartyPackagePaths!.get(firstNamePart); + if (includeMatchOnly) { + return this._cachedTypeshedThirdPartyPackagePaths!.get(firstNamePart); + } + + if (firstNamePart) { + return flatten( + getMapValues(this._cachedTypeshedThirdPartyPackagePaths!, (k) => k.startsWith(firstNamePart)) + ); + } + + return []; } private _getThirdPartyTypeshedPackageRoots(execEnv: ExecutionEnvironment, importFailureInfo: string[]) { @@ -1603,18 +1807,18 @@ export class ImportResolver { importFailureInfo.push('Attempting to resolve relative import'); // Determine which search path this file is part of. - let curDir = getDirectoryPath(sourceFilePath); - for (let i = 1; i < moduleDescriptor.leadingDots; i++) { - if (curDir === '') { - importFailureInfo.push(`Invalid relative path '${importName}'`); - return undefined; - } - curDir = getDirectoryPath(curDir); + const directory = getDirectoryLeadingDotsPointsTo( + getDirectoryPath(sourceFilePath), + moduleDescriptor.leadingDots + ); + if (!directory) { + importFailureInfo.push(`Invalid relative path '${importName}'`); + return undefined; } // Now try to match the module parts from the current directory location. const absImport = this.resolveAbsoluteImport( - curDir, + directory, execEnv, moduleDescriptor, importName, @@ -1627,20 +1831,21 @@ export class ImportResolver { private _getCompletionSuggestionsRelative( sourceFilePath: string, + execEnv: ExecutionEnvironment, moduleDescriptor: ImportedModuleDescriptor, suggestions: Set ) { // Determine which search path this file is part of. 
- let curDir = getDirectoryPath(sourceFilePath); - for (let i = 1; i < moduleDescriptor.leadingDots; i++) { - if (curDir === '') { - return; - } - curDir = getDirectoryPath(curDir); + const directory = getDirectoryLeadingDotsPointsTo( + getDirectoryPath(sourceFilePath), + moduleDescriptor.leadingDots + ); + if (!directory) { + return; } // Now try to match the module parts from the current directory location. - this.getCompletionSuggestionsAbsolute(curDir, moduleDescriptor, suggestions); + this._getCompletionSuggestionsAbsolute(sourceFilePath, execEnv, directory, moduleDescriptor, suggestions); } private _getFilesInDirectory(dirPath: string): string[] { @@ -1658,12 +1863,13 @@ export class ImportResolver { return filesInDir; } - protected getCompletionSuggestionsAbsolute( + private _getCompletionSuggestionsAbsolute( + sourceFilePath: string, + execEnv: ExecutionEnvironment, rootPath: string, moduleDescriptor: ImportedModuleDescriptor, suggestions: Set, - sourceFilePath?: string, - execEnv?: ExecutionEnvironment + strictOnly = true ) { // Starting at the specified path, walk the file system to find the // specified module. @@ -1676,24 +1882,38 @@ export class ImportResolver { nameParts.push(''); } + // We need to track this since a module might be resolvable using relative path + // but can't resolved by absolute path. + const leadingDots = moduleDescriptor.leadingDots; const parentNameParts = nameParts.slice(0, -1); // Handle the case where the user has typed the first // dot (or multiple) in a relative path. if (nameParts.length === 0) { - this._addFilteredSuggestionsAbsolute(dirPath, '', suggestions, parentNameParts, sourceFilePath, execEnv); + this._addFilteredSuggestionsAbsolute( + sourceFilePath, + execEnv, + dirPath, + '', + suggestions, + leadingDots, + parentNameParts, + strictOnly + ); } else { for (let i = 0; i < nameParts.length; i++) { // Provide completions only if we're on the last part // of the name. 
if (i === nameParts.length - 1) { this._addFilteredSuggestionsAbsolute( + sourceFilePath, + execEnv, dirPath, nameParts[i], suggestions, + leadingDots, parentNameParts, - sourceFilePath, - execEnv + strictOnly ); } @@ -1706,18 +1926,20 @@ export class ImportResolver { } private _addFilteredSuggestionsAbsolute( - dirPath: string, + sourceFilePath: string, + execEnv: ExecutionEnvironment, + currentPath: string, filter: string, suggestions: Set, + leadingDots: number, parentNameParts: string[], - sourceFilePath?: string, - execEnv?: ExecutionEnvironment + strictOnly: boolean ) { // Enumerate all of the files and directories in the path, expanding links. const entries = getFileSystemEntriesFromDirEntries( - this.readdirEntriesCached(dirPath), + this.readdirEntriesCached(currentPath), this.fileSystem, - dirPath + currentPath ); entries.files.forEach((file) => { @@ -1727,46 +1949,75 @@ export class ImportResolver { const fileWithoutExtension = stripFileExtension(file, /* multiDotExtension */ true); if (supportedFileExtensions.some((ext) => ext === fileExtension)) { - if (fileWithoutExtension !== '__init__') { - if (!filter || StringUtils.isPatternInSymbol(filter, fileWithoutExtension)) { - if (this._isUniqueValidSuggestion(fileWithoutExtension, suggestions)) { - suggestions.add(fileWithoutExtension); - } - } + if (fileWithoutExtension === '__init__') { + return; + } + + if (filter && !StringUtils.isPatternInSymbol(filter, fileWithoutExtension)) { + return; + } + + if ( + !this._isUniqueValidSuggestion(fileWithoutExtension, suggestions) || + !this._isResolvableSuggestion( + fileWithoutExtension, + leadingDots, + parentNameParts, + sourceFilePath, + execEnv, + strictOnly + ) + ) { + return; } + + suggestions.add(fileWithoutExtension); } }); entries.directories.forEach((dir) => { + if (filter && !dir.startsWith(filter)) { + return; + } + if ( - (!filter || dir.startsWith(filter)) && - this._isUniqueValidSuggestion(dir, suggestions) && - this._isResolvableSuggestion(dir, 
parentNameParts, sourceFilePath, execEnv) + !this._isUniqueValidSuggestion(dir, suggestions) || + !this._isResolvableSuggestion(dir, leadingDots, parentNameParts, sourceFilePath, execEnv, strictOnly) ) { - suggestions.add(dir); + return; } + + suggestions.add(dir); }); } // Fix for editable installed submodules where the suggested directory was a namespace directory that wouldn't resolve. // only used for absolute imports private _isResolvableSuggestion( - dir: string, + name: string, + leadingDots: number, parentNameParts: string[], - sourceFilePath?: string, - execEnv?: ExecutionEnvironment + sourceFilePath: string, + execEnv: ExecutionEnvironment, + strictOnly: boolean ) { - if (sourceFilePath && execEnv) { - const result = this._resolveImport(sourceFilePath, execEnv, { - leadingDots: 0, - nameParts: [...parentNameParts, dir], - importedSymbols: [], - }); + // We always resolve names based on sourceFilePath. + const moduleDescriptor = { + leadingDots: leadingDots, + nameParts: [...parentNameParts, name], + importedSymbols: [], + }; - return result.isImportFound; + // Make sure we don't use parent folder resolution when checking whether the given name is resolvable. 
+ if (strictOnly) { + const importName = this.formatImportName(moduleDescriptor); + const importFailureInfo: string[] = []; + + return this._resolveImportStrict(importName, sourceFilePath, execEnv, moduleDescriptor, importFailureInfo) + .isImportFound; } - return false; + return this._resolveImport(sourceFilePath, execEnv, moduleDescriptor).isImportFound; } private _isUniqueValidSuggestion(suggestionToAdd: string, suggestions: Set) { @@ -1955,6 +2206,31 @@ export class ImportResolver { private _isNativeModuleFileExtension(fileExtension: string): boolean { return supportedNativeLibExtensions.some((ext) => ext === fileExtension); } + + private _tryWalkUp(current: string): [success: boolean, path: string] { + if (isDiskPathRoot(current)) { + return [false, '']; + } + + return [ + true, + ensureTrailingDirectorySeparator( + normalizePathCase(this.fileSystem, normalizePath(combinePaths(current, '..'))) + ), + ]; + } + + private _shouldWalkUp(current: string, root: string, execEnv: ExecutionEnvironment) { + return current.length > root.length || (current === root && !execEnv.root); + } + + protected getParentImportResolutionRoot(sourceFilePath: string, executionRoot: string | undefined) { + if (executionRoot) { + return ensureTrailingDirectorySeparator(normalizePathCase(this.fileSystem, normalizePath(executionRoot))); + } + + return ensureTrailingDirectorySeparator(getDirectoryPath(sourceFilePath)); + } } export type ImportResolverFactory = (fs: FileSystem, options: ConfigOptions, host: Host) => ImportResolver; diff --git a/packages/pyright-internal/src/analyzer/importStatementUtils.ts b/packages/pyright-internal/src/analyzer/importStatementUtils.ts index 4867c965881f..f0a3c87eaa4b 100644 --- a/packages/pyright-internal/src/analyzer/importStatementUtils.ts +++ b/packages/pyright-internal/src/analyzer/importStatementUtils.ts @@ -11,10 +11,19 @@ import { CancellationToken } from 'vscode-languageserver'; import { throwIfCancellationRequested } from 
'../common/cancellationUtils'; +import { addIfUnique, createMapFromItems } from '../common/collectionUtils'; import { TextEditAction } from '../common/editAction'; +import { FileSystem } from '../common/fileSystem'; +import { + getDirectoryPath, + getFileName, + getRelativePathComponentsFromDirectory, + isFile, + stripFileExtension, +} from '../common/pathUtils'; import { convertOffsetToPosition, convertPositionToOffset } from '../common/positionUtils'; -import { Position } from '../common/textRange'; -import { TextRange } from '../common/textRange'; +import { compareStringsCaseSensitive } from '../common/stringUtils'; +import { Position, Range, TextRange } from '../common/textRange'; import { ImportAsNode, ImportFromAsNode, @@ -27,6 +36,7 @@ import { } from '../parser/parseNodes'; import { ParseResults } from '../parser/parser'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { ModuleNameAndType } from './importResolver'; import { ImportResult, ImportType } from './importResult'; import * as SymbolNameUtils from './symbolNameUtils'; @@ -54,6 +64,15 @@ export const enum ImportGroup { LocalRelative = 3, } +export interface ImportNameInfo { + name?: string; + alias?: string; +} + +export interface ImportNameWithModuleInfo extends ImportNameInfo { + module: ModuleNameAndType; +} + // Determines which import grouping should be used when sorting imports. 
export function getImportGroup(statement: ImportStatement): ImportGroup { if (statement.importResult) { @@ -142,136 +161,335 @@ function _getImportSymbolNameType(symbolName: string): number { } export function getTextEditsForAutoImportSymbolAddition( - symbolName: string, + importNameInfo: ImportNameInfo | ImportNameInfo[], importStatement: ImportStatement, - parseResults: ParseResults, - aliasName?: string -) { + parseResults: ParseResults +): TextEditAction[] { + const additionEdits: AdditionEdit[] = []; + if ( + !importStatement.node || + importStatement.node.nodeType !== ParseNodeType.ImportFrom || + importStatement.node.isWildcardImport + ) { + return additionEdits; + } + + // Make sure we're not attempting to auto-import a symbol that + // already exists in the import list. + const importFrom = importStatement.node; + importNameInfo = (Array.isArray(importNameInfo) ? importNameInfo : [importNameInfo]).filter( + (info) => + !!info.name && + !importFrom.imports.some((importAs) => importAs.name.value === info.name && importAs.alias === info.alias) + ); + + if (importNameInfo.length === 0) { + return additionEdits; + } + + for (const nameInfo of importNameInfo) { + additionEdits.push( + _getTextEditsForAutoImportSymbolAddition(nameInfo.name!, nameInfo.alias, importStatement.node, parseResults) + ); + } + + // Merge edits with the same insertion point. + const editsMap = createMapFromItems(additionEdits, (e) => Range.print(e.range)); const textEditList: TextEditAction[] = []; + for (const editGroup of editsMap.values()) { + if (editGroup.length === 1) { + textEditList.push(editGroup[0]); + } else { + textEditList.push({ + range: editGroup[0].range, + replacementText: editGroup + .sort((a, b) => _compareImportNames(a.importName, b.importName)) + .map((e) => e.replacementText) + .join(''), + }); + } + } + return textEditList; +} + +function _compareImportNames(name1: string, name2: string) { + // Compare import name by import symbol type and then alphabetical order. 
+ // Match isort default behavior. + const name1Type = _getImportSymbolNameType(name1); + const name2Type = _getImportSymbolNameType(name2); + const compare = name1Type - name2Type; + if (compare !== 0) { + return compare; + } + + // isort will prefer '_' over alphanumerical chars + // This can't be reproduced by a normal string compare in TypeScript, since '_' > 'A'. + // Replace all '_' with '=' which guarantees '=' < 'A'. + // Safe to do as '=' is an invalid char in Python names. + const name1toCompare = name1.replace(/_/g, '='); + const name2toCompare = name2.replace(/_/g, '='); + return compareStringsCaseSensitive(name1toCompare, name2toCompare); +} + +interface AdditionEdit extends TextEditAction { + importName: string; +} + +function _getTextEditsForAutoImportSymbolAddition( + importName: string, + alias: string | undefined, + node: ImportFromNode, + parseResults: ParseResults +): AdditionEdit { // Scan through the import symbols to find the right insertion point, // assuming we want to keep the imports alphabetized. let priorImport: ImportFromAsNode | undefined; + for (const curImport of node.imports) { + if (_compareImportNames(curImport.name.value, importName) > 0) { + break; + } - if (importStatement.node && importStatement.node.nodeType === ParseNodeType.ImportFrom) { - // Make sure we're not attempting to auto-import a symbol that - // already exists in the import list. - if (!importStatement.node.imports.some((importAs) => importAs.name.value === symbolName)) { - // Insert new symbol by import symbol type and then alphabetical order. - // Match isort default behavior. - const symbolNameType = _getImportSymbolNameType(symbolName); - // isort will prefer '_' over alphanumerical chars - // This can't be reproduced by a normal string compare in TypeScript, since '_' > 'A'. - // Replace all '_' with '=' which guarantees '=' < 'A'. - // Safe to do as '=' is an invalid char in Python names. 
- const symbolNameCompare = symbolName.replace(/_/g, '='); - for (const curImport of importStatement.node.imports) { - const curImportNameType = _getImportSymbolNameType(curImport.name.value); - if ( - (curImportNameType === symbolNameType && - curImport.name.value.replace(/_/g, '=') > symbolNameCompare) || - curImportNameType > symbolNameType - ) { - break; - } + priorImport = curImport; + } - priorImport = curImport; - } + // Are import symbols formatted one per line or multiple per line? We + // will honor the existing formatting. We'll use a heuristic to determine + // whether symbols are one per line or multiple per line. + // from x import a, b, c + // or + // from x import ( + // a + // ) + let useOnePerLineFormatting = false; + let indentText = ''; + if (node.imports.length > 0) { + const importStatementPos = convertOffsetToPosition(node.start, parseResults.tokenizerOutput.lines); + const firstSymbolPos = convertOffsetToPosition(node.imports[0].start, parseResults.tokenizerOutput.lines); + const secondSymbolPos = + node.imports.length > 1 + ? convertOffsetToPosition(node.imports[1].start, parseResults.tokenizerOutput.lines) + : undefined; - // Are import symbols formatted one per line or multiple per line? We - // will honor the existing formatting. We'll use a heuristic to determine - // whether symbols are one per line or multiple per line. - // from x import a, b, c - // or - // from x import ( - // a - // ) - let useOnePerLineFormatting = false; - let indentText = ''; - if (importStatement.node.imports.length > 0) { - const importStatementPos = convertOffsetToPosition( - importStatement.node.start, - parseResults.tokenizerOutput.lines - ); - const firstSymbolPos = convertOffsetToPosition( - importStatement.node.imports[0].start, - parseResults.tokenizerOutput.lines - ); - const secondSymbolPos = - importStatement.node.imports.length > 1 - ? 
convertOffsetToPosition( - importStatement.node.imports[1].start, - parseResults.tokenizerOutput.lines - ) - : undefined; - - if ( - firstSymbolPos.line > importStatementPos.line && - (secondSymbolPos === undefined || secondSymbolPos.line > firstSymbolPos.line) - ) { - const firstSymbolLineRange = parseResults.tokenizerOutput.lines.getItemAt(firstSymbolPos.line); - - // Use the same combination of spaces or tabs to match - // existing formatting. - indentText = parseResults.text.substr(firstSymbolLineRange.start, firstSymbolPos.character); - - // Is the indent text composed of whitespace only? - if (/^\s*$/.test(indentText)) { - useOnePerLineFormatting = true; - } - } - } + if ( + firstSymbolPos.line > importStatementPos.line && + (secondSymbolPos === undefined || secondSymbolPos.line > firstSymbolPos.line) + ) { + const firstSymbolLineRange = parseResults.tokenizerOutput.lines.getItemAt(firstSymbolPos.line); - const insertionOffset = priorImport - ? TextRange.getEnd(priorImport) - : importStatement.node.imports.length > 0 - ? importStatement.node.imports[0].start - : importStatement.node.start + importStatement.node.length; - const insertionPosition = convertOffsetToPosition(insertionOffset, parseResults.tokenizerOutput.lines); - - const insertText = aliasName ? `${symbolName} as ${aliasName}` : `${symbolName}`; - let replacementText: string; - - if (useOnePerLineFormatting) { - const eol = parseResults.tokenizerOutput.predominantEndOfLineSequence; - replacementText = priorImport - ? `,${eol}${indentText}${insertText}` - : `${insertText},${eol}${indentText}`; - } else { - replacementText = priorImport ? `, ${insertText}` : `${insertText}, `; + // Use the same combination of spaces or tabs to match + // existing formatting. + indentText = parseResults.text.substr(firstSymbolLineRange.start, firstSymbolPos.character); + + // Is the indent text composed of whitespace only? 
+ if (/^\s*$/.test(indentText)) { + useOnePerLineFormatting = true; } + } + } + + const insertionOffset = priorImport + ? TextRange.getEnd(priorImport) + : node.imports.length > 0 + ? node.imports[0].start + : node.start + node.length; + const insertionPosition = convertOffsetToPosition(insertionOffset, parseResults.tokenizerOutput.lines); + + const insertText = alias ? `${importName} as ${alias}` : `${importName}`; + let replacementText: string; + + if (useOnePerLineFormatting) { + const eol = parseResults.tokenizerOutput.predominantEndOfLineSequence; + replacementText = priorImport ? `,${eol}${indentText}${insertText}` : `${insertText},${eol}${indentText}`; + } else { + replacementText = priorImport ? `, ${insertText}` : `${insertText}, `; + } + + return { + range: { start: insertionPosition, end: insertionPosition }, + importName, + replacementText, + }; +} + +interface InsertionEdit { + range: Range; + preChange: string; + importStatement: string; + postChange: string; + importGroup: ImportGroup; +} + +export function getTextEditsForAutoImportInsertions( + importNameInfo: ImportNameWithModuleInfo[] | ImportNameWithModuleInfo, + importStatements: ImportStatements, + parseResults: ParseResults, + invocationPosition: Position +): TextEditAction[] { + const insertionEdits: InsertionEdit[] = []; + + importNameInfo = Array.isArray(importNameInfo) ? 
importNameInfo : [importNameInfo]; + if (importNameInfo.length === 0) { + return []; + } + const map = createMapFromItems(importNameInfo, (i) => i.module.moduleName); + for (const importInfo of map.values()) { + insertionEdits.push( + ..._getInsertionEditsForAutoImportInsertion( + importInfo, + importStatements, + importInfo[0].module.moduleName, + getImportGroupFromModuleNameAndType(importInfo[0].module), + parseResults, + invocationPosition + ) + ); + } + + return _convertInsertionEditsToTextEdits(parseResults, insertionEdits); +} + +export function getTextEditsForAutoImportInsertion( + importNameInfo: ImportNameInfo[] | ImportNameInfo, + importStatements: ImportStatements, + moduleName: string, + importGroup: ImportGroup, + parseResults: ParseResults, + invocationPosition: Position +): TextEditAction[] { + const insertionEdits = _getInsertionEditsForAutoImportInsertion( + importNameInfo, + importStatements, + moduleName, + importGroup, + parseResults, + invocationPosition + ); + + return _convertInsertionEditsToTextEdits(parseResults, insertionEdits); +} + +function _convertInsertionEditsToTextEdits(parseResults: ParseResults, insertionEdits: InsertionEdit[]) { + if (insertionEdits.length < 2) { + return insertionEdits.map((e) => getTextEdit(e)); + } + + // Merge edits with the same insertion point. 
+ const editsMap = [...createMapFromItems(insertionEdits, (e) => `${e.importGroup} ${Range.print(e.range)}`)] + .sort((a, b) => compareStringsCaseSensitive(a[0], b[0])) + .map((v) => v[1]); + + const textEditList: TextEditAction[] = []; + for (const editGroup of editsMap) { + if (editGroup.length === 1) { + textEditList.push(getTextEdit(editGroup[0])); + } else { textEditList.push({ - range: { start: insertionPosition, end: insertionPosition }, - replacementText, + range: editGroup[0].range, + replacementText: + editGroup[0].preChange + + editGroup + .map((e) => e.importStatement) + .sort((a, b) => compareImports(a, b)) + .join(parseResults.tokenizerOutput.predominantEndOfLineSequence) + + editGroup[0].postChange, }); } } return textEditList; + + function getTextEdit(edit: InsertionEdit): TextEditAction { + return { range: edit.range, replacementText: edit.preChange + edit.importStatement + edit.postChange }; + } + + function compareImports(a: string, b: string) { + const isImport1 = a.startsWith('import'); + const isImport2 = b.startsWith('import'); + + if (isImport1 === isImport2) { + return a < b ? -1 : 1; + } + + return isImport1 ? -1 : 1; + } } -export function getTextEditsForAutoImportInsertion( - symbolName: string | undefined, +function _getInsertionEditsForAutoImportInsertion( + importNameInfo: ImportNameInfo[] | ImportNameInfo, importStatements: ImportStatements, moduleName: string, importGroup: ImportGroup, parseResults: ParseResults, - invocationPosition: Position, - aliasName?: string -): TextEditAction[] { - const textEditList: TextEditAction[] = []; + invocationPosition: Position +): InsertionEdit[] { + const insertionEdits: InsertionEdit[] = []; + + importNameInfo = Array.isArray(importNameInfo) ? importNameInfo : [importNameInfo]; + if (importNameInfo.length === 0) { + // This will let "import [moduleName]" to be generated. + importNameInfo.push({}); + } // We need to emit a new 'from import' statement if symbolName is given. 
otherwise, use 'import' statement. - const importText = symbolName ? symbolName : moduleName; - const importTextWithAlias = aliasName ? `${importText} as ${aliasName}` : importText; - let newImportStatement = symbolName - ? `from ${moduleName} import ${importTextWithAlias}` - : `import ${importTextWithAlias}`; + const map = createMapFromItems(importNameInfo, (i) => (i.name ? 'from' : 'import')); + + // Add import statements first. + const imports = map.get('import'); + if (imports) { + appendToEdits(imports, (names) => `import ${names.join(', ')}`); + } + + // Add from import statements next. + const fromImports = map.get('from'); + if (fromImports) { + appendToEdits(fromImports, (names) => `from ${moduleName} import ${names.join(', ')}`); + } + + return insertionEdits; + + function getImportAsText(nameInfo: ImportNameInfo, moduleName: string) { + const importText = nameInfo.name ? nameInfo.name : moduleName; + return { + sortText: importText, + text: nameInfo.alias ? `${importText} as ${nameInfo.alias}` : importText, + }; + } + + function appendToEdits(importNameInfo: ImportNameInfo[], importStatementGetter: (n: string[]) => string) { + const importNames = importNameInfo + .map((i) => getImportAsText(i, moduleName)) + .sort((a, b) => _compareImportNames(a.sortText, b.sortText)) + .reduce((set, v) => addIfUnique(set, v.text), [] as string[]); + + insertionEdits.push( + _getInsertionEditForAutoImportInsertion( + importStatementGetter(importNames), + importStatements, + moduleName, + importGroup, + parseResults, + invocationPosition + ) + ); + } +} + +function _getInsertionEditForAutoImportInsertion( + importStatement: string, + importStatements: ImportStatements, + moduleName: string, + importGroup: ImportGroup, + parseResults: ParseResults, + invocationPosition: Position +): InsertionEdit { + let preChange = ''; + let postChange = ''; let insertionPosition: Position; const invocation = convertPositionToOffset(invocationPosition, 
parseResults.tokenizerOutput.lines)!; - if (importStatements.orderedImports.length > 0 && invocation >= importStatements.orderedImports[0].node.start) { + if (importStatements.orderedImports.length > 0 && invocation > importStatements.orderedImports[0].node.start) { let insertBefore = true; let insertionImport = importStatements.orderedImports[0]; @@ -288,7 +506,7 @@ export function getTextEditsForAutoImportInsertion( if (importGroup < curImportGroup) { if (!insertBefore && prevImportGroup < importGroup) { // Add an extra line to create a new group. - newImportStatement = parseResults.tokenizerOutput.predominantEndOfLineSequence + newImportStatement; + preChange = parseResults.tokenizerOutput.predominantEndOfLineSequence + preChange; } break; } @@ -302,7 +520,7 @@ export function getTextEditsForAutoImportInsertion( if (curImport.followsNonImportStatement) { if (importGroup > prevImportGroup) { // Add an extra line to create a new group. - newImportStatement = parseResults.tokenizerOutput.predominantEndOfLineSequence + newImportStatement; + preChange = parseResults.tokenizerOutput.predominantEndOfLineSequence + preChange; } break; } @@ -311,7 +529,7 @@ export function getTextEditsForAutoImportInsertion( if (curImport === importStatements.orderedImports[importStatements.orderedImports.length - 1]) { if (importGroup > curImportGroup) { // Add an extra line to create a new group. 
- newImportStatement = parseResults.tokenizerOutput.predominantEndOfLineSequence + newImportStatement; + preChange = parseResults.tokenizerOutput.predominantEndOfLineSequence + preChange; } } @@ -328,9 +546,9 @@ export function getTextEditsForAutoImportInsertion( if (insertionImport) { if (insertBefore) { - newImportStatement = newImportStatement + parseResults.tokenizerOutput.predominantEndOfLineSequence; + postChange = postChange + parseResults.tokenizerOutput.predominantEndOfLineSequence; } else { - newImportStatement = parseResults.tokenizerOutput.predominantEndOfLineSequence + newImportStatement; + preChange = parseResults.tokenizerOutput.predominantEndOfLineSequence + preChange; } insertionPosition = convertOffsetToPosition( @@ -377,23 +595,19 @@ export function getTextEditsForAutoImportInsertion( } } - newImportStatement += + postChange = + postChange + parseResults.tokenizerOutput.predominantEndOfLineSequence + parseResults.tokenizerOutput.predominantEndOfLineSequence; - if (addNewLineBefore) { - newImportStatement = parseResults.tokenizerOutput.predominantEndOfLineSequence + newImportStatement; + preChange = parseResults.tokenizerOutput.predominantEndOfLineSequence + preChange; } else { - newImportStatement += parseResults.tokenizerOutput.predominantEndOfLineSequence; + postChange = postChange + parseResults.tokenizerOutput.predominantEndOfLineSequence; } } - textEditList.push({ - range: { start: insertionPosition, end: insertionPosition }, - replacementText: newImportStatement, - }); - - return textEditList; + const range = { start: insertionPosition, end: insertionPosition }; + return { range, preChange, importStatement, postChange, importGroup }; } function _processImportNode(node: ImportNode, localImports: ImportStatements, followsNonImportStatement: boolean) { @@ -512,3 +726,109 @@ export function getAllImportNames(node: ImportNode | ImportFromNode) { const importFromNode = node as ImportFromNode; return importFromNode.imports; } + +export function 
getImportGroupFromModuleNameAndType(moduleNameAndType: ModuleNameAndType): ImportGroup { + let importGroup = ImportGroup.Local; + if (moduleNameAndType.isLocalTypingsFile || moduleNameAndType.importType === ImportType.ThirdParty) { + importGroup = ImportGroup.ThirdParty; + } else if (moduleNameAndType.importType === ImportType.BuiltIn) { + importGroup = ImportGroup.BuiltIn; + } + + return importGroup; +} + +export function getTextRangeForImportNameDeletion( + nameNodes: ImportAsNode[] | ImportFromAsNode[], + nameNodeIndex: number +): TextRange { + let editSpan: TextRange; + if (nameNodes.length === 1 && nameNodeIndex === 0) { + // get span of "import [|A|]" + editSpan = nameNodes[0]; + } else if (nameNodeIndex === nameNodes.length - 1) { + // get span of "import A[|, B|]" + const start = TextRange.getEnd(nameNodes[nameNodeIndex - 1]); + const length = TextRange.getEnd(nameNodes[nameNodeIndex]) - start; + editSpan = { start, length }; + } else { + // get span of "import [|A, |]B" + const start = nameNodes[nameNodeIndex].start; + const length = nameNodes[nameNodeIndex + 1].start - start; + editSpan = { start, length }; + } + + return editSpan; +} + +export function getRelativeModuleName( + fs: FileSystem, + sourcePath: string, + targetPath: string, + ignoreFolderStructure = false, + sourceIsFile?: boolean +) { + let srcPath = sourcePath; + sourceIsFile = sourceIsFile !== undefined ? sourceIsFile : isFile(fs, sourcePath); + if (sourceIsFile) { + srcPath = getDirectoryPath(sourcePath); + } + + let symbolName: string | undefined; + let destPath = targetPath; + if (sourceIsFile) { + destPath = getDirectoryPath(targetPath); + + const fileName = stripFileExtension(getFileName(targetPath)); + if (fileName !== '__init__') { + // ex) src: a.py, dest: b.py -> ".b" will be returned. 
+ symbolName = fileName; + } else if (ignoreFolderStructure) { + // ex) src: nested1/nested2/__init__.py, dest: nested1/__init__.py -> "...nested1" will be returned + // like how it would return for sibling folder. + // + // if folder structure is not ignored, ".." will be returned + symbolName = getFileName(destPath); + destPath = getDirectoryPath(destPath); + } + } + + const relativePaths = getRelativePathComponentsFromDirectory(srcPath, destPath, (f) => fs.realCasePath(f)); + + // This assumes both file paths are under the same importing root. + // So this doesn't handle paths pointing to 2 different import roots. + // ex) user file A to library file B + let currentPaths = '.'; + for (let i = 1; i < relativePaths.length; i++) { + const relativePath = relativePaths[i]; + if (relativePath === '..') { + currentPaths += '.'; + } else { + currentPaths += relativePath; + } + + if (relativePath !== '..' && i !== relativePaths.length - 1) { + currentPaths += '.'; + } + } + + if (symbolName) { + currentPaths = + currentPaths[currentPaths.length - 1] === '.' ? currentPaths + symbolName : currentPaths + '.' 
+ symbolName; + } + + return currentPaths; +} + +export function getDirectoryLeadingDotsPointsTo(fromDirectory: string, leadingDots: number) { + let currentDirectory = fromDirectory; + for (let i = 1; i < leadingDots; i++) { + if (currentDirectory === '') { + return undefined; + } + + currentDirectory = getDirectoryPath(currentDirectory); + } + + return currentDirectory; +} diff --git a/packages/pyright-internal/src/analyzer/namedTuples.ts b/packages/pyright-internal/src/analyzer/namedTuples.ts index 56e875b2875a..95092430bef9 100644 --- a/packages/pyright-internal/src/analyzer/namedTuples.ts +++ b/packages/pyright-internal/src/analyzer/namedTuples.ts @@ -27,20 +27,22 @@ import { AnyType, ClassType, ClassTypeFlags, + combineTypes, FunctionParameter, FunctionType, FunctionTypeFlags, isClassInstance, isInstantiableClass, NoneType, + TupleTypeArgument, Type, UnknownType, } from './types'; import { computeMroLinearization, convertToInstance, - isOpenEndedTupleClass, isTupleClass, + isUnboundedTupleClass, specializeTupleClass, synthesizeTypeVarForSelfCls, } from './typeUtils'; @@ -77,7 +79,7 @@ export function createNamedTupleType( if ( isClassInstance(defaultsArgType) && isTupleClass(defaultsArgType) && - !isOpenEndedTupleClass(defaultsArgType) && + !isUnboundedTupleClass(defaultsArgType) && defaultsArgType.tupleTypeArguments ) { defaultArgCount = defaultsArgType.tupleTypeArguments.length; @@ -93,7 +95,7 @@ export function createNamedTupleType( ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), fileInfo.moduleName, fileInfo.filePath, - ClassTypeFlags.None, + ClassTypeFlags.ReadOnlyInstanceVariables, ParseTreeUtils.getTypeSourceId(errorNode), /* declaredMetaclass */ undefined, isInstantiableClass(namedTupleType) ? 
namedTupleType.details.effectiveMetaclass : UnknownType.create() @@ -180,6 +182,7 @@ export function createNamedTupleType( const declaration: VariableDeclaration = { type: DeclarationType.Variable, node: stringNode as StringListNode, + isRuntimeTypeExpression: true, path: fileInfo.filePath, range: convertOffsetsToRange( stringNode.start, @@ -187,19 +190,28 @@ export function createNamedTupleType( fileInfo.lines ), moduleName: fileInfo.moduleName, + isInExceptSuite: false, }; newSymbol.addDeclaration(declaration); classFields.set(entryName, newSymbol); entryTypes.push(entryType); } }); - } else if (entriesArg.valueExpression && entriesArg.valueExpression.nodeType === ParseNodeType.List) { + } else if ( + entriesArg.valueExpression?.nodeType === ParseNodeType.List || + entriesArg.valueExpression?.nodeType === ParseNodeType.Tuple + ) { const entryList = entriesArg.valueExpression; const entryMap = new Map(); + const entryExpressions = + entriesArg.valueExpression?.nodeType === ParseNodeType.List + ? entriesArg.valueExpression.entries + : entriesArg.valueExpression.expressions; + const firstParamWithDefaultIndex = - defaultArgCount === undefined ? 0 : Math.max(0, entryList.entries.length - defaultArgCount); + defaultArgCount === undefined ? 
0 : Math.max(0, entryExpressions.length - defaultArgCount); - entryList.entries.forEach((entry, index) => { + entryExpressions.forEach((entry, index) => { let entryTypeNode: ExpressionNode | undefined; let entryType: Type | undefined; let entryNameNode: ExpressionNode | undefined; @@ -211,7 +223,7 @@ export function createNamedTupleType( entryNameNode = entry.expressions[0]; entryTypeNode = entry.expressions[1]; entryType = convertToInstance( - evaluator.getTypeForExpressionExpectingType(entryTypeNode, /* allowFinal */ false) + evaluator.getTypeForExpressionExpectingType(entryTypeNode, /* allowFinal */ false).type ); } else { evaluator.addError(Localizer.Diagnostic.namedTupleNameType(), entry); @@ -270,11 +282,16 @@ export function createNamedTupleType( fileInfo.lines ), moduleName: fileInfo.moduleName, + isInExceptSuite: false, }; newSymbol.addDeclaration(declaration); } classFields.set(entryName, newSymbol); }); + + // Set the type in the type cache for the dict node so it + // doesn't get evaluated again. + evaluator.setTypeForNode(entryList); } else { // A dynamic expression was used, so we can't evaluate // the named tuple statically. 
@@ -349,8 +366,8 @@ export function createNamedTupleType( tupleClassType && isInstantiableClass(tupleClassType) ) { - const literalTypes = matchArgsNames.map((name) => { - return ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, name)); + const literalTypes: TupleTypeArgument[] = matchArgsNames.map((name) => { + return { type: ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, name)), isUnbounded: false }; }); const matchArgsType = ClassType.cloneAsInstance(specializeTupleClass(tupleClassType, literalTypes)); classFields.set('__match_args__', Symbol.createWithType(SymbolFlags.ClassMember, matchArgsType)); @@ -379,7 +396,20 @@ export function updateNamedTupleBaseClass(classType: ClassType, typeArgs: Type[] return; } - const updatedTupleClass = specializeTupleClass(typedTupleClass, typeArgs, isTypeArgumentExplicit); + const tupleTypeArgs: TupleTypeArgument[] = []; + + if (!isTypeArgumentExplicit) { + tupleTypeArgs.push({ + type: typeArgs.length > 0 ? combineTypes(typeArgs) : UnknownType.create(), + isUnbounded: true, + }); + } else { + typeArgs.forEach((t) => { + tupleTypeArgs.push({ type: t, isUnbounded: false }); + }); + } + + const updatedTupleClass = specializeTupleClass(typedTupleClass, tupleTypeArgs, isTypeArgumentExplicit); // Create a copy of the NamedTuple class that overrides the normal MRO // entries with a version of Tuple that is specialized appropriately. diff --git a/packages/pyright-internal/src/analyzer/packageTypeReport.ts b/packages/pyright-internal/src/analyzer/packageTypeReport.ts index b98e7cc81eb9..9abc4446fbee 100644 --- a/packages/pyright-internal/src/analyzer/packageTypeReport.ts +++ b/packages/pyright-internal/src/analyzer/packageTypeReport.ts @@ -23,10 +23,13 @@ export enum SymbolCategory { TypeAlias, } +// The order of these is important. Status values with higher numbers are +// considered "worse" than status values with lower numbers. 
export const enum TypeKnownStatus { - Known, - PartiallyUnknown, - Unknown, + Known = 0, // Type is fully known (declared) + Ambiguous = 1, // Type is inferred and potentially ambiguous (may differ by type checker) + PartiallyUnknown = 2, // Part of the type is unknown + Unknown = 3, // The type is completely unknown } export interface SymbolInfo { diff --git a/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts b/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts index ae0f29f5c7c9..dba20e86a81c 100644 --- a/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts +++ b/packages/pyright-internal/src/analyzer/packageTypeVerifier.ts @@ -16,7 +16,7 @@ import { FullAccessHost } from '../common/fullAccessHost'; import { combinePaths, getDirectoryPath, getFileExtension, stripFileExtension, tryStat } from '../common/pathUtils'; import { getEmptyRange, Range } from '../common/textRange'; import { DeclarationType, FunctionDeclaration, VariableDeclaration } from './declaration'; -import { ImportedModuleDescriptor, ImportResolver } from './importResolver'; +import { createImportedModuleDescriptor, ImportResolver } from './importResolver'; import { AlternateSymbolNameMap, getEmptyReport, @@ -35,8 +35,10 @@ import { isDunderName, isPrivateOrProtectedName } from './symbolNameUtils'; import { ClassType, FunctionType, + isClass, isInstantiableClass, isModule, + isTypeSame, isUnknown, ModuleType, Type, @@ -53,8 +55,13 @@ export class PackageTypeVerifier { private _importResolver: ImportResolver; private _program: Program; - constructor(private _fileSystem: FileSystem) { + constructor(private _fileSystem: FileSystem, private _packageName: string, private _ignoreExternal = false) { this._configOptions = new ConfigOptions(''); + + if (_ignoreExternal) { + this._configOptions.evaluateUnknownImportsAsAny = true; + } + this._execEnv = this._configOptions.findExecEnvironment('.'); this._importResolver = new ImportResolver( this._fileSystem, @@ -64,14 +71,14 @@ 
export class PackageTypeVerifier { this._program = new Program(this._importResolver, this._configOptions); } - verify(packageName: string, ignoreExternal = false): PackageTypeReport { - const trimmedPackageName = packageName.trim(); + verify(): PackageTypeReport { + const trimmedPackageName = this._packageName.trim(); const packageNameParts = trimmedPackageName.split('.'); const report = getEmptyReport( packageNameParts[0], this._getDirectoryForPackage(packageNameParts[0]) || '', - ignoreExternal + this._ignoreExternal ); const commonDiagnostics = report.generalDiagnostics; @@ -93,7 +100,7 @@ export class PackageTypeVerifier { ) ); } else { - const pyTypedInfo = getPyTypedInfo(this._fileSystem, report.rootDirectory); + const pyTypedInfo = this._getDeepestPyTypedInfo(report.rootDirectory, packageNameParts); if (!pyTypedInfo) { commonDiagnostics.push( new Diagnostic(DiagnosticCategory.Error, 'No py.typed file found', getEmptyRange()) @@ -179,13 +186,25 @@ export class PackageTypeVerifier { } } + private _getDeepestPyTypedInfo(rootDirectory: string, packageNameParts: string[]) { + let subNameParts = [...packageNameParts]; + + // Find the deepest py.typed file that corresponds to the requested submodule. 
+ while (subNameParts.length >= 1) { + const packageSubdir = combinePaths(rootDirectory, ...subNameParts.slice(1)); + const pyTypedInfo = getPyTypedInfo(this._fileSystem, packageSubdir); + if (pyTypedInfo) { + return pyTypedInfo; + } + + subNameParts = subNameParts.slice(0, subNameParts.length - 1); + } + + return undefined; + } + private _resolveImport(moduleName: string) { - const moduleDescriptor: ImportedModuleDescriptor = { - leadingDots: 0, - nameParts: moduleName.split('.'), - importedSymbols: [], - }; - return this._importResolver.resolveImport('', this._execEnv, moduleDescriptor); + return this._importResolver.resolveImport('', this._execEnv, createImportedModuleDescriptor(moduleName)); } private _getPublicSymbolsForModule( @@ -324,7 +343,7 @@ export class PackageTypeVerifier { const parseTree = sourceFile.getParseResults()!.parseTree; const moduleScope = getScopeForNode(parseTree)!; - this._verifySymbolsInSymbolTable( + this._getTypeKnownStatusForSymbolTable( report, module.name, moduleScope.symbolTable, @@ -418,18 +437,19 @@ export class PackageTypeVerifier { return report.ignoreExternal && !fullTypeName.startsWith(report.packageName); } - private _verifySymbolsInSymbolTable( + private _getTypeKnownStatusForSymbolTable( report: PackageTypeReport, scopeName: string, symbolTable: SymbolTable, scopeType: ScopeType, - publicSymbolMap: PublicSymbolMap - ): boolean { + publicSymbolMap: PublicSymbolMap, + overrideSymbolCallback?: (name: string, symbol: Symbol) => Symbol + ): TypeKnownStatus { if (this._shouldIgnoreType(report, scopeName)) { - return true; + return TypeKnownStatus.Known; } - let isKnown = true; + let knownStatus = TypeKnownStatus.Known; symbolTable.forEach((symbol, name) => { if ( @@ -450,7 +470,29 @@ export class PackageTypeVerifier { return; } - const symbolType = this._program.getTypeForSymbol(symbol); + let symbolType = this._program.getTypeForSymbol(symbol); + + let usesAmbiguousOverride = false; + let baseSymbolType: Type | undefined; + 
let childSymbolType: Type | undefined; + + if (overrideSymbolCallback) { + const baseTypeSymbol = overrideSymbolCallback(name, symbol); + + if (baseTypeSymbol !== symbol) { + childSymbolType = symbolType; + baseSymbolType = this._program.getTypeForSymbol(baseTypeSymbol); + + // If the inferred type is ambiguous or the declared base class type is + // not the same type as the inferred type, mark it as ambiguous because + // different type checkers will get different results. + if (TypeBase.isAmbiguous(childSymbolType) || !isTypeSame(baseSymbolType, childSymbolType)) { + usesAmbiguousOverride = true; + } + + symbolType = baseSymbolType; + } + } const typedDecls = symbol.getTypedDeclarations(); const primaryDecl = typedDecls.length > 0 ? typedDecls[typedDecls.length - 1] : undefined; @@ -468,6 +510,16 @@ export class PackageTypeVerifier { const symbolCategory = this._getSymbolCategory(symbol, symbolType); const isExported = publicSymbolMap.has(fullName); + // If the only reference to this symbol is a "__slots__" entry, we will + // skip it when considering type completeness. + if ( + decls.length === 1 && + primaryDecl?.type === DeclarationType.Variable && + primaryDecl.isDefinedBySlots + ) { + return; + } + symbolInfo = { category: symbolCategory, name, @@ -482,37 +534,126 @@ export class PackageTypeVerifier { this._addSymbol(report, symbolInfo); if (!this._isSymbolTypeImplied(scopeType, name)) { - this._validateSymbolType(report, symbolInfo, symbolType, declRange, declPath, publicSymbolMap); + this._getSymbolTypeKnownStatus( + report, + symbolInfo, + symbolType, + declRange, + declPath, + publicSymbolMap + ); } } - if (symbolInfo.typeKnownStatus !== TypeKnownStatus.Known) { - isKnown = false; + if (usesAmbiguousOverride) { + const decls = symbol.getDeclarations(); + const primaryDecl = decls.length > 0 ? 
decls[decls.length - 1] : undefined; + const declRange = primaryDecl?.range || getEmptyRange(); + const declPath = primaryDecl?.path || ''; + + const extraInfo = new DiagnosticAddendum(); + if (baseSymbolType) { + extraInfo.addMessage( + `Type declared in base class is "${this._program.printType( + baseSymbolType, + /* expandTypeAlias */ false + )}"` + ); + } + + if (childSymbolType) { + extraInfo.addMessage( + `Type inferred in child class is "${this._program.printType( + childSymbolType, + /* expandTypeAlias */ false + )}"` + ); + + if (TypeBase.isAmbiguous(childSymbolType)) { + extraInfo.addMessage( + 'Inferred child class type is missing type annotation and could be inferred differently by type checkers' + ); + } + } + + this._addSymbolError( + symbolInfo, + `Ambiguous base class override` + extraInfo.getString(), + declRange, + declPath + ); + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + TypeKnownStatus.Ambiguous + ); } + + knownStatus = this._updateKnownStatusIfWorse(knownStatus, symbolInfo.typeKnownStatus); } }); - return isKnown; + return knownStatus; } // Determines whether the type for the symbol in question is fully known. // If not, it adds diagnostics to the symbol information and updates the // typeKnownStatus field. 
- private _validateSymbolType( + private _getSymbolTypeKnownStatus( report: PackageTypeReport, symbolInfo: SymbolInfo, type: Type, declRange: Range, declFilePath: string, publicSymbolMap: PublicSymbolMap - ): boolean { + ): TypeKnownStatus { + let knownStatus = TypeKnownStatus.Known; + + if (type.typeAliasInfo && type.typeAliasInfo.typeArguments) { + type.typeAliasInfo.typeArguments.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for type alias "${type.typeAliasInfo!.name}" has unknown type`, + declRange, + declFilePath + ); + knownStatus = TypeKnownStatus.Unknown; + } else if (isPartlyUnknown(typeArg)) { + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for type alias "${ + type.typeAliasInfo!.name + }" has partially unknown type`, + declRange, + declFilePath + ); + knownStatus = TypeKnownStatus.PartiallyUnknown; + } + }); + } + + if (TypeBase.isAmbiguous(type) && !isUnknown(type)) { + const ambiguousDiag = new DiagnosticAddendum(); + ambiguousDiag.addMessage( + `Inferred type is "${this._program.printType(type, /* expandTypeAlias */ false)}"` + ); + this._addSymbolError( + symbolInfo, + 'Type is missing type annotation and could be inferred differently by type checkers' + + ambiguousDiag.getString(), + declRange, + declFilePath + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Ambiguous); + } + switch (type.category) { case TypeCategory.Unbound: case TypeCategory.Any: case TypeCategory.None: case TypeCategory.Never: case TypeCategory.TypeVar: - return true; + break; case TypeCategory.Unknown: { this._addSymbolError( @@ -523,32 +664,32 @@ export class PackageTypeVerifier { declRange, declFilePath ); - symbolInfo.typeKnownStatus = TypeKnownStatus.Unknown; - return false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + break; } case TypeCategory.Union: { - let isKnown = true; doForEachSubtype(type, 
(subtype) => { - if ( - !this._validateSymbolType(report, symbolInfo, subtype, declRange, declFilePath, publicSymbolMap) - ) { - isKnown = false; - } + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getSymbolTypeKnownStatus( + report, + symbolInfo, + subtype, + declRange, + declFilePath, + publicSymbolMap + ) + ); }); - - if (!isKnown) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; - } - - return isKnown; + break; } case TypeCategory.OverloadedFunction: { - let isKnown = true; for (const overload of type.overloads) { - if ( - !this._validateSymbolType( + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getSymbolTypeKnownStatus( report, symbolInfo, overload, @@ -556,34 +697,32 @@ export class PackageTypeVerifier { declFilePath, publicSymbolMap ) - ) { - isKnown = false; - } - } - - if (!isKnown) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + ); } - return isKnown; + break; } case TypeCategory.Function: { if (!this._shouldIgnoreType(report, type.details.fullName)) { - if ( - !this._validateFunctionType(report, type, publicSymbolMap, symbolInfo, declRange, declFilePath) - ) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; - return false; - } + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getFunctionTypeKnownStatus( + report, + type, + publicSymbolMap, + symbolInfo, + declRange, + declFilePath + ) + ); } - return true; + break; } case TypeCategory.Class: { // Properties require special handling. 
if (TypeBase.isInstance(type) && ClassType.isPropertyClass(type)) { - let isTypeKnown = true; const accessors = ['fget', 'fset', 'fdel']; const propertyClass = type; @@ -595,8 +734,9 @@ export class PackageTypeVerifier { return; } - if ( - !this._validateSymbolType( + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getSymbolTypeKnownStatus( report, symbolInfo, accessType, @@ -604,61 +744,51 @@ export class PackageTypeVerifier { '', publicSymbolMap ) - ) { - isTypeKnown = false; - } + ); }); - return isTypeKnown; + break; } - let isKnown = true; - if (!this._shouldIgnoreType(report, type.details.fullName)) { // Don't bother type-checking built-in types. if (!ClassType.isBuiltIn(type)) { // Reference the class. this._getSymbolForClass(report, type, publicSymbolMap); } - - // Analyze type arguments if present to make sure they are known. - if (type.typeArguments) { - type.typeArguments!.forEach((typeArg, index) => { - if (isUnknown(typeArg)) { - this._addSymbolError( - symbolInfo, - `Type argument ${index} has unknown type`, - declRange, - declFilePath - ); - isKnown = false; - } else if (isPartlyUnknown(typeArg)) { - const diag = new DiagnosticAddendum(); - diag.addMessage( - `Type is ${this._program.printType(typeArg, /* expandTypeAlias */ false)}` - ); - this._addSymbolError( - symbolInfo, - `Type argument ${index} has partially unknown type` + diag.getString(), - declRange, - declFilePath - ); - isKnown = false; - } - }); - } } - if (!isKnown) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + // Analyze type arguments if present to make sure they are known. 
+ if (type.typeArguments) { + type.typeArguments!.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for class "${type.details.name}" has unknown type`, + declRange, + declFilePath + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else if (isPartlyUnknown(typeArg)) { + const diag = new DiagnosticAddendum(); + diag.addMessage(`Type is ${this._program.printType(typeArg, /* expandTypeAlias */ false)}`); + this._addSymbolError( + symbolInfo, + `Type argument ${index + 1} for class "${ + type.details.name + }" has partially unknown type` + diag.getString(), + declRange, + declFilePath + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.PartiallyUnknown); + } + }); } - return isKnown; + break; } case TypeCategory.Module: { - let isKnown = true; - if (!this._shouldIgnoreType(report, type.moduleName)) { const moduleSymbol = this._getSymbolForModule(report, type, publicSymbolMap); if (moduleSymbol.typeKnownStatus !== TypeKnownStatus.Known) { @@ -668,20 +798,21 @@ export class PackageTypeVerifier { declRange, declFilePath ); - isKnown = false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, moduleSymbol.typeKnownStatus); } } - if (!isKnown) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; - } - - return isKnown; + break; } } + + // Downgrade the symbol's type known status info. 
+ symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse(symbolInfo.typeKnownStatus, knownStatus); + + return knownStatus; } - private _validateFunctionType( + private _getFunctionTypeKnownStatus( report: PackageTypeReport, type: FunctionType, publicSymbolMap: PublicSymbolMap, @@ -689,8 +820,8 @@ export class PackageTypeVerifier { declRange?: Range, declFilePath?: string, diag?: DiagnosticAddendum - ): boolean { - let isKnown = true; + ): TypeKnownStatus { + let knownStatus = TypeKnownStatus.Known; // If the file path wasn't provided, try to get it from the type. if (type.details.declaration && !declFilePath) { @@ -723,7 +854,7 @@ export class PackageTypeVerifier { `Type annotation for parameter "${param.name}" is missing` ); } - isKnown = false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); } } else if (isUnknown(param.type)) { if (symbolInfo) { @@ -737,13 +868,21 @@ export class PackageTypeVerifier { diag.createAddendum().addMessage(`Type of parameter "${param.name}" is unknown`); } } - isKnown = false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); } else { const extraInfo = new DiagnosticAddendum(); - if (!this._isTypeKnown(report, param.type, publicSymbolMap, extraInfo.createAddendum())) { + const paramKnownStatus = this._getTypeKnownStatus( + report, + param.type, + publicSymbolMap, + extraInfo.createAddendum() + ); + + if (paramKnownStatus !== TypeKnownStatus.Known) { extraInfo.addMessage( `Parameter type is "${this._program.printType(param.type, /* expandTypeAlias */ false)}"` ); + if (symbolInfo) { this._addSymbolError( symbolInfo, @@ -752,12 +891,14 @@ export class PackageTypeVerifier { declFilePath || '' ); } + if (diag) { const subDiag = diag.createAddendum(); subDiag.addMessage(`Type of parameter "${param.name}" is partially unknown`); subDiag.addAddendum(extraInfo); } - isKnown = false; + + knownStatus = this._updateKnownStatusIfWorse(knownStatus, paramKnownStatus); } 
} } @@ -773,23 +914,24 @@ export class PackageTypeVerifier { declFilePath || '' ); } - isKnown = false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); } else { const extraInfo = new DiagnosticAddendum(); - if ( - !this._isTypeKnown( - report, - type.details.declaredReturnType, - publicSymbolMap, - extraInfo.createAddendum() - ) - ) { + const returnTypeKnownStatus = this._getTypeKnownStatus( + report, + type.details.declaredReturnType, + publicSymbolMap, + extraInfo.createAddendum() + ); + + if (returnTypeKnownStatus !== TypeKnownStatus.Known) { extraInfo.addMessage( `Return type is "${this._program.printType( type.details.declaredReturnType, /* expandTypeAlias */ false )}"` ); + if (symbolInfo) { this._addSymbolError( symbolInfo, @@ -798,12 +940,14 @@ export class PackageTypeVerifier { declFilePath || '' ); } + if (diag) { const subDiag = diag.createAddendum(); subDiag.addMessage(`Return type is partially unknown`); subDiag.addAddendum(extraInfo); } - isKnown = false; + + knownStatus = this._updateKnownStatusIfWorse(knownStatus, returnTypeKnownStatus); } } } else { @@ -820,7 +964,7 @@ export class PackageTypeVerifier { if (diag) { diag.createAddendum().addMessage(`Return type annotation is missing`); } - isKnown = false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); } } @@ -853,11 +997,11 @@ export class PackageTypeVerifier { report.missingDefaultParamCount++; } - if (!isKnown && symbolInfo) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + if (symbolInfo) { + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse(symbolInfo.typeKnownStatus, knownStatus); } - return isKnown; + return knownStatus; } private _getSymbolForClass( @@ -897,26 +1041,54 @@ export class PackageTypeVerifier { report.missingClassDocStringCount++; } - if ( - !this._verifySymbolsInSymbolTable( - report, - type.details.fullName, - type.details.fields, - ScopeType.Class, - publicSymbolMap - ) - 
) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; - } + const symbolTableTypeKnownStatus = this._getTypeKnownStatusForSymbolTable( + report, + type.details.fullName, + type.details.fields, + ScopeType.Class, + publicSymbolMap, + (name: string, symbol: Symbol) => { + // If the symbol within this class is lacking a type declaration, + // see if we can find a same-named symbol in a parent class with + // a type declaration. + if (!symbol.hasTypedDeclarations()) { + for (const mroClass of type.details.mro.slice(1)) { + if (isClass(mroClass)) { + const overrideSymbol = mroClass.details.fields.get(name); + if (overrideSymbol && overrideSymbol.hasTypedDeclarations()) { + return overrideSymbol; + } + } + } + } + + return symbol; + } + ); + + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + symbolTableTypeKnownStatus + ); // Add information for the metaclass. if (type.details.effectiveMetaclass) { if (!isInstantiableClass(type.details.effectiveMetaclass)) { this._addSymbolError(symbolInfo, `Type of metaclass unknown`, getEmptyRange(), ''); - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + TypeKnownStatus.PartiallyUnknown + ); } else { const diag = new DiagnosticAddendum(); - if (!this._isTypeKnown(report, type.details.effectiveMetaclass, publicSymbolMap, diag)) { + const metaclassKnownStatus = this._getTypeKnownStatus( + report, + type.details.effectiveMetaclass, + publicSymbolMap, + diag + ); + + if (metaclassKnownStatus !== TypeKnownStatus.Known) { this._addSymbolError( symbolInfo, `Type of metaclass "${type.details.effectiveMetaclass}" is partially unknown` + @@ -924,7 +1096,10 @@ export class PackageTypeVerifier { getEmptyRange(), '' ); - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + 
metaclassKnownStatus + ); } } } @@ -933,7 +1108,10 @@ export class PackageTypeVerifier { type.details.baseClasses.forEach((baseClass) => { if (!isInstantiableClass(baseClass)) { this._addSymbolError(symbolInfo, `Type of base class unknown`, getEmptyRange(), ''); - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + TypeKnownStatus.PartiallyUnknown + ); } else { // Handle "tuple" specially. Even though it's a generic class, it // doesn't require a type argument. @@ -942,14 +1120,20 @@ export class PackageTypeVerifier { } const diag = new DiagnosticAddendum(); - if (!this._isTypeKnown(report, baseClass, publicSymbolMap, diag)) { + const baseClassTypeStatus = this._getTypeKnownStatus(report, baseClass, publicSymbolMap, diag); + + if (baseClassTypeStatus !== TypeKnownStatus.Known) { this._addSymbolError( symbolInfo, `Type of base class "${baseClass.details.fullName}" is partially unknown` + diag.getString(), getEmptyRange(), '' ); - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; + + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + baseClassTypeStatus + ); } } }); @@ -982,111 +1166,146 @@ export class PackageTypeVerifier { this._addSymbol(report, symbolInfo); - if ( - !this._verifySymbolsInSymbolTable(report, type.moduleName, type.fields, ScopeType.Module, publicSymbolMap) - ) { - symbolInfo.typeKnownStatus = TypeKnownStatus.PartiallyUnknown; - } + const symbolTableTypeKnownStatus = this._getTypeKnownStatusForSymbolTable( + report, + type.moduleName, + type.fields, + ScopeType.Module, + publicSymbolMap + ); + + symbolInfo.typeKnownStatus = this._updateKnownStatusIfWorse( + symbolInfo.typeKnownStatus, + symbolTableTypeKnownStatus + ); return symbolInfo; } - private _isTypeKnown( + private _getTypeKnownStatus( report: PackageTypeReport, type: Type, publicSymbolMap: PublicSymbolMap, diag: DiagnosticAddendum - 
): boolean { + ): TypeKnownStatus { + let knownStatus = TypeKnownStatus.Known; + + if (type.typeAliasInfo && type.typeAliasInfo.typeArguments) { + type.typeAliasInfo.typeArguments.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for type alias "${type.typeAliasInfo!.name}" has unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else if (isPartlyUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for type alias "${ + type.typeAliasInfo!.name + }" has partially unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.PartiallyUnknown); + } + }); + } + + if (TypeBase.isAmbiguous(type)) { + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Ambiguous); + } + switch (type.category) { case TypeCategory.Unbound: case TypeCategory.Any: case TypeCategory.None: case TypeCategory.Never: case TypeCategory.TypeVar: - return true; + break; case TypeCategory.Unknown: { - return false; + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + break; } case TypeCategory.Union: { - let isKnown = true; doForEachSubtype(type, (subtype) => { - if (!this._isTypeKnown(report, subtype, publicSymbolMap, diag.createAddendum())) { - isKnown = false; - } + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getTypeKnownStatus(report, subtype, publicSymbolMap, diag.createAddendum()) + ); }); - return isKnown; + break; } case TypeCategory.OverloadedFunction: { - let isKnown = true; for (const overload of type.overloads) { - if (!this._isTypeKnown(report, overload, publicSymbolMap, diag.createAddendum())) { - isKnown = false; - } + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getTypeKnownStatus(report, overload, publicSymbolMap, diag.createAddendum()) + ); } - return isKnown; + break; } case TypeCategory.Function: { if 
(!this._shouldIgnoreType(report, type.details.fullName)) { - return this._validateFunctionType( - report, - type, - publicSymbolMap, - /* symbolInfo */ undefined, - /* declRange */ undefined, - /* declFilePath */ undefined, - diag + knownStatus = this._updateKnownStatusIfWorse( + knownStatus, + this._getFunctionTypeKnownStatus( + report, + type, + publicSymbolMap, + /* symbolInfo */ undefined, + /* declRange */ undefined, + /* declFilePath */ undefined, + diag + ) ); } - return true; + break; } case TypeCategory.Class: { - let isKnown = true; - if (!this._shouldIgnoreType(report, type.details.fullName)) { // Don't bother type-checking built-in types. if (!ClassType.isBuiltIn(type)) { // Reference the class. this._getSymbolForClass(report, type, publicSymbolMap); } + } - // Analyze type arguments if present to make sure they are known. - if (type.typeArguments) { - type.typeArguments!.forEach((typeArg, index) => { - if (isUnknown(typeArg)) { - diag.addMessage(`Type argument ${index} has unknown type`); - isKnown = false; - } else if (isPartlyUnknown(typeArg)) { - diag.addMessage(`Type argument ${index} has partially unknown type`); - isKnown = false; - } - }); - } + // Analyze type arguments if present to make sure they are known. 
+ if (type.typeArguments) { + type.typeArguments!.forEach((typeArg, index) => { + if (isUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for class "${type.details.name}" has unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.Unknown); + } else if (isPartlyUnknown(typeArg)) { + diag.addMessage( + `Type argument ${index + 1} for class "${type.details.name}" has partially unknown type` + ); + knownStatus = this._updateKnownStatusIfWorse(knownStatus, TypeKnownStatus.PartiallyUnknown); + } + }); } - return isKnown; + break; } case TypeCategory.Module: { - let isKnown = true; - if (!this._shouldIgnoreType(report, type.moduleName)) { const moduleSymbol = this._getSymbolForModule(report, type, publicSymbolMap); - if (moduleSymbol.typeKnownStatus !== TypeKnownStatus.Known) { - isKnown = false; - } + knownStatus = this._updateKnownStatusIfWorse(knownStatus, moduleSymbol.typeKnownStatus); } - return isKnown; + break; } } + + return knownStatus; } private _getSymbolCategory(symbol: Symbol, type: Type): SymbolCategory { @@ -1147,17 +1366,21 @@ export class PackageTypeVerifier { } private _getDirectoryForPackage(packageName: string): string | undefined { - const moduleDescriptor: ImportedModuleDescriptor = { - leadingDots: 0, - nameParts: [packageName], - importedSymbols: [], - }; - - const importResult = this._importResolver.resolveImport('', this._execEnv, moduleDescriptor); + const importResult = this._importResolver.resolveImport( + '', + this._execEnv, + createImportedModuleDescriptor(packageName) + ); if (importResult.isImportFound) { const resolvedPath = importResult.resolvedPaths[importResult.resolvedPaths.length - 1]; - return getDirectoryPath(resolvedPath); + if (resolvedPath) { + getDirectoryPath(resolvedPath); + } + + // If it's a namespace package with no __init__.py(i), use the package + // directory instead. 
+ return importResult.packageDirectory || ''; } return undefined; @@ -1173,6 +1396,7 @@ export class PackageTypeVerifier { '__qualname__', '__slots__', '__all__', + '__weakref__', ]; return knownClassSymbols.some((sym) => sym === name); } else if (scopeType === ScopeType.Module) { @@ -1210,4 +1434,9 @@ export class PackageTypeVerifier { filePath: declFilePath, }); } + + private _updateKnownStatusIfWorse(currentStatus: TypeKnownStatus, newStatus: TypeKnownStatus) { + // Is the current status worse than the current status. + return newStatus > currentStatus ? newStatus : currentStatus; + } } diff --git a/packages/pyright-internal/src/analyzer/parentDirectoryCache.ts b/packages/pyright-internal/src/analyzer/parentDirectoryCache.ts new file mode 100644 index 000000000000..459defafa6bc --- /dev/null +++ b/packages/pyright-internal/src/analyzer/parentDirectoryCache.ts @@ -0,0 +1,88 @@ +/* + * parentDirectoryCache.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Cache to hold parent directory import result to make sure + * we don't repeatedly search folders. 
+ */ + +import { getOrAdd } from '../common/collectionUtils'; +import { FileSystem } from '../common/fileSystem'; +import { ensureTrailingDirectorySeparator, normalizePath, normalizePathCase } from '../common/pathUtils'; +import { ImportResult } from './importResult'; + +export type ImportPath = { importPath: string | undefined }; + +type CacheEntry = { importResult: ImportResult; path: string; importName: string }; + +export class ParentDirectoryCache { + private readonly _importChecked = new Map>(); + private readonly _cachedResults = new Map>(); + + private _libPathCache: string[] | undefined = undefined; + + constructor(private _importRootGetter: () => string[]) { + // empty + } + + getImportResult(path: string, importName: string, importResult: ImportResult): ImportResult | undefined { + const result = this._cachedResults.get(importName)?.get(path); + if (result) { + // We already checked for the importName at the path. + // Return the result if succeeded otherwise, return regular import result given. + return result ?? importResult; + } + + const checked = this._importChecked.get(importName)?.get(path); + if (checked) { + // We already checked for the importName at the path. + if (!checked.importPath) { + return importResult; + } + + return this._cachedResults.get(importName)?.get(checked.importPath) ?? importResult; + } + + return undefined; + } + + checkValidPath(fs: FileSystem, sourceFilePath: string, root: string): boolean { + if (!sourceFilePath.startsWith(root)) { + // We don't search containing folders for libs. + return false; + } + + this._libPathCache = + this._libPathCache ?? + this._importRootGetter() + .map((r) => ensureTrailingDirectorySeparator(normalizePathCase(fs, normalizePath(r)))) + .filter((r) => r !== root) + .filter((r) => r.startsWith(root)); + + if (this._libPathCache.some((p) => sourceFilePath.startsWith(p))) { + // Make sure it is not lib folders under user code root. 
+ // ex) .venv folder + return false; + } + + return true; + } + + checked(path: string, importName: string, importPath: ImportPath) { + getOrAdd(this._importChecked, importName, () => new Map()).set(path, importPath); + } + + add(result: CacheEntry) { + getOrAdd(this._cachedResults, result.importName, () => new Map()).set( + result.path, + result.importResult + ); + } + + reset() { + this._importChecked.clear(); + this._cachedResults.clear(); + this._libPathCache = undefined; + } +} diff --git a/packages/pyright-internal/src/analyzer/parseTreeUtils.ts b/packages/pyright-internal/src/analyzer/parseTreeUtils.ts index bb6b0158d5ed..9e01a51ea915 100644 --- a/packages/pyright-internal/src/analyzer/parseTreeUtils.ts +++ b/packages/pyright-internal/src/analyzer/parseTreeUtils.ts @@ -9,8 +9,8 @@ import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; import { assertNever, fail } from '../common/debug'; -import { convertPositionToOffset } from '../common/positionUtils'; -import { Position } from '../common/textRange'; +import { convertPositionToOffset, convertTextRangeToRange } from '../common/positionUtils'; +import { Position, Range } from '../common/textRange'; import { TextRange } from '../common/textRange'; import { TextRangeCollection } from '../common/textRangeCollection'; import { @@ -26,17 +26,22 @@ import { IndexNode, isExpressionNode, LambdaNode, + MemberAccessNode, ModuleNode, NameNode, ParameterCategory, ParameterNode, ParseNode, ParseNodeType, + StatementListNode, StatementNode, + StringListNode, + StringNode, SuiteNode, TypeAnnotationNode, } from '../parser/parseNodes'; -import { KeywordType, OperatorType, StringTokenFlags, Token, TokenType } from '../parser/tokenizerTypes'; +import { TokenizerOutput } from '../parser/tokenizer'; +import { KeywordType, OperatorType, StringToken, StringTokenFlags, Token, TokenType } from '../parser/tokenizerTypes'; import { getScope } from './analyzerNodeInfo'; import { ParseTreeWalker } from './parseTreeWalker'; @@ 
-47,6 +52,8 @@ export const enum PrintExpressionFlags { ForwardDeclarations = 1 << 0, } +// Returns the depth of the node as measured from the root +// of the parse tree. export function getNodeDepth(node: ParseNode): number { let depth = 0; let curNode: ParseNode | undefined = node; @@ -182,6 +189,12 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla case ParseNodeType.Number: { let value = node.value.toString(); + + // If it's stored as a bigint, strip off the "n". + if (value.endsWith('n')) { + value = value.substring(0, value.length - 1); + } + if (node.isImaginary) { value += 'j'; } @@ -319,10 +332,10 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla listStr = `${keyStr}: ${valueStr}`; } - return ( + listStr = listStr + ' ' + - node.comprehensions + node.forIfNodes .map((expr) => { if (expr.nodeType === ParseNodeType.ListComprehensionFor) { return ( @@ -334,8 +347,9 @@ export function printExpression(node: ExpressionNode, flags = PrintExpressionFla return `if ${printExpression(expr.testExpression, flags)}`; } }) - .join(' ') - ); + .join(' '); + + return node.isParenthesized ? 
`(${listStr}})` : listStr; } case ParseNodeType.Slice: { @@ -477,6 +491,19 @@ export function printOperator(operator: OperatorType): string { return 'unknown'; } +export function getEnclosingSuite(node: ParseNode): SuiteNode | undefined { + let curNode = node.parent; + + while (curNode) { + if (curNode.nodeType === ParseNodeType.Suite) { + return curNode; + } + curNode = curNode.parent; + } + + return undefined; +} + export function getEnclosingClass(node: ParseNode, stopAtFunction = false): ClassNode | undefined { let curNode = node.parent; while (curNode) { @@ -539,15 +566,21 @@ export function getEnclosingClassOrModule(node: ParseNode, stopAtFunction = fals export function getEnclosingFunction(node: ParseNode): FunctionNode | undefined { let curNode = node.parent; + let prevNode: ParseNode | undefined; + while (curNode) { if (curNode.nodeType === ParseNodeType.Function) { - return curNode; + // Don't treat a decorator as being "enclosed" in the function. + if (!curNode.decorators.some((decorator) => decorator === prevNode)) { + return curNode; + } } if (curNode.nodeType === ParseNodeType.Class) { return undefined; } + prevNode = curNode; curNode = curNode.parent; } @@ -657,6 +690,7 @@ export function getEvaluationNodeForAssignmentExpression( // a symbol referenced in the specified node. export function getEvaluationScopeNode(node: ParseNode): EvaluationScopeNode { let prevNode: ParseNode | undefined; + let prevPrevNode: ParseNode | undefined; let curNode: ParseNode | undefined = node; let isParamNameNode = false; @@ -709,14 +743,31 @@ export function getEvaluationScopeNode(node: ParseNode): EvaluationScopeNode { break; } - case ParseNodeType.ListComprehension: + case ParseNodeType.ListComprehension: { + if (getScope(curNode) !== undefined) { + // The iterable expression of the first subnode of a list comprehension + // is evaluated within the scope of its parent. 
+ const isFirstIterableExpr = + prevNode === curNode.forIfNodes[0] && + curNode.forIfNodes[0].nodeType === ParseNodeType.ListComprehensionFor && + curNode.forIfNodes[0].iterableExpression === prevPrevNode; + + if (!isFirstIterableExpr) { + return curNode; + } + } + break; + } + case ParseNodeType.Module: { if (getScope(curNode) !== undefined) { return curNode; } + break; } } + prevPrevNode = prevNode; prevNode = curNode; curNode = curNode.parent; } @@ -857,12 +908,32 @@ export function isNodeContainedWithin(node: ParseNode, potentialContainer: Parse return false; } +export function getParentNodeOfType(node: ParseNode, containerType: ParseNodeType): ParseNode | undefined { + let curNode: ParseNode | undefined = node; + while (curNode) { + if (curNode.nodeType === containerType) { + return curNode; + } + + curNode = curNode.parent; + } + + return undefined; +} + +export function isNodeContainedWithinNodeType(node: ParseNode, containerType: ParseNodeType): boolean { + return getParentNodeOfType(node, containerType) !== undefined; +} + export function isSuiteEmpty(node: SuiteNode): boolean { + let sawEllipsis = false; + for (const statement of node.statements) { if (statement.nodeType === ParseNodeType.StatementList) { for (const substatement of statement.statements) { if (substatement.nodeType === ParseNodeType.Ellipsis) { // Allow an ellipsis + sawEllipsis = true; } else if (substatement.nodeType === ParseNodeType.StringList) { // Allow doc strings } else { @@ -874,7 +945,7 @@ export function isSuiteEmpty(node: SuiteNode): boolean { } } - return true; + return sawEllipsis; } export function isMatchingExpression(reference: ExpressionNode, expression: ExpressionNode): boolean { @@ -1091,7 +1162,7 @@ export function isWithinLoop(node: ParseNode): boolean { return false; } -export function isWithinTryBlock(node: ParseNode): boolean { +export function isWithinTryBlock(node: ParseNode, treatWithAsTryBlock = false): boolean { let curNode: ParseNode | undefined = node; let 
prevNode: ParseNode | undefined; @@ -1101,10 +1172,17 @@ export function isWithinTryBlock(node: ParseNode): boolean { return curNode.trySuite === prevNode; } + case ParseNodeType.With: { + if (treatWithAsTryBlock && curNode.suite === prevNode) { + return true; + } + break; + } + case ParseNodeType.Function: case ParseNodeType.Module: case ParseNodeType.Class: { - break; + return false; } } @@ -1143,32 +1221,40 @@ export function getDocString(statements: StatementNode[]): string | undefined { return undefined; } + if (!isDocString(statements[0])) { + return undefined; + } + + // It's up to the user to convert normalize/convert this as needed. + const strings = (statements[0].statements[0] as StringListNode).strings; + if (strings.length === 1) { + // Common case. + return strings[0].value; + } + + return strings.map((s) => s.value).join(''); +} + +export function isDocString(statementList: StatementListNode): boolean { // If the first statement in the suite isn't a StringNode, // assume there is no docString. - const statementList = statements[0]; if (statementList.statements.length === 0 || statementList.statements[0].nodeType !== ParseNodeType.StringList) { - return undefined; + return false; } // A docstring can consist of multiple joined strings in a single expression. const strings = statementList.statements[0].strings; if (strings.length === 0) { - return undefined; + return false; } // Any f-strings invalidate the entire docstring. if (strings.some((n) => (n.token.flags & StringTokenFlags.Format) !== 0)) { - return undefined; + return false; } // It's up to the user to convert normalize/convert this as needed. - - if (strings.length === 1) { - // Common case. 
- return strings[0].value; - } - - return strings.map((s) => s.value).join(''); + return true; } // Sometimes a NamedTuple assignment statement is followed by a statement @@ -1285,6 +1371,17 @@ export class NameNodeWalker extends ParseTreeWalker { } } +export class CallNodeWalker extends ParseTreeWalker { + constructor(private _callback: (node: CallNode) => void) { + super(); + } + + override visitCall(node: CallNode) { + this._callback(node); + return true; + } +} + export function getEnclosingParameter(node: ParseNode): ParameterNode | undefined { let curNode: ParseNode | undefined = node; @@ -1306,10 +1403,14 @@ export function getCallNodeAndActiveParameterIndex( // Find the call node that contains the specified node. let curNode: ParseNode | undefined = node; let callNode: CallNode | undefined; + while (curNode !== undefined) { + // make sure we only look at callNodes when we are inside their arguments if (curNode.nodeType === ParseNodeType.Call) { - callNode = curNode; - break; + if (isOffsetInsideCallArgs(curNode, insertionOffset)) { + callNode = curNode; + break; + } } curNode = curNode.parent; } @@ -1318,15 +1419,6 @@ export function getCallNodeAndActiveParameterIndex( return undefined; } - const index = tokens.getItemAtPosition(callNode.leftExpression.start); - if (index >= 0 && index + 1 < tokens.count) { - const token = tokens.getItemAt(index + 1); - if (token.type === TokenType.OpenParenthesis && insertionOffset < TextRange.getEnd(token)) { - // position must be after '(' - return undefined; - } - } - const endPosition = TextRange.getEnd(callNode); if (insertionOffset > endPosition) { return undefined; @@ -1390,14 +1482,39 @@ export function getCallNodeAndActiveParameterIndex( activeOrFake, }; - function getTokenAt(tokens: TextRangeCollection, position: number) { - const index = tokens.getItemAtPosition(position); - if (index < 0) { - return undefined; + function isOffsetInsideCallArgs(node: CallNode, offset: number) { + let found = true; + const 
argumentStart = + node.leftExpression.length > 0 ? TextRange.getEnd(node.leftExpression) - 1 : node.leftExpression.start; + const index = tokens.getItemAtPosition(argumentStart); + if (index >= 0 && index + 1 < tokens.count) { + const token = tokens.getItemAt(index + 1); + if (token.type === TokenType.OpenParenthesis && insertionOffset < TextRange.getEnd(token)) { + // position must be after '(' + found = false; + } } + return found; + } +} + +export function getTokenAt(tokens: TextRangeCollection, position: number) { + const index = tokens.getItemAtPosition(position); + if (index < 0) { + return undefined; + } + + return tokens.getItemAt(index); +} - return tokens.getItemAt(index); +export function getTokenOverlapping(tokens: TextRangeCollection, position: number) { + const index = tokens.getItemAtPosition(position); + if (index < 0) { + return undefined; } + + const token = tokens.getItemAt(index); + return TextRange.overlaps(token, position) ? token : undefined; } export function printParseNodeType(type: ParseNodeType) { @@ -1746,3 +1863,240 @@ export function isFunctionSuiteEmpty(node: FunctionNode) { return isEmpty; } + +export function isImportModuleName(node: ParseNode): boolean { + return getFirstAncestorOrSelfOfKind(node, ParseNodeType.ModuleName)?.parent?.nodeType === ParseNodeType.ImportAs; +} + +export function isImportAlias(node: ParseNode): boolean { + return node.parent?.nodeType === ParseNodeType.ImportAs && node.parent.alias === node; +} + +export function isFromImportModuleName(node: ParseNode): boolean { + return getFirstAncestorOrSelfOfKind(node, ParseNodeType.ModuleName)?.parent?.nodeType === ParseNodeType.ImportFrom; +} + +export function isFromImportName(node: ParseNode): boolean { + return node.parent?.nodeType === ParseNodeType.ImportFromAs && node.parent.name === node; +} + +export function isFromImportAlias(node: ParseNode): boolean { + return node.parent?.nodeType === ParseNodeType.ImportFromAs && node.parent.alias === node; +} + +export 
function isLastNameOfModuleName(node: NameNode): boolean { + if (node.parent?.nodeType !== ParseNodeType.ModuleName) { + return false; + } + + const module = node.parent; + if (module.nameParts.length === 0) { + return false; + } + + return module.nameParts[module.nameParts.length - 1] === node; +} + +function* _getAncestorsIncludingSelf(node: ParseNode | undefined) { + while (node !== undefined) { + yield node; + node = node.parent; + } +} + +type NodeForType = T extends ParseNode & { nodeType: NT } ? T : never; + +export function getFirstAncestorOrSelfOfKind( + node: ParseNode | undefined, + type: NT +): NodeForType | undefined { + return getFirstAncestorOrSelf(node, (n) => n.nodeType === type) as NodeForType | undefined; +} + +export function getFirstAncestorOrSelf( + node: ParseNode | undefined, + predicate: (node: ParseNode) => boolean +): ParseNode | undefined { + for (const current of _getAncestorsIncludingSelf(node)) { + if (predicate(current)) { + return current; + } + } + + return undefined; +} + +export function getDottedNameWithGivenNodeAsLastName(node: NameNode): MemberAccessNode | NameNode { + // Shape of dotted name is + // MemberAccess (ex, a.b) + // Name Name + // or + // MemberAccess (ex, a.b.c) + // MemberAccess Name + // Name Name + if (node.parent?.nodeType !== ParseNodeType.MemberAccess) { + return node; + } + + if (node.parent.leftExpression === node) { + return node; + } + + return node.parent; +} + +export function getDottedName(node: MemberAccessNode | NameNode): NameNode[] | undefined { + // ex) [a] or [a].b + // simple case, [a] + if (node.nodeType === ParseNodeType.Name) { + return [node]; + } + + // dotted name case. 
+ const names: NameNode[] = []; + if (_getDottedName(node, names)) { + return names.reverse(); + } + + return undefined; + + function _getDottedName(node: MemberAccessNode | NameNode, names: NameNode[]): boolean { + if (node.nodeType === ParseNodeType.Name) { + names.push(node); + return true; + } + + names.push(node.memberName); + + if ( + node.leftExpression.nodeType === ParseNodeType.Name || + node.leftExpression.nodeType === ParseNodeType.MemberAccess + ) { + return _getDottedName(node.leftExpression, names); + } + + return false; + } +} + +export function getFirstNameOfDottedName(node: MemberAccessNode | NameNode): NameNode | undefined { + // ex) [a] or [a].b + if (node.nodeType === ParseNodeType.Name) { + return node; + } + + if ( + node.leftExpression.nodeType === ParseNodeType.Name || + node.leftExpression.nodeType === ParseNodeType.MemberAccess + ) { + return getFirstNameOfDottedName(node.leftExpression); + } + + return undefined; +} + +export function isFirstNameOfDottedName(node: NameNode): boolean { + // ex) [A] or [A].B.C.D + if (node.parent?.nodeType !== ParseNodeType.MemberAccess) { + return true; + } + + if (node.parent.leftExpression === node) { + return true; + } + + return false; +} + +export function isLastNameOfDottedName(node: NameNode): boolean { + // ex) A or D.C.B.[A] + if (node.parent?.nodeType !== ParseNodeType.MemberAccess) { + return true; + } + + if ( + node.parent.leftExpression.nodeType !== ParseNodeType.Name && + node.parent.leftExpression.nodeType !== ParseNodeType.MemberAccess + ) { + return false; + } + + if (node.parent.leftExpression === node) { + return false; + } + + return node.parent.parent?.nodeType !== ParseNodeType.MemberAccess; +} + +export function getStringNodeValueRange(node: StringNode) { + return getStringValueRange(node.token); +} + +export function getStringValueRange(token: StringToken) { + const length = token.quoteMarkLength; + const hasEnding = !(token.flags & StringTokenFlags.Unterminated); + return 
TextRange.create(token.start + length, token.length - length - (hasEnding ? length : 0)); +} + +export function getFullStatementRange(statementNode: ParseNode, tokenizerOutput: TokenizerOutput): Range { + const range = convertTextRangeToRange(statementNode, tokenizerOutput.lines); + + // First, see whether there are other tokens except semicolon or new line on the same line. + const endPosition = _getEndPositionIfMultipleStatementsAreOnSameLine( + range, + TextRange.getEnd(statementNode), + tokenizerOutput + ); + + if (endPosition) { + return { start: range.start, end: endPosition }; + } + + // If not, delete the whole line. + if (range.end.line === tokenizerOutput.lines.count - 1) { + return range; + } + + return { start: range.start, end: { line: range.end.line + 1, character: 0 } }; +} + +export function isUnannotatedFunction(node: FunctionNode) { + return ( + node.returnTypeAnnotation === undefined && + node.parameters.every( + (param) => param.typeAnnotation === undefined && param.typeAnnotationComment === undefined + ) + ); +} + +function _getEndPositionIfMultipleStatementsAreOnSameLine( + range: Range, + tokenPosition: number, + tokenizerOutput: TokenizerOutput +): Position | undefined { + const tokenIndex = tokenizerOutput.tokens.getItemAtPosition(tokenPosition); + if (tokenIndex < 0) { + return undefined; + } + + let currentIndex = tokenIndex; + for (; currentIndex < tokenizerOutput.tokens.count; currentIndex++) { + const token = tokenizerOutput.tokens.getItemAt(currentIndex); + const tokenRange = convertTextRangeToRange(token, tokenizerOutput.lines); + if (range.end.line !== tokenRange.start.line) { + break; + } + } + + for (let index = tokenIndex; index < currentIndex; index++) { + const token = tokenizerOutput.tokens.getItemAt(index); + if (token.type === TokenType.Semicolon || token.type === TokenType.NewLine) { + continue; + } + + const tokenRange = convertTextRangeToRange(token, tokenizerOutput.lines); + return tokenRange.start; + } + + return 
undefined; +} diff --git a/packages/pyright-internal/src/analyzer/parseTreeWalker.ts b/packages/pyright-internal/src/analyzer/parseTreeWalker.ts index efababa681cc..2a3dde31881c 100644 --- a/packages/pyright-internal/src/analyzer/parseTreeWalker.ts +++ b/packages/pyright-internal/src/analyzer/parseTreeWalker.ts @@ -114,6 +114,12 @@ export class ParseTreeWalker { // returned. visitNode(node: ParseNode): ParseNodeArray { switch (node.nodeType) { + case ParseNodeType.Error: + if (this.visitError(node)) { + return [node.child, ...(node.decorators ?? [])]; + } + break; + case ParseNodeType.Argument: if (this.visitArgument(node)) { return [node.name, node.valueExpression]; @@ -168,24 +174,12 @@ export class ParseTreeWalker { } break; - case ParseNodeType.Case: - if (this.visitCase(node)) { - return [node.pattern, node.guardExpression, node.suite]; - } - break; - case ParseNodeType.Class: if (this.visitClass(node)) { return [...node.decorators, node.name, ...node.arguments, node.suite]; } break; - case ParseNodeType.Ternary: - if (this.visitTernary(node)) { - return [node.ifExpression, node.testExpression, node.elseExpression]; - } - break; - case ParseNodeType.Constant: if (this.visitConstant(node)) { return []; @@ -216,21 +210,21 @@ export class ParseTreeWalker { } break; - case ParseNodeType.DictionaryKeyEntry: - if (this.visitDictionaryKeyEntry(node)) { - return [node.keyExpression, node.valueExpression]; - } - break; - case ParseNodeType.DictionaryExpandEntry: if (this.visitDictionaryExpandEntry(node)) { return [node.expandExpression]; } break; - case ParseNodeType.Error: - if (this.visitError(node)) { - return [node.child, ...(node.decorators ?? 
[])]; + case ParseNodeType.DictionaryKeyEntry: + if (this.visitDictionaryKeyEntry(node)) { + return [node.keyExpression, node.valueExpression]; + } + break; + + case ParseNodeType.Ellipsis: + if (this.visitEllipsis(node)) { + return []; } break; @@ -270,12 +264,6 @@ export class ParseTreeWalker { } break; - case ParseNodeType.Ellipsis: - if (this.visitEllipsis(node)) { - return []; - } - break; - case ParseNodeType.Except: if (this.visitExcept(node)) { return [node.typeExpression, node.name, node.exceptSuite]; @@ -307,12 +295,6 @@ export class ParseTreeWalker { } break; - case ParseNodeType.FunctionAnnotation: - if (this.visitFunctionAnnotation(node)) { - return [...node.paramTypeAnnotations, node.returnTypeAnnotation]; - } - break; - case ParseNodeType.Global: if (this.visitGlobal(node)) { return node.nameList; @@ -333,7 +315,7 @@ export class ParseTreeWalker { case ParseNodeType.ListComprehension: if (this.visitListComprehension(node)) { - return [node.expression, ...node.comprehensions]; + return [node.expression, ...node.forIfNodes]; } break; @@ -349,12 +331,6 @@ export class ParseTreeWalker { } break; - case ParseNodeType.Match: - if (this.visitMatch(node)) { - return [node.subjectExpression, ...node.cases]; - } - break; - case ParseNodeType.MemberAccess: if (this.visitMemberAccess(node)) { return [node.leftExpression, node.memberName]; @@ -403,66 +379,6 @@ export class ParseTreeWalker { } break; - case ParseNodeType.PatternCapture: - if (this.visitPatternCapture(node)) { - return [node.target]; - } - break; - - case ParseNodeType.PatternClass: - if (this.visitPatternClass(node)) { - return [node.className, ...node.arguments]; - } - break; - - case ParseNodeType.PatternClassArgument: - if (this.visitPatternClassArgument(node)) { - return [node.name, node.pattern]; - } - break; - - case ParseNodeType.PatternAs: - if (this.visitPatternAs(node)) { - return [...node.orPatterns, node.target]; - } - break; - - case ParseNodeType.PatternLiteral: - if 
(this.visitPatternLiteral(node)) { - return [node.expression]; - } - break; - - case ParseNodeType.PatternMapping: - if (this.visitPatternMapping(node)) { - return [...node.entries]; - } - break; - - case ParseNodeType.PatternMappingKeyEntry: - if (this.visitPatternMappingKeyEntry(node)) { - return [node.keyPattern, node.valuePattern]; - } - break; - - case ParseNodeType.PatternMappingExpandEntry: - if (this.visitPatternMappingExpandEntry(node)) { - return [node.target]; - } - break; - - case ParseNodeType.PatternSequence: - if (this.visitPatternSequence(node)) { - return [...node.entries]; - } - break; - - case ParseNodeType.PatternValue: - if (this.visitPatternValue(node)) { - return [node.expression]; - } - break; - case ParseNodeType.Raise: if (this.visitRaise(node)) { return [node.typeExpression, node.valueExpression, node.tracebackExpression]; @@ -493,24 +409,30 @@ export class ParseTreeWalker { } break; - case ParseNodeType.String: - if (this.visitString(node)) { - return []; - } - break; - case ParseNodeType.StringList: if (this.visitStringList(node)) { return [node.typeAnnotation, ...node.strings]; } break; + case ParseNodeType.String: + if (this.visitString(node)) { + return []; + } + break; + case ParseNodeType.Suite: if (this.visitSuite(node)) { return [...node.statements]; } break; + case ParseNodeType.Ternary: + if (this.visitTernary(node)) { + return [node.ifExpression, node.testExpression, node.elseExpression]; + } + break; + case ParseNodeType.Tuple: if (this.visitTuple(node)) { return node.expressions; @@ -571,6 +493,84 @@ export class ParseTreeWalker { } break; + case ParseNodeType.FunctionAnnotation: + if (this.visitFunctionAnnotation(node)) { + return [...node.paramTypeAnnotations, node.returnTypeAnnotation]; + } + break; + + case ParseNodeType.Match: + if (this.visitMatch(node)) { + return [node.subjectExpression, ...node.cases]; + } + break; + + case ParseNodeType.Case: + if (this.visitCase(node)) { + return [node.pattern, 
node.guardExpression, node.suite]; + } + break; + + case ParseNodeType.PatternSequence: + if (this.visitPatternSequence(node)) { + return [...node.entries]; + } + break; + + case ParseNodeType.PatternAs: + if (this.visitPatternAs(node)) { + return [...node.orPatterns, node.target]; + } + break; + + case ParseNodeType.PatternLiteral: + if (this.visitPatternLiteral(node)) { + return [node.expression]; + } + break; + + case ParseNodeType.PatternClass: + if (this.visitPatternClass(node)) { + return [node.className, ...node.arguments]; + } + break; + + case ParseNodeType.PatternCapture: + if (this.visitPatternCapture(node)) { + return [node.target]; + } + break; + + case ParseNodeType.PatternMapping: + if (this.visitPatternMapping(node)) { + return [...node.entries]; + } + break; + + case ParseNodeType.PatternMappingKeyEntry: + if (this.visitPatternMappingKeyEntry(node)) { + return [node.keyPattern, node.valuePattern]; + } + break; + + case ParseNodeType.PatternMappingExpandEntry: + if (this.visitPatternMappingExpandEntry(node)) { + return [node.target]; + } + break; + + case ParseNodeType.PatternValue: + if (this.visitPatternValue(node)) { + return [node.expression]; + } + break; + + case ParseNodeType.PatternClassArgument: + if (this.visitPatternClassArgument(node)) { + return [node.name, node.pattern]; + } + break; + default: fail('Unexpected node type'); break; diff --git a/packages/pyright-internal/src/analyzer/patternMatching.ts b/packages/pyright-internal/src/analyzer/patternMatching.ts index 6bc4f902b0c2..e05e07569735 100644 --- a/packages/pyright-internal/src/analyzer/patternMatching.ts +++ b/packages/pyright-internal/src/analyzer/patternMatching.ts @@ -10,7 +10,6 @@ */ import { assert } from '../common/debug'; -import { DiagnosticAddendum } from '../common/diagnostic'; import { DiagnosticRule } from '../common/diagnosticRules'; import { Localizer } from '../localization/localize'; import { @@ -28,15 +27,19 @@ import { } from '../parser/parseNodes'; import { 
getFileInfo } from './analyzerNodeInfo'; import { getTypedDictMembersForClass } from './typedDicts'; -import { TypeEvaluator } from './typeEvaluatorTypes'; +import { EvaluatorFlags, TypeEvaluator } from './typeEvaluatorTypes'; +import { enumerateLiteralsForType } from './typeGuards'; import { AnyType, ClassType, combineTypes, isAnyOrUnknown, + isClass, isClassInstance, isInstantiableClass, isNever, + isNoneInstance, + isSameWithoutLiteralValue, isUnknown, NeverType, Type, @@ -44,17 +47,23 @@ import { UnknownType, } from './types'; import { + addConditionToType, + applySolvedTypeVars, convertToInstance, doForEachSubtype, getTypeCondition, + getTypeVarScopeId, isLiteralType, - isOpenEndedTupleClass, isTupleClass, + isUnboundedTupleClass, lookUpClassMember, mapSubtypes, partiallySpecializeType, + specializeClassType, specializeTupleClass, + stripLiteralValue, } from './typeUtils'; +import { TypeVarMap } from './typeVarMap'; // PEP 634 indicates that several built-in classes are handled differently // when used with class pattern matching. @@ -75,8 +84,9 @@ const classPatternSpecialCases = [ interface SequencePatternInfo { subtype: Type; entryTypes: Type[]; - isIndeterminateLength: boolean; - isTuple: boolean; + isIndeterminateLength?: boolean; + isTuple?: boolean; + isObject?: boolean; } interface MappingPatternInfo { @@ -121,7 +131,7 @@ export function narrowTypeBasedOnPattern( case ParseNodeType.PatternCapture: { // A capture captures everything, so nothing remains in the negative case. - return isPositiveTest ? type : NeverType.create(); + return isPositiveTest ? type : NeverType.createNever(); } case ParseNodeType.Error: { @@ -142,6 +152,7 @@ function narrowTypeBasedOnSequencePattern( } let sequenceInfo = getSequencePatternInfo(evaluator, type, pattern.entries.length, pattern.starEntryIndex); + // Further narrow based on pattern entry types. 
sequenceInfo = sequenceInfo.filter((entry) => { let isPlausibleMatch = true; @@ -155,7 +166,9 @@ function narrowTypeBasedOnSequencePattern( entry, index, pattern.entries.length, - pattern.starEntryIndex + pattern.starEntryIndex, + /* unpackStarEntry */ true, + /* isSubjectObject */ false ); const narrowedEntryType = narrowTypeBasedOnPattern( @@ -164,33 +177,58 @@ function narrowTypeBasedOnSequencePattern( sequenceEntry, /* isPositiveTest */ true ); + if (index === pattern.starEntryIndex) { if ( isClassInstance(narrowedEntryType) && narrowedEntryType.tupleTypeArguments && - !isOpenEndedTupleClass(narrowedEntryType) && + !isUnboundedTupleClass(narrowedEntryType) && narrowedEntryType.tupleTypeArguments ) { - narrowedEntryTypes.push(...narrowedEntryType.tupleTypeArguments); + narrowedEntryTypes.push(...narrowedEntryType.tupleTypeArguments.map((t) => t.type)); } else { + narrowedEntryTypes.push(narrowedEntryType); canNarrowTuple = false; } } else { narrowedEntryTypes.push(narrowedEntryType); - } - if (isNever(narrowedEntryType)) { - isPlausibleMatch = false; + if (isNever(narrowedEntryType)) { + isPlausibleMatch = false; + } } }); - // If this is a tuple, we can narrow it to a specific tuple type. - // Other sequences cannot be narrowed because we don't know if they - // are immutable (covariant). - if (isPlausibleMatch && canNarrowTuple) { - const tupleClassType = evaluator.getBuiltInType(pattern, 'tuple'); - if (tupleClassType && isInstantiableClass(tupleClassType)) { - entry.subtype = ClassType.cloneAsInstance(specializeTupleClass(tupleClassType, narrowedEntryTypes)); + if (isPlausibleMatch) { + // If this is a tuple, we can narrow it to a specific tuple type. + // Other sequences cannot be narrowed because we don't know if they + // are immutable (covariant). 
+ if (canNarrowTuple) { + const tupleClassType = evaluator.getBuiltInType(pattern, 'tuple'); + if (tupleClassType && isInstantiableClass(tupleClassType)) { + entry.subtype = ClassType.cloneAsInstance( + specializeTupleClass( + tupleClassType, + narrowedEntryTypes.map((t) => { + return { type: t, isUnbounded: false }; + }) + ) + ); + } + } + + // If this is an object, we can narrow it to a specific Sequence type. + if (entry.isObject) { + const sequenceType = evaluator.getTypingType(pattern, 'Sequence'); + if (sequenceType && isInstantiableClass(sequenceType)) { + entry.subtype = ClassType.cloneAsInstance( + ClassType.cloneForSpecialization( + sequenceType, + [stripLiteralValue(combineTypes(narrowedEntryTypes))], + /* isTypeArgumentExplicit */ true + ) + ); + } } } @@ -326,19 +364,19 @@ function getPositionalMatchArgNames(evaluator: TypeEvaluator, type: ClassType): if ( isClassInstance(matchArgsType) && isTupleClass(matchArgsType) && - !isOpenEndedTupleClass(matchArgsType) && + !isUnboundedTupleClass(matchArgsType) && matchArgsType.tupleTypeArguments ) { const tupleArgs = matchArgsType.tupleTypeArguments; // Are all the args string literals? 
if ( - !tupleArgs.some( - (argType) => - !isClassInstance(argType) || !ClassType.isBuiltIn(argType, 'str') || !isLiteralType(argType) + tupleArgs.every( + (arg) => + isClassInstance(arg.type) && ClassType.isBuiltIn(arg.type, 'str') && isLiteralType(arg.type) ) ) { - return tupleArgs.map((argType) => (argType as ClassType).literalValue as string); + return tupleArgs.map((arg) => (arg.type as ClassType).literalValue as string); } } } @@ -361,11 +399,15 @@ function narrowTypeBasedOnLiteralPattern( isLiteralType(literalType) && isClassInstance(subtype) && isLiteralType(subtype) && - evaluator.canAssignType(literalType, subtype, new DiagnosticAddendum()) + evaluator.canAssignType(literalType, subtype) ) { return undefined; } + if (isNoneInstance(subtype) && isNoneInstance(literalType)) { + return undefined; + } + // Narrow a non-literal bool based on a literal bool pattern. if ( isClassInstance(subtype) && @@ -383,7 +425,7 @@ function narrowTypeBasedOnLiteralPattern( } return mapSubtypes(type, (subtype) => { - if (evaluator.canAssignType(subtype, literalType, new DiagnosticAddendum())) { + if (evaluator.canAssignType(subtype, literalType)) { return literalType; } return undefined; @@ -396,56 +438,108 @@ function narrowTypeBasedOnClassPattern( pattern: PatternClassNode, isPositiveTest: boolean ): Type { - const classType = evaluator.getTypeOfExpression(pattern.className).type; + let exprType = evaluator.getTypeOfExpression( + pattern.className, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; + + // If this is a class (but not a type alias that refers to a class), + // specialize it with Unknown type arguments. + if (isClass(exprType) && !exprType.typeAliasInfo) { + exprType = specializeClassType(exprType); + } if (!isPositiveTest) { // Don't attempt to narrow if the class type is a more complex type (e.g. a TypeVar or union). 
- if (!isInstantiableClass(classType)) { + if (!isInstantiableClass(exprType)) { return type; } - // Don't attempt to narrow if there are arguments. - let hasArguments = pattern.arguments.length > 0; - if ( - pattern.arguments.length === 1 && - !pattern.arguments[0].name && - classPatternSpecialCases.some((className) => classType.details.fullName === className) - ) { - hasArguments = false; - } - - if (hasArguments) { - return type; - } + let classType = exprType; - // Don't attempt to narrow if the class type is generic. if (classType.details.typeParameters.length > 0) { - return type; + classType = ClassType.cloneForSpecialization(classType, undefined, /* isTypeArgumentExplicit */ false); } - const diag = new DiagnosticAddendum(); const classInstance = convertToInstance(classType); - return mapSubtypes(type, (subtype) => { - if (evaluator.canAssignType(classInstance, subtype, diag)) { + return evaluator.mapSubtypesExpandTypeVars( + type, + /* conditionFilter */ undefined, + (subjectSubtypeExpanded, subjectSubtypeUnexpanded) => { + if (!isClassInstance(subjectSubtypeExpanded)) { + return subjectSubtypeUnexpanded; + } + + if (!evaluator.canAssignType(classInstance, subjectSubtypeExpanded)) { + return subjectSubtypeExpanded; + } + + // If there are no arguments, we're done. We know that this match + // will never succeed. + if (pattern.arguments.length === 0) { + return undefined; + } + + // We might be able to narrow further based on arguments, but only + // if the types match exactly or the subtype is a final class and + // therefore cannot be subclassed. + if (!evaluator.canAssignType(subjectSubtypeExpanded, classInstance)) { + if (!ClassType.isFinal(subjectSubtypeExpanded)) { + return subjectSubtypeExpanded; + } + } + + if ( + pattern.arguments.length === 1 && + !pattern.arguments[0].name && + classPatternSpecialCases.some((className) => classType.details.fullName === className) + ) { + return undefined; + } + + // Are there any positional arguments? 
If so, try to get the mappings for + // these arguments by fetching the __match_args__ symbol from the class. + let positionalArgNames: string[] = []; + if (pattern.arguments.some((arg) => !arg.name)) { + if (isClass(subjectSubtypeExpanded)) { + positionalArgNames = getPositionalMatchArgNames(evaluator, subjectSubtypeExpanded); + } + } + + for (let index = 0; index < pattern.arguments.length; index++) { + const narrowedArgType = narrowTypeOfClassPatternArgument( + evaluator, + pattern.arguments[index], + index, + positionalArgNames, + subjectSubtypeExpanded, + isPositiveTest + ); + + if (!isNever(narrowedArgType)) { + return subjectSubtypeUnexpanded; + } + } + + // We've completely eliminated the type based on the arguments. return undefined; } - - return subtype; - }); + ); } - if (!TypeBase.isInstantiable(classType)) { + if (!TypeBase.isInstantiable(exprType)) { evaluator.addDiagnostic( getFileInfo(pattern).diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, - Localizer.DiagnosticAddendum.typeNotClass().format({ type: evaluator.printType(classType) }), + Localizer.DiagnosticAddendum.typeNotClass().format({ type: evaluator.printType(exprType) }), pattern.className ); - return NeverType.create(); + return NeverType.createNever(); } return evaluator.mapSubtypesExpandTypeVars( - classType, + exprType, /* conditionFilter */ undefined, (expandedSubtype, unexpandedSubtype) => { if (isAnyOrUnknown(expandedSubtype)) { @@ -453,65 +547,103 @@ function narrowTypeBasedOnClassPattern( } if (isInstantiableClass(expandedSubtype)) { - return mapSubtypes(type, (matchSubtype) => { - const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(matchSubtype); + return evaluator.mapSubtypesExpandTypeVars( + type, + /* conditionFilter */ undefined, + (subjectSubtypeExpanded) => { + if (isAnyOrUnknown(subjectSubtypeExpanded)) { + return convertToInstance(unexpandedSubtype); + } - if (isAnyOrUnknown(concreteSubtype)) { - return matchSubtype; - } + if 
(isClassInstance(subjectSubtypeExpanded)) { + let resultType: Type; - if (isClassInstance(concreteSubtype)) { - let resultType: Type; + if ( + evaluator.canAssignType( + expandedSubtype, + ClassType.cloneAsInstantiable(subjectSubtypeExpanded) + ) + ) { + resultType = subjectSubtypeExpanded; + } else if ( + evaluator.canAssignType( + ClassType.cloneAsInstantiable(subjectSubtypeExpanded), + expandedSubtype + ) + ) { + resultType = addConditionToType( + convertToInstance(unexpandedSubtype), + getTypeCondition(subjectSubtypeExpanded) + ); - if ( - evaluator.canAssignType( - expandedSubtype, - ClassType.cloneAsInstantiable(concreteSubtype), - new DiagnosticAddendum() - ) - ) { - resultType = matchSubtype; - } else if ( - evaluator.canAssignType( - ClassType.cloneAsInstantiable(concreteSubtype), - expandedSubtype, - new DiagnosticAddendum() - ) - ) { - resultType = convertToInstance(unexpandedSubtype); - } else { - return undefined; - } + // Try to retain the type arguments for the pattern class type. + if (isInstantiableClass(unexpandedSubtype) && isClassInstance(subjectSubtypeExpanded)) { + if ( + ClassType.isSpecialBuiltIn(unexpandedSubtype) || + unexpandedSubtype.details.typeParameters.length > 0 + ) { + const typeVarMap = new TypeVarMap(getTypeVarScopeId(unexpandedSubtype)); + const unspecializedMatchType = ClassType.cloneForSpecialization( + unexpandedSubtype, + /* typeArguments */ undefined, + /* isTypeArgumentExplicit */ false + ); + + const matchTypeInstance = ClassType.cloneAsInstance(unspecializedMatchType); + if ( + evaluator.populateTypeVarMapBasedOnExpectedType( + matchTypeInstance, + subjectSubtypeExpanded, + typeVarMap, + [] + ) + ) { + resultType = applySolvedTypeVars( + matchTypeInstance, + typeVarMap, + /* unknownIfNotFound */ true + ) as ClassType; + } + } + } + } else { + return undefined; + } - // Are there any positional arguments? If so, try to get the mappings for - // these arguments by fetching the __match_args__ symbol from the class. 
- let positionalArgNames: string[] = []; - if (pattern.arguments.some((arg) => !arg.name)) { - positionalArgNames = getPositionalMatchArgNames(evaluator, expandedSubtype); - } + // Are there any positional arguments? If so, try to get the mappings for + // these arguments by fetching the __match_args__ symbol from the class. + let positionalArgNames: string[] = []; + if (pattern.arguments.some((arg) => !arg.name)) { + positionalArgNames = getPositionalMatchArgNames(evaluator, expandedSubtype); + } - let isMatchValid = true; - pattern.arguments.forEach((arg, index) => { - const narrowedArgType = narrowTypeOfClassPatternArgument( - evaluator, - arg, - index, - positionalArgNames, - expandedSubtype - ); + let isMatchValid = true; + pattern.arguments.forEach((arg, index) => { + // Narrow the arg pattern. It's possible that the actual type of the object + // being matched is a subtype of the resultType, so it might contain additional + // attributes that we don't know about. + const narrowedArgType = narrowTypeOfClassPatternArgument( + evaluator, + arg, + index, + positionalArgNames, + resultType, + isPositiveTest + ); - if (isNever(narrowedArgType)) { - isMatchValid = false; - } - }); + if (isNever(narrowedArgType)) { + isMatchValid = false; + } + }); - if (isMatchValid) { - return resultType; + if (isMatchValid) { + return resultType; + } } - } - return undefined; - }); + return undefined; + } + ); } return undefined; @@ -519,28 +651,40 @@ function narrowTypeBasedOnClassPattern( ); } +// Narrows the pattern provided for a class pattern argument. 
function narrowTypeOfClassPatternArgument( evaluator: TypeEvaluator, arg: PatternClassArgumentNode, argIndex: number, positionalArgNames: string[], - classType: ClassType + matchType: Type, + isPositiveTest: boolean ) { let argName: string | undefined; + if (arg.name) { argName = arg.name.value; } else if (argIndex < positionalArgNames.length) { argName = positionalArgNames[argIndex]; } + if (isAnyOrUnknown(matchType)) { + return matchType; + } + + if (!isClass(matchType)) { + return UnknownType.create(); + } + const useSelfForPattern = - classPatternSpecialCases.some((className) => classType.details.fullName === className) && + isClass(matchType) && + classPatternSpecialCases.some((className) => matchType.details.fullName === className) && argIndex === 0 && !arg.name; let argType: Type | undefined; if (useSelfForPattern) { - argType = ClassType.cloneAsInstance(classType); + argType = ClassType.cloneAsInstance(matchType); } else { if (argName) { argType = evaluator.useSpeculativeMode(arg, () => @@ -548,50 +692,85 @@ function narrowTypeOfClassPatternArgument( // not technically an ExpressionNode, but it is OK to use it in this context. evaluator.getTypeFromObjectMember( arg as any as ExpressionNode, - ClassType.cloneAsInstance(classType), + ClassType.cloneAsInstance(matchType), argName! ) )?.type; } if (!argType) { + if (!isPositiveTest) { + return matchType; + } + + // If the class type in question is "final", we know that no additional + // attributes can be added by subtypes, so it's safe to eliminate this + // type entirely. 
+ if (ClassType.isFinal(matchType)) { + return NeverType.createNever(); + } + argType = UnknownType.create(); } } - return narrowTypeBasedOnPattern(evaluator, argType, arg.pattern, /* isPositiveTest */ true); + return narrowTypeBasedOnPattern(evaluator, argType, arg.pattern, isPositiveTest); } function narrowTypeBasedOnValuePattern( evaluator: TypeEvaluator, - type: Type, + subjectType: Type, pattern: PatternValueNode, isPositiveTest: boolean ): Type { - if (!isPositiveTest) { - // Never narrow in negative case. - return type; - } - const valueType = evaluator.getTypeOfExpression(pattern.expression).type; const narrowedSubtypes: Type[] = []; evaluator.mapSubtypesExpandTypeVars( valueType, /* conditionFilter */ undefined, - (leftSubtypeExpanded, leftSubtypeUnexpanded) => { + (valueSubtypeExpanded, valueSubtypeUnexpanded) => { narrowedSubtypes.push( evaluator.mapSubtypesExpandTypeVars( - type, - getTypeCondition(leftSubtypeExpanded), - (_, rightSubtypeUnexpanded) => { - if (isNever(leftSubtypeExpanded) || isNever(rightSubtypeUnexpanded)) { - return NeverType.create(); + subjectType, + getTypeCondition(valueSubtypeExpanded), + (subjectSubtypeExpanded) => { + // If this is a negative test, see if it's an enum value. 
+ if (!isPositiveTest) { + if ( + isClassInstance(subjectSubtypeExpanded) && + ClassType.isEnumClass(subjectSubtypeExpanded) && + !isLiteralType(subjectSubtypeExpanded) && + isClassInstance(valueSubtypeExpanded) && + isSameWithoutLiteralValue(subjectSubtypeExpanded, valueSubtypeExpanded) && + isLiteralType(valueSubtypeExpanded) + ) { + const allEnumTypes = enumerateLiteralsForType(evaluator, subjectSubtypeExpanded); + if (allEnumTypes) { + return combineTypes( + allEnumTypes.filter( + (enumType) => !ClassType.isLiteralValueSame(valueSubtypeExpanded, enumType) + ) + ); + } + } else if ( + isClassInstance(subjectSubtypeExpanded) && + isClassInstance(valueSubtypeExpanded) && + ClassType.isLiteralValueSame(valueSubtypeExpanded, subjectSubtypeExpanded) + ) { + return undefined; + } + + return subjectSubtypeExpanded; + } + + if (isNever(valueSubtypeExpanded) || isNever(subjectSubtypeExpanded)) { + return NeverType.createNever(); } - if (isAnyOrUnknown(leftSubtypeExpanded) || isAnyOrUnknown(rightSubtypeUnexpanded)) { + if (isAnyOrUnknown(valueSubtypeExpanded) || isAnyOrUnknown(subjectSubtypeExpanded)) { // If either type is "Unknown" (versus Any), propagate the Unknown. - return isUnknown(leftSubtypeExpanded) || isUnknown(rightSubtypeUnexpanded) + return isUnknown(valueSubtypeExpanded) || isUnknown(subjectSubtypeExpanded) ? UnknownType.create() : AnyType.create(); } @@ -600,15 +779,15 @@ function narrowTypeBasedOnValuePattern( // value subtype and matching subtype. const returnType = evaluator.useSpeculativeMode(pattern.expression, () => evaluator.getTypeFromMagicMethodReturn( - leftSubtypeExpanded, - [rightSubtypeUnexpanded], + valueSubtypeExpanded, + [subjectSubtypeExpanded], '__eq__', pattern.expression, /* expectedType */ undefined ) ); - return returnType ? leftSubtypeUnexpanded : undefined; + return returnType ? 
valueSubtypeUnexpanded : undefined; } ) ); @@ -693,20 +872,32 @@ function getSequencePatternInfo( subtype, entryTypes: [concreteSubtype], isIndeterminateLength: true, - isTuple: false, }); - } else if (isClassInstance(concreteSubtype)) { + return; + } + + if (isClassInstance(concreteSubtype)) { + if (ClassType.isBuiltIn(concreteSubtype, 'object')) { + sequenceInfo.push({ + subtype, + entryTypes: [convertToInstance(concreteSubtype)], + isIndeterminateLength: true, + isObject: true, + }); + return; + } + for (const mroClass of concreteSubtype.details.mro) { if (!isInstantiableClass(mroClass)) { break; } - // Strings and bytes are explicitly excluded. - if (ClassType.isBuiltIn(mroClass, 'str')) { - break; - } - - if (ClassType.isBuiltIn(mroClass, 'bytes')) { + // Strings, bytes, and bytearray are explicitly excluded. + if ( + ClassType.isBuiltIn(mroClass, 'str') || + ClassType.isBuiltIn(mroClass, 'bytes') || + ClassType.isBuiltIn(mroClass, 'bytearray') + ) { break; } @@ -723,12 +914,13 @@ function getSequencePatternInfo( if (mroClassToSpecialize) { const specializedSequence = partiallySpecializeType(mroClassToSpecialize, concreteSubtype) as ClassType; + if (isTupleClass(specializedSequence)) { if (specializedSequence.tupleTypeArguments) { - if (isOpenEndedTupleClass(specializedSequence)) { + if (isUnboundedTupleClass(specializedSequence)) { sequenceInfo.push({ subtype, - entryTypes: [specializedSequence.tupleTypeArguments[0]], + entryTypes: [combineTypes(specializedSequence.tupleTypeArguments.map((t) => t.type))], isIndeterminateLength: true, isTuple: true, }); @@ -740,7 +932,7 @@ function getSequencePatternInfo( ) { sequenceInfo.push({ subtype, - entryTypes: specializedSequence.tupleTypeArguments, + entryTypes: specializedSequence.tupleTypeArguments.map((t) => t.type), isIndeterminateLength: false, isTuple: true, }); @@ -756,7 +948,6 @@ function getSequencePatternInfo( : UnknownType.create(), ], isIndeterminateLength: true, - isTuple: false, }); } } @@ -772,45 
+963,56 @@ function getTypeForPatternSequenceEntry( sequenceInfo: SequencePatternInfo, entryIndex: number, entryCount: number, - starEntryIndex: number | undefined + starEntryIndex: number | undefined, + unpackStarEntry: boolean, + isSubjectObject: boolean ): Type { if (sequenceInfo.isIndeterminateLength) { - if (starEntryIndex === entryIndex) { - const tupleClassType = evaluator.getBuiltInType(node, 'tuple'); - if (tupleClassType && isInstantiableClass(tupleClassType)) { - return ClassType.cloneAsInstance( - specializeTupleClass(tupleClassType, [ - sequenceInfo.entryTypes[0], - AnyType.create(/* isEllipsis */ true), - ]) - ); - } else { - return UnknownType.create(); + let entryType = sequenceInfo.entryTypes[0]; + + // If the subject is typed as an "object", then the star entry + // is simply a list[object]. Without this special case, the list + // will be typed based on the union of all elements in the sequence. + if (isSubjectObject) { + const objectType = evaluator.getBuiltInObject(node, 'object'); + if (objectType && isClassInstance(objectType)) { + entryType = objectType; } - } else { - return sequenceInfo.entryTypes[0]; } - } else if (starEntryIndex === undefined || entryIndex < starEntryIndex) { + + if (!unpackStarEntry && entryIndex === starEntryIndex && !isNever(entryType)) { + entryType = wrapTypeInList(evaluator, node, entryType); + } + + return entryType; + } + + if (starEntryIndex === undefined || entryIndex < starEntryIndex) { return sequenceInfo.entryTypes[entryIndex]; - } else if (entryIndex === starEntryIndex) { - // Create a tuple out of the entries that map to the star entry. 
- const starEntryTypes = sequenceInfo.entryTypes.slice( - starEntryIndex, - starEntryIndex + sequenceInfo.entryTypes.length - entryCount + 1 - ); - const tupleClassType = evaluator.getBuiltInType(node, 'tuple'); - if (tupleClassType && isInstantiableClass(tupleClassType)) { - return ClassType.cloneAsInstance(specializeTupleClass(tupleClassType, starEntryTypes)); - } else { - return UnknownType.create(); + } + + if (entryIndex === starEntryIndex) { + // Create a list out of the entries that map to the star entry. + // Note that we strip literal types here. + const starEntryTypes = sequenceInfo.entryTypes + .slice(starEntryIndex, starEntryIndex + sequenceInfo.entryTypes.length - entryCount + 1) + .map((type) => stripLiteralValue(type)); + + let entryType = combineTypes(starEntryTypes); + + if (!unpackStarEntry) { + entryType = wrapTypeInList(evaluator, node, entryType); } - } else { - // The entry index is past the index of the star entry, so we need - // to index from the end of the sequence rather than the start. - const itemIndex = sequenceInfo.entryTypes.length - (entryCount - entryIndex); - assert(itemIndex >= 0 && itemIndex < sequenceInfo.entryTypes.length); - return sequenceInfo.entryTypes[itemIndex]; + + return entryType; } + + // The entry index is past the index of the star entry, so we need + // to index from the end of the sequence rather than the start. + const itemIndex = sequenceInfo.entryTypes.length - (entryCount - entryIndex); + assert(itemIndex >= 0 && itemIndex < sequenceInfo.entryTypes.length); + + return sequenceInfo.entryTypes[itemIndex]; } // Recursively assigns the specified type to the pattern and any capture @@ -819,6 +1021,7 @@ export function assignTypeToPatternTargets( evaluator: TypeEvaluator, type: Type, isTypeIncomplete: boolean, + isSubjectObject: boolean, pattern: PatternAtomNode ) { // Further narrow the type based on this pattern. 
@@ -842,12 +1045,14 @@ export function assignTypeToPatternTargets( info, index, pattern.entries.length, - pattern.starEntryIndex + pattern.starEntryIndex, + /* unpackStarEntry */ false, + isSubjectObject ) ) ); - assignTypeToPatternTargets(evaluator, entryType, isTypeIncomplete, entry); + assignTypeToPatternTargets(evaluator, entryType, isTypeIncomplete, /* isSubjectObject */ false, entry); }); break; } @@ -858,7 +1063,7 @@ export function assignTypeToPatternTargets( } pattern.orPatterns.forEach((orPattern) => { - assignTypeToPatternTargets(evaluator, type, isTypeIncomplete, orPattern); + assignTypeToPatternTargets(evaluator, type, isTypeIncomplete, isSubjectObject, orPattern); // OR patterns are evaluated left to right, so we can narrow // the type as we go. @@ -943,8 +1148,20 @@ export function assignTypeToPatternTargets( const valueType = combineTypes(valueTypes); if (mappingEntry.nodeType === ParseNodeType.PatternMappingKeyEntry) { - assignTypeToPatternTargets(evaluator, keyType, isTypeIncomplete, mappingEntry.keyPattern); - assignTypeToPatternTargets(evaluator, valueType, isTypeIncomplete, mappingEntry.valuePattern); + assignTypeToPatternTargets( + evaluator, + keyType, + isTypeIncomplete, + /* isSubjectObject */ false, + mappingEntry.keyPattern + ); + assignTypeToPatternTargets( + evaluator, + valueType, + isTypeIncomplete, + /* isSubjectObject */ false, + mappingEntry.valuePattern + ); } else if (mappingEntry.nodeType === ParseNodeType.PatternMappingExpandEntry) { const dictClass = evaluator.getBuiltInType(pattern, 'dict'); const strType = evaluator.getBuiltInObject(pattern, 'str'); @@ -974,8 +1191,8 @@ export function assignTypeToPatternTargets( evaluator.mapSubtypesExpandTypeVars(type, /* conditionFilter */ undefined, (expandedSubtype) => { if (isClassInstance(expandedSubtype)) { - doForEachSubtype(type, (matchSubtype) => { - const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(matchSubtype); + doForEachSubtype(type, (subjectSubtype) => { + const 
concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subjectSubtype); if (isAnyOrUnknown(concreteSubtype)) { pattern.arguments.forEach((arg, index) => { @@ -998,7 +1215,8 @@ export function assignTypeToPatternTargets( arg, index, positionalArgNames, - ClassType.cloneAsInstantiable(expandedSubtype) + ClassType.cloneAsInstantiable(expandedSubtype), + /* isPositiveTest */ true ); argTypes[index].push(narrowedArgType); }); @@ -1014,7 +1232,13 @@ export function assignTypeToPatternTargets( }); pattern.arguments.forEach((arg, index) => { - assignTypeToPatternTargets(evaluator, combineTypes(argTypes[index]), isTypeIncomplete, arg.pattern); + assignTypeToPatternTargets( + evaluator, + combineTypes(argTypes[index]), + isTypeIncomplete, + /* isSubjectObject */ false, + arg.pattern + ); }); break; } @@ -1027,3 +1251,71 @@ export function assignTypeToPatternTargets( } } } + +function wrapTypeInList(evaluator: TypeEvaluator, node: ParseNode, type: Type): Type { + if (isNever(type)) { + return type; + } + + const listObjectType = convertToInstance(evaluator.getBuiltInObject(node, 'list')); + if (listObjectType && isClassInstance(listObjectType)) { + return ClassType.cloneForSpecialization(listObjectType, [type], /* isTypeArgumentExplicit */ true); + } + + return UnknownType.create(); +} + +export function validateClassPattern(evaluator: TypeEvaluator, pattern: PatternClassNode) { + const exprType = evaluator.getTypeOfExpression( + pattern.className, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; + + if (isAnyOrUnknown(exprType)) { + return; + } + + // Check for certain uses of type aliases that generate runtime exceptions. 
+ if ( + exprType.typeAliasInfo && + isInstantiableClass(exprType) && + exprType.typeArguments && + exprType.isTypeArgumentExplicit + ) { + evaluator.addDiagnostic( + getFileInfo(pattern).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.classPatternTypeAlias().format({ type: evaluator.printType(exprType) }), + pattern.className + ); + } else if (!isInstantiableClass(exprType) || exprType.includeSubclasses) { + evaluator.addDiagnostic( + getFileInfo(pattern).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.DiagnosticAddendum.typeNotClass().format({ type: evaluator.printType(exprType) }), + pattern.className + ); + } else { + const isBuiltIn = classPatternSpecialCases.some((className) => exprType.details.fullName === className); + + // If it's a special-case builtin class, only one positional argument is allowed. + if (isBuiltIn) { + if (pattern.arguments.length > 1) { + evaluator.addDiagnostic( + getFileInfo(pattern).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.classPatternBuiltInArgCount(), + pattern.arguments[1] + ); + } else if (pattern.arguments.length === 1 && pattern.arguments[0].name) { + evaluator.addDiagnostic( + getFileInfo(pattern).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.classPatternBuiltInArgPositional(), + pattern.arguments[0].name + ); + } + } + } +} diff --git a/packages/pyright-internal/src/analyzer/program.ts b/packages/pyright-internal/src/analyzer/program.ts index c2d60b951005..e3fb3c7d6786 100644 --- a/packages/pyright-internal/src/analyzer/program.ts +++ b/packages/pyright-internal/src/analyzer/program.ts @@ -14,17 +14,20 @@ import { CallHierarchyIncomingCall, CallHierarchyItem, CallHierarchyOutgoingCall, + CompletionList, DocumentHighlight, MarkupKind, } from 'vscode-languageserver-types'; +import { 
ApiDocsEntry, ApiDocsResponse } from '../apidocsProtocol'; import { OperationCanceledException, throwIfCancellationRequested } from '../common/cancellationUtils'; +import { removeArrayElements } from '../common/collectionUtils'; import { ConfigOptions, ExecutionEnvironment } from '../common/configOptions'; import { ConsoleInterface, StandardConsole } from '../common/console'; import { assert } from '../common/debug'; import { Diagnostic } from '../common/diagnostic'; import { FileDiagnostics } from '../common/diagnosticSink'; -import { FileEditAction, TextEditAction } from '../common/editAction'; +import { FileEditAction, FileEditActions, TextEditAction } from '../common/editAction'; import { LanguageServiceExtension } from '../common/extensibility'; import { LogTracker } from '../common/logTracker'; import { @@ -32,6 +35,7 @@ import { getDirectoryPath, getFileName, getRelativePath, + isFile, makeDirectories, normalizePath, normalizePathCase, @@ -41,7 +45,6 @@ import { convertPositionToOffset, convertRangeToTextRange } from '../common/posi import { computeCompletionSimilarity } from '../common/stringUtils'; import { DocumentRange, doesRangeContain, doRangesIntersect, Position, Range } from '../common/textRange'; import { Duration, timingStats } from '../common/timing'; -import { ApiDocsEntry, ApiDocsResponse } from '../apidocsProtocol'; import { AutoImporter, AutoImportResult, @@ -49,21 +52,28 @@ import { ModuleSymbolMap, } from '../languageService/autoImporter'; import { CallHierarchyProvider } from '../languageService/callHierarchyProvider'; -import { AbbreviationMap, CompletionOptions, CompletionResults } from '../languageService/completionProvider'; +import { + AbbreviationMap, + CompletionMap, + CompletionOptions, + CompletionResultsList, +} from '../languageService/completionProvider'; import { DefinitionFilter } from '../languageService/definitionProvider'; +import { DocumentSymbolCollector } from '../languageService/documentSymbolCollector'; import { 
IndexOptions, IndexResults, WorkspaceSymbolCallback } from '../languageService/documentSymbolProvider'; import { HoverResults } from '../languageService/hoverProvider'; import { ReferenceCallback, ReferencesResult } from '../languageService/referencesProvider'; +import { RenameModuleProvider } from '../languageService/renameModuleProvider'; import { SignatureHelpResults } from '../languageService/signatureHelpProvider'; import { ParameterCategory, ParseNodeType } from '../parser/parseNodes'; import { ParseResults } from '../parser/parser'; -import { ImportLookupResult } from './analyzerFileInfo'; +import { AbsoluteModuleDescriptor, ImportLookupResult } from './analyzerFileInfo'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; import { CircularDependency } from './circularDependency'; -import { AliasDeclaration, DeclarationType, VariableDeclaration } from './declaration'; +import { DeclarationType, isAliasDeclaration, VariableDeclaration } from './declaration'; +import { convertDocStringToMarkdown, convertDocStringToPlainText } from './docStringConversion'; import { ImportedModuleDescriptor, ImportResolver } from './importResolver'; import { ImportResult, ImportType } from './importResult'; -import { ModuleInfo } from './packageTypeReport'; import { findNodeByOffset, getDocString, printExpression, PrintExpressionFlags } from './parseTreeUtils'; import { Scope } from './scope'; import { getScopeForNode } from './scopeUtils'; @@ -77,7 +87,6 @@ import { createTypeEvaluatorWithTracker } from './typeEvaluatorWithTracker'; import { PrintTypeFlags } from './typePrinter'; import { ClassType, FunctionType, isClass, isFunction, isModule, isOverloadedFunction, Type } from './types'; import { TypeStubWriter } from './typeStubWriter'; -import { convertDocStringToMarkdown, convertDocStringToPlainText } from './docStringConversion'; const _maxImportDepth = 256; @@ -95,7 +104,16 @@ export interface SourceFileInfo { isThirdPartyImport: boolean; isThirdPartyPyTypedPresent: 
boolean; diagnosticsVersion?: number | undefined; + builtinsImport?: SourceFileInfo | undefined; + ipythonDisplayImport?: SourceFileInfo | undefined; + + // Information about the chained source file + // Chained source file is not supposed to exist on file system but + // must exist in the program's source file list. Module level + // scope of the chained source file will be inserted before + // current file's scope. + chainedSourceFile?: SourceFileInfo | undefined; // Information about why the file is included in the program // and its relation to other source files in the program. @@ -136,6 +154,12 @@ interface UpdateImportInfo { export type PreCheckCallback = (parseResults: ParseResults, evaluator: TypeEvaluator) => void; +export interface OpenFileOptions { + isTracked: boolean; + ipythonMode: boolean; + chainedFilePath: string | undefined; +} + // Container for all of the files that are being analyzed. Files // can fall into one or more of the following categories: // Tracked - specified by the config options @@ -274,7 +298,7 @@ export class Program { filePath: string, version: number | null, contents: TextDocumentContentChangeEvent[], - isTracked = false + options?: OpenFileOptions ) { let sourceFileInfo = this._getSourceFileInfoFromPath(filePath); if (!sourceFileInfo) { @@ -286,11 +310,18 @@ export class Program { /* isThirdPartyImport */ false, /* isInPyTypedPackage */ false, this._console, - this._logTracker + this._logTracker, + options?.ipythonMode ?? false ); + + // ChainedSourceFile can only be set by open file. And once it is set, + // it can't be changed. It can only be removed (deleted). File from fs + // can't set chained source file. + const chainedFilePath = options?.chainedFilePath; sourceFileInfo = { sourceFile, - isTracked: isTracked, + isTracked: options?.isTracked ?? false, + chainedSourceFile: chainedFilePath ? 
this._getSourceFileInfoFromPath(chainedFilePath) : undefined, isOpenByClient: true, isTypeshedFile: false, isThirdPartyImport: false, @@ -320,19 +351,28 @@ export class Program { if (sourceFileInfo) { sourceFileInfo.isOpenByClient = false; sourceFileInfo.sourceFile.setClientVersion(null, []); + + // There is no guarantee that content is saved before the file is closed. + // We need to mark the file dirty so we can re-analyze next time. + // This won't matter much for OpenFileOnly users, but it will matter for + // people who use diagnosticMode Workspace. + if (sourceFileInfo.sourceFile.didContentsChangeOnDisk()) { + sourceFileInfo.sourceFile.markDirty(); + this._markFileDirtyRecursive(sourceFileInfo, new Map()); + } } return this._removeUnneededFiles(); } - markAllFilesDirty(evenIfContentsAreSame: boolean) { + markAllFilesDirty(evenIfContentsAreSame: boolean, indexingNeeded = true) { const markDirtyMap = new Map(); this._sourceFileList.forEach((sourceFileInfo) => { if (evenIfContentsAreSame) { - sourceFileInfo.sourceFile.markDirty(); + sourceFileInfo.sourceFile.markDirty(indexingNeeded); } else if (sourceFileInfo.sourceFile.didContentsChangeOnDisk()) { - sourceFileInfo.sourceFile.markDirty(); + sourceFileInfo.sourceFile.markDirty(indexingNeeded); // Mark any files that depend on this file as dirty // also. This will retrigger analysis of these other files. @@ -345,11 +385,20 @@ export class Program { } } - markFilesDirty(filePaths: string[], evenIfContentsAreSame: boolean) { + markFilesDirty(filePaths: string[], evenIfContentsAreSame: boolean, indexingNeeded = true) { const markDirtyMap = new Map(); filePaths.forEach((filePath) => { const sourceFileInfo = this._getSourceFileInfoFromPath(filePath); if (sourceFileInfo) { + const fileName = getFileName(filePath); + + // Handle builtins and __builtins__ specially. They are implicitly + // included by all source files. 
+ if (fileName === 'builtins.pyi' || fileName === '__builtins__.pyi') { + this.markAllFilesDirty(evenIfContentsAreSame, indexingNeeded); + return; + } + // If !evenIfContentsAreSame, see if the on-disk contents have // changed. If the file is open, the on-disk contents don't matter // because we'll receive updates directly from the client. @@ -357,7 +406,7 @@ export class Program { evenIfContentsAreSame || (!sourceFileInfo.isOpenByClient && sourceFileInfo.sourceFile.didContentsChangeOnDisk()) ) { - sourceFileInfo.sourceFile.markDirty(); + sourceFileInfo.sourceFile.markDirty(indexingNeeded); // Mark any files that depend on this file as dirty // also. This will retrigger analysis of these other files. @@ -379,6 +428,10 @@ export class Program { return this._sourceFileList.filter((s) => s.isTracked); } + getOpened(): SourceFileInfo[] { + return this._sourceFileList.filter((s) => s.isOpenByClient); + } + getFilesToAnalyzeCount() { let sourceFileCount = 0; @@ -734,10 +787,12 @@ export class Program { this._evaluator = createTypeEvaluatorWithTracker( this._lookUpImport, { - disableInferenceForPyTypedSources: this._configOptions.disableInferenceForPyTypedSources, printTypeFlags: Program._getPrintTypeFlags(this._configOptions), logCalls: this._configOptions.logTypeEvaluationTime, minimumLoggingThreshold: this._configOptions.typeEvaluationTimeThreshold, + analyzeUnannotatedFunctions: this._configOptions.analyzeUnannotatedFunctions, + evaluateUnknownImportsAsAny: !!this._configOptions.evaluateUnknownImportsAsAny, + verifyTypeCacheEvaluatorFlags: !!this._configOptions.internalTestMode, }, this._logTracker, this._configOptions.logTypeEvaluationTime @@ -784,24 +839,74 @@ export class Program { this._parseFile(fileToAnalyze, content); - // We need to parse and bind the builtins import first. 
- let builtinsScope: Scope | undefined; - if (fileToAnalyze.builtinsImport && fileToAnalyze.builtinsImport !== fileToAnalyze) { - this._bindFile(fileToAnalyze.builtinsImport); + const getScopeIfAvailable = (fileInfo: SourceFileInfo | undefined) => { + if (!fileInfo || fileInfo === fileToAnalyze) { + return undefined; + } + + this._bindFile(fileInfo); + if (fileInfo.sourceFile.isFileDeleted()) { + return undefined; + } - // Get the builtins scope to pass to the binding pass. - const parseResults = fileToAnalyze.builtinsImport.sourceFile.getParseResults(); - if (parseResults) { - builtinsScope = AnalyzerNodeInfo.getScope(parseResults.parseTree); - assert(builtinsScope !== undefined); + const parseResults = fileInfo.sourceFile.getParseResults(); + if (!parseResults) { + return undefined; } + + const scope = AnalyzerNodeInfo.getScope(parseResults.parseTree); + assert(scope !== undefined); + + return scope; + }; + + let builtinsScope: Scope | undefined; + if (fileToAnalyze.builtinsImport && fileToAnalyze.builtinsImport !== fileToAnalyze) { + // If it is not builtin module itself, we need to parse and bind + // the ipython display import if required. Otherwise, get builtin module. + builtinsScope = + getScopeIfAvailable(fileToAnalyze.chainedSourceFile) ?? + getScopeIfAvailable(fileToAnalyze.ipythonDisplayImport) ?? + getScopeIfAvailable(fileToAnalyze.builtinsImport); } fileToAnalyze.sourceFile.bind(this._configOptions, this._lookUpImport, builtinsScope); } - private _lookUpImport = (filePath: string): ImportLookupResult | undefined => { - const sourceFileInfo = this._getSourceFileInfoFromPath(filePath); + private _lookUpImport = (filePathOrModule: string | AbsoluteModuleDescriptor): ImportLookupResult | undefined => { + let sourceFileInfo: SourceFileInfo | undefined; + + if (typeof filePathOrModule === 'string') { + sourceFileInfo = this._getSourceFileInfoFromPath(filePathOrModule); + } else { + // Resolve the import. 
+ const importResult = this._importResolver.resolveImport( + filePathOrModule.importingFilePath, + this._configOptions.findExecEnvironment(filePathOrModule.importingFilePath), + { + leadingDots: 0, + nameParts: filePathOrModule.nameParts, + importedSymbols: undefined, + } + ); + + if (importResult.isImportFound && !importResult.isNativeLib && importResult.resolvedPaths.length > 0) { + let resolvedPath = importResult.resolvedPaths[importResult.resolvedPaths.length - 1]; + if (resolvedPath) { + // See if the source file already exists in the program. + sourceFileInfo = this._getSourceFileInfoFromPath(resolvedPath); + + if (!sourceFileInfo) { + resolvedPath = normalizePathCase(this._fs, resolvedPath); + + // Start tracking the source file. + this.addTrackedFile(resolvedPath); + sourceFileInfo = this._getSourceFileInfoFromPath(resolvedPath); + } + } + } + } + if (!sourceFileInfo) { return undefined; } @@ -810,7 +915,7 @@ export class Program { // Bind the file if it's not already bound. Don't count this time // against the type checker. timingStats.typeCheckerTime.subtractFromTime(() => { - this._bindFile(sourceFileInfo); + this._bindFile(sourceFileInfo!); }); } @@ -822,9 +927,12 @@ export class Program { const parseResults = sourceFileInfo.sourceFile.getParseResults(); const moduleNode = parseResults!.parseTree; + const dunderAllInfo = AnalyzerNodeInfo.getDunderAllInfo(parseResults!.parseTree); + return { symbolTable, - dunderAllNames: AnalyzerNodeInfo.getDunderAllInfo(parseResults!.parseTree)?.names, + dunderAllNames: dunderAllInfo?.names, + usesUnsupportedDunderAllForm: dunderAllInfo?.usesUnsupportedDunderAllForm ?? 
false, get docString() { return getDocString(moduleNode.statements); }, @@ -1011,28 +1119,35 @@ export class Program { firstSourceFile.sourceFile.addCircularDependency(circDep); } - private _markFileDirtyRecursive(sourceFileInfo: SourceFileInfo, markMap: Map) { + private _markFileDirtyRecursive( + sourceFileInfo: SourceFileInfo, + markMap: Map, + forceRebinding = false + ) { const filePath = normalizePathCase(this._fs, sourceFileInfo.sourceFile.getFilePath()); // Don't mark it again if it's already been visited. if (!markMap.has(filePath)) { - sourceFileInfo.sourceFile.markReanalysisRequired(); + sourceFileInfo.sourceFile.markReanalysisRequired(forceRebinding); markMap.set(filePath, true); sourceFileInfo.importedBy.forEach((dep) => { - this._markFileDirtyRecursive(dep, markMap); + // Changes on chained source file can change symbols in the symbol table and + // dependencies on the dependent file. Force rebinding. + const forceRebinding = dep.chainedSourceFile === sourceFileInfo; + this._markFileDirtyRecursive(dep, markMap, forceRebinding); }); } } getTextOnRange(filePath: string, range: Range, token: CancellationToken): string | undefined { - const sourceFileInfo = this._sourceFileMap.get(filePath); + const sourceFileInfo = this._getSourceFileInfoFromPath(filePath); if (!sourceFileInfo) { return undefined; } const sourceFile = sourceFileInfo.sourceFile; - const fileContents = sourceFile.getFileContents(); + const fileContents = sourceFile.getOpenFileContents(); if (fileContents === undefined) { // this only works with opened file return undefined; @@ -1067,7 +1182,7 @@ export class Program { } const sourceFile = sourceFileInfo.sourceFile; - const fileContents = sourceFile.getFileContents(); + const fileContents = sourceFile.getOpenFileContents(); if (fileContents === undefined) { // this only works with opened file return []; @@ -1099,7 +1214,7 @@ export class Program { this._importResolver, parseTree, range.start, - new Set(), + new CompletionMap(), map, { 
lazyEdit, @@ -1211,6 +1326,30 @@ export class Program { }); } + getTypeDefinitionsForPosition( + filePath: string, + position: Position, + token: CancellationToken + ): DocumentRange[] | undefined { + return this._runEvaluatorWithCancellationToken(token, () => { + const sourceFileInfo = this._getSourceFileInfoFromPath(filePath); + if (!sourceFileInfo) { + return undefined; + } + + this._bindFile(sourceFileInfo); + + const execEnv = this._configOptions.findExecEnvironment(filePath); + return sourceFileInfo.sourceFile.getTypeDefinitionsForPosition( + this._createSourceMapper(execEnv, /* mapCompiled */ false, /* preferStubs */ true), + position, + this._evaluator!, + filePath, + token + ); + }); + } + reportReferencesForPosition( filePath: string, position: Position, @@ -1252,19 +1391,24 @@ export class Program { !invokedFromUserFile || this._isUserCode(curSourceFileInfo) ) { - this._bindFile(curSourceFileInfo); + // See if the reference symbol's string is located somewhere within the file. + // If not, we can skip additional processing for the file. + const fileContents = curSourceFileInfo.sourceFile.getFileContent(); + if (!fileContents || fileContents.search(referencesResult.symbolName) >= 0) { + this._bindFile(curSourceFileInfo); + + curSourceFileInfo.sourceFile.addReferences( + referencesResult, + includeDeclaration, + this._evaluator!, + token + ); + } - curSourceFileInfo.sourceFile.addReferences( - referencesResult, - includeDeclaration, - this._evaluator!, - token - ); + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. + this._handleMemoryHighUsage(); } - - // This operation can consume significant memory, so check - // for situations where we need to discard the type cache. 
- this._handleMemoryHighUsage(); } // Make sure to include declarations regardless where they are defined @@ -1324,22 +1468,16 @@ export class Program { return undefined; } - let content: string | undefined = undefined; + const content = sourceFileInfo.sourceFile.getFileContent() ?? ''; if ( options.indexingForAutoImportMode && !sourceFileInfo.sourceFile.isStubFile() && - !sourceFileInfo.sourceFile.isThirdPartyPyTypedPresent() && - sourceFileInfo.sourceFile.getClientVersion() === undefined + !sourceFileInfo.sourceFile.isThirdPartyPyTypedPresent() ) { - try { - // Perf optimization. if py file doesn't contain __all__ - // No need to parse and bind. - content = this._fs.readFileSync(filePath, 'utf8'); - if (content.indexOf('__all__') < 0) { - return undefined; - } - } catch (error) { - content = undefined; + // Perf optimization. if py file doesn't contain __all__ + // No need to parse and bind. + if (content.indexOf('__all__') < 0) { + return undefined; } } @@ -1474,7 +1612,7 @@ export class Program { nameMap: AbbreviationMap | undefined, libraryMap: Map | undefined, token: CancellationToken - ): Promise { + ): Promise { const sourceFileInfo = this._getSourceFileInfoFromPath(filePath); if (!sourceFileInfo) { return undefined; @@ -1509,13 +1647,20 @@ export class Program { ); }); - ls.add(`found ${result?.completionList?.items.length ?? 'null'} items`); + ls.add(`found ${result?.completionMap?.size ?? 
'null'} items`); return result; } ); - if (!completionResult?.completionList || !this._extension?.completionListExtension) { - return completionResult; + const completionResultsList: CompletionResultsList = { + completionList: CompletionList.create(completionResult?.completionMap?.toArray()), + memberAccessInfo: completionResult?.memberAccessInfo, + autoImportInfo: completionResult?.autoImportInfo, + extensionInfo: completionResult?.extensionInfo, + }; + + if (!completionResult?.completionMap || !this._extension?.completionListExtension) { + return completionResultsList; } const parseResults = sourceFileInfo.sourceFile.getParseResults(); @@ -1523,7 +1668,7 @@ export class Program { const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); if (offset !== undefined) { await this._extension.completionListExtension.updateCompletionResults( - completionResult, + completionResultsList, parseResults, offset, token @@ -1531,7 +1676,7 @@ export class Program { } } - return completionResult; + return completionResultsList; } resolveCompletionItem( @@ -1573,6 +1718,92 @@ export class Program { }); } + renameModule(path: string, newPath: string, token: CancellationToken): FileEditAction[] | undefined { + return this._runEvaluatorWithCancellationToken(token, () => { + if (isFile(this._fs, path)) { + const fileInfo = this._getSourceFileInfoFromPath(path); + if (!fileInfo) { + return undefined; + } + } + + const renameModuleProvider = RenameModuleProvider.createForModule( + this._importResolver, + this._configOptions, + this._evaluator!, + path, + newPath, + token + ); + if (!renameModuleProvider) { + return undefined; + } + + this._processModuleReferences(renameModuleProvider, renameModuleProvider.lastModuleName, path); + return renameModuleProvider.getEdits(); + }); + } + + moveSymbolAtPosition( + filePath: string, + newFilePath: string, + position: Position, + token: CancellationToken + ): FileEditActions | undefined { + return 
this._runEvaluatorWithCancellationToken(token, () => { + const fileInfo = this._getSourceFileInfoFromPath(filePath); + if (!fileInfo) { + return undefined; + } + + this._bindFile(fileInfo); + const parseResults = fileInfo.sourceFile.getParseResults(); + if (!parseResults) { + return undefined; + } + + const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return undefined; + } + + const node = findNodeByOffset(parseResults.parseTree, offset); + if (node === undefined) { + return undefined; + } + + // If this isn't a name node, there are no references to be found. + if (node.nodeType !== ParseNodeType.Name) { + return undefined; + } + + const execEnv = this._configOptions.findExecEnvironment(filePath); + const declarations = DocumentSymbolCollector.getDeclarationsForNode( + node, + this._evaluator!, + /* resolveLocalNames */ false, + token, + this._createSourceMapper(execEnv) + ); + + const renameModuleProvider = RenameModuleProvider.createForSymbol( + this._importResolver, + this._configOptions, + this._evaluator!, + filePath, + newFilePath, + declarations, + token + ); + if (!renameModuleProvider) { + return undefined; + } + + this._processModuleReferences(renameModuleProvider, node.value, filePath); + return { edits: renameModuleProvider.getEdits(), fileOperations: [] }; + }); + } + renameSymbolAtPosition( filePath: string, position: Position, @@ -1601,6 +1832,36 @@ export class Program { return undefined; } + // We only allow renaming module alias, filter out any other alias decls. + removeArrayElements(referencesResult.declarations, (d) => { + if (!isAliasDeclaration(d)) { + return false; + } + + // We must have alias and decl node that point to import statement. + if (!d.usesLocalName || !d.node) { + return true; + } + + // d.node can't be ImportFrom if usesLocalName is true. + // but we are doing this for type checker. 
+ if (d.node.nodeType === ParseNodeType.ImportFrom) { + return true; + } + + // Check alias and what we are renaming is same thing. + if (d.node.alias?.value !== referencesResult.symbolName) { + return true; + } + + return false; + }); + + if (referencesResult.declarations.length === 0) { + // There is no symbol we can rename. + return undefined; + } + if ( !isDefaultWorkspace && referencesResult.declarations.some((d) => !this._isUserCode(this._getSourceFileInfoFromPath(d.path))) @@ -1609,11 +1870,6 @@ export class Program { return undefined; } - if (referencesResult.declarations.length === 0) { - // There is no symbol we can rename. - return undefined; - } - // Do we need to do a global search as well? if (referencesResult.requiresGlobalSearch && !isDefaultWorkspace) { for (const curSourceFileInfo of this._sourceFileList) { @@ -1941,6 +2197,47 @@ export class Program { })); } + test_createSourceMapper(execEnv: ExecutionEnvironment) { + return this._createSourceMapper(execEnv, /*mapCompiled*/ false); + } + + private _processModuleReferences( + renameModuleProvider: RenameModuleProvider, + filteringText: string, + currentFilePath: string + ) { + // _sourceFileList contains every user files that match "include" pattern including + // py file even if corresponding pyi exists. + for (const currentFileInfo of this._sourceFileList) { + // Make sure we only touch user code to prevent us + // from accidentally changing third party library or type stub. + if (!this._isUserCode(currentFileInfo)) { + continue; + } + + // If module name isn't mentioned in the current file, skip the file + // except the file that got actually renamed/moved. + // The file that got moved might have relative import paths we need to update. + const filePath = currentFileInfo.sourceFile.getFilePath(); + const content = currentFileInfo.sourceFile.getFileContent() ?? 
''; + if (filePath !== currentFilePath && content.indexOf(filteringText) < 0) { + continue; + } + + this._bindFile(currentFileInfo, content); + const parseResult = currentFileInfo.sourceFile.getParseResults(); + if (!parseResult) { + continue; + } + + renameModuleProvider.renameReferences(filePath, parseResult); + + // This operation can consume significant memory, so check + // for situations where we need to discard the type cache. + this._handleMemoryHighUsage(); + } + } + private _handleMemoryHighUsage() { const typeCacheSize = this._evaluator!.getTypeCacheSize(); @@ -1948,12 +2245,17 @@ export class Program { // Don't bother doing this until we hit this point because the heap usage may not // drop immediately after we empty the cache due to garbage collection timing. if (typeCacheSize > 750000 || this._parsedFileCount > 1000) { - const heapSizeInMb = Math.round(process.memoryUsage().heapUsed / (1024 * 1024)); + const memoryUsage = process.memoryUsage(); + + // If we use more than 90% of the available heap size, avoid a crash + // by emptying the type cache. + if (memoryUsage.heapUsed > memoryUsage.rss * 0.9) { + const heapSizeInMb = Math.round(memoryUsage.rss / (1024 * 1024)); + const heapUsageInMb = Math.round(memoryUsage.heapUsed / (1024 * 1024)); - // Don't allow the heap to get close to the 2GB limit imposed by - // the OS when running Node in a 32-bit process. - if (heapSizeInMb > 1536) { - this._console.info(`Emptying type cache to avoid heap overflow. Heap size used: ${heapSizeInMb}MB`); + this._console.info( + `Emptying type cache to avoid heap overflow. Used ${heapUsageInMb}MB out of ${heapSizeInMb}MB` + ); this._createNewEvaluator(); this._discardCachedParseResults(); this._parsedFileCount = 0; @@ -2018,7 +2320,10 @@ export class Program { // they are no longer referenced. 
fileInfo.imports.forEach((importedFile) => { const indexToRemove = importedFile.importedBy.findIndex((fi) => fi === fileInfo); - assert(indexToRemove >= 0); + if (indexToRemove < 0) { + return; + } + importedFile.importedBy.splice(indexToRemove, 1); // See if we need to remove the imported file because it @@ -2111,7 +2416,7 @@ export class Program { return false; } - private _createSourceMapper(execEnv: ExecutionEnvironment, mapCompiled?: boolean) { + private _createSourceMapper(execEnv: ExecutionEnvironment, mapCompiled?: boolean, preferStubs?: boolean) { const sourceMapper = new SourceMapper( this._importResolver, execEnv, @@ -2125,7 +2430,8 @@ export class Program { return this.getBoundSourceFile(implFilePath); }, (f) => this.getBoundSourceFile(f), - mapCompiled ?? false + mapCompiled ?? false, + preferStubs ?? false ); return sourceMapper; } @@ -2219,6 +2525,22 @@ export class Program { // Create a map of unique imports, since imports can appear more than once. const newImportPathMap = new Map(); + + // Add chained source file as import if it exists. + if (sourceFileInfo.chainedSourceFile) { + if (sourceFileInfo.chainedSourceFile.sourceFile.isFileDeleted()) { + sourceFileInfo.chainedSourceFile = undefined; + } else { + const filePath = sourceFileInfo.chainedSourceFile.sourceFile.getFilePath(); + newImportPathMap.set(normalizePathCase(this._fs, filePath), { + path: filePath, + isTypeshedFile: false, + isThirdPartyImport: false, + isPyTypedPresent: false, + }); + } + } + imports.forEach((importResult) => { if (importResult.isImportFound) { if (this._isImportAllowed(sourceFileInfo, importResult, importResult.isStubFile)) { @@ -2338,6 +2660,16 @@ export class Program { sourceFileInfo.builtinsImport = this._getSourceFileInfoFromPath(resolvedBuiltinsPath); } + // Resolve the ipython display import for the file. This needs to be + // analyzed before the file can be analyzed. 
+ sourceFileInfo.ipythonDisplayImport = undefined; + const ipythonDisplayImport = sourceFileInfo.sourceFile.getIPythonDisplayImport(); + if (ipythonDisplayImport && ipythonDisplayImport.isImportFound) { + const resolvedIPythonDisplayPath = + ipythonDisplayImport.resolvedPaths[ipythonDisplayImport.resolvedPaths.length - 1]; + sourceFileInfo.ipythonDisplayImport = this._getSourceFileInfoFromPath(resolvedIPythonDisplayPath); + } + return filesAdded; } diff --git a/packages/pyright-internal/src/analyzer/properties.ts b/packages/pyright-internal/src/analyzer/properties.ts new file mode 100644 index 000000000000..81cc7fa71a4c --- /dev/null +++ b/packages/pyright-internal/src/analyzer/properties.ts @@ -0,0 +1,454 @@ +/* + * properties.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * Author: Eric Traut + * + * Provides type evaluation logic that is specific to properties. + */ + +import { DiagnosticAddendum } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { Localizer } from '../localization/localize'; +import { DecoratorNode, FunctionNode, ParameterCategory, ParseNode } from '../parser/parseNodes'; +import { getFileInfo } from './analyzerNodeInfo'; +import { getClassFullName, getTypeSourceId } from './parseTreeUtils'; +import { Symbol, SymbolFlags } from './symbol'; +import { TypeEvaluator } from './typeEvaluatorTypes'; +import { + AnyType, + ClassType, + ClassTypeFlags, + combineTypes, + FunctionType, + FunctionTypeFlags, + isAnyOrUnknown, + isFunction, + isInstantiableClass, + isTypeSame, + isTypeVar, + NoneType, + OverloadedFunctionType, + Type, + UnknownType, +} from './types'; +import { CanAssignFlags, computeMroLinearization, getTypeVarScopeId, isProperty } from './typeUtils'; +import { TypeVarMap } from './typeVarMap'; + +export function validatePropertyMethod(evaluator: TypeEvaluator, method: FunctionType, errorNode: ParseNode) { + if (FunctionType.isStaticMethod(method)) { + 
evaluator.addDiagnostic( + getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.propertyStaticMethod(), + errorNode + ); + } +} + +export function createProperty( + evaluator: TypeEvaluator, + decoratorNode: DecoratorNode, + decoratorType: ClassType, + fget: FunctionType +): ClassType { + const fileInfo = getFileInfo(decoratorNode); + const typeMetaclass = evaluator.getBuiltInType(decoratorNode, 'type'); + const typeSourceId = ClassType.isBuiltIn(decoratorType, 'property') + ? getTypeSourceId(decoratorNode) + : decoratorType.details.typeSourceId; + + const propertyClass = ClassType.createInstantiable( + decoratorType.details.name, + getClassFullName(decoratorNode, fileInfo.moduleName, `__property_${fget.details.name}`), + fileInfo.moduleName, + fileInfo.filePath, + ClassTypeFlags.PropertyClass, + typeSourceId, + /* declaredMetaclass */ undefined, + isInstantiableClass(typeMetaclass) ? typeMetaclass : UnknownType.create() + ); + + propertyClass.details.typeVarScopeId = decoratorType.details.typeVarScopeId; + computeMroLinearization(propertyClass); + + // Clone the symbol table of the old class type. + const fields = propertyClass.details.fields; + decoratorType.details.fields.forEach((symbol, name) => { + const ignoredMethods = ['__get__', '__set__', '__delete__', 'fget', 'fset', 'fdel']; + + if (!symbol.isIgnoredForProtocolMatch()) { + if (!ignoredMethods.some((m) => m === name)) { + fields.set(name, symbol); + } + } + }); + + const propertyObject = ClassType.cloneAsInstance(propertyClass); + propertyClass.isAsymmetricDescriptor = false; + + // Fill in the fget method. + const fgetSymbol = Symbol.createWithType(SymbolFlags.ClassMember, fget); + fields.set('fget', fgetSymbol); + + if (FunctionType.isClassMethod(fget)) { + propertyClass.details.flags |= ClassTypeFlags.ClassProperty; + } + + // Fill in the __get__ method with an overload. 
+ const getFunction1 = FunctionType.createInstance( + '__get__', + '', + '', + FunctionTypeFlags.SynthesizedMethod | FunctionTypeFlags.Overloaded + ); + FunctionType.addParameter(getFunction1, { + category: ParameterCategory.Simple, + name: 'self', + type: propertyObject, + hasDeclaredType: true, + }); + FunctionType.addParameter(getFunction1, { + category: ParameterCategory.Simple, + name: 'obj', + type: NoneType.createInstance(), + hasDeclaredType: true, + }); + FunctionType.addParameter(getFunction1, { + category: ParameterCategory.Simple, + name: 'objtype', + type: AnyType.create(), + hasDeclaredType: true, + hasDefault: true, + defaultType: AnyType.create(), + }); + getFunction1.details.declaredReturnType = FunctionType.isClassMethod(fget) + ? FunctionType.getSpecializedReturnType(fget) + : propertyObject; + getFunction1.details.declaration = fget.details.declaration; + + const getFunction2 = FunctionType.createInstance( + '__get__', + '', + '', + FunctionTypeFlags.SynthesizedMethod | FunctionTypeFlags.Overloaded + ); + FunctionType.addParameter(getFunction2, { + category: ParameterCategory.Simple, + name: 'self', + type: propertyObject, + hasDeclaredType: true, + }); + + const objType = fget.details.parameters.length > 0 ? fget.details.parameters[0].type : AnyType.create(); + FunctionType.addParameter(getFunction2, { + category: ParameterCategory.Simple, + name: 'obj', + type: objType, + hasDeclaredType: true, + }); + FunctionType.addParameter(getFunction2, { + category: ParameterCategory.Simple, + name: 'objtype', + type: AnyType.create(), + hasDeclaredType: true, + hasDefault: true, + defaultType: AnyType.create(), + }); + getFunction2.details.declaredReturnType = FunctionType.getSpecializedReturnType(fget); + getFunction2.details.declaration = fget.details.declaration; + + // Override the scope ID since we're using parameter types from the + // decorated function. 
+ getFunction2.details.typeVarScopeId = getTypeVarScopeId(fget); + + const getFunctionOverload = OverloadedFunctionType.create([getFunction1, getFunction2]); + const getSymbol = Symbol.createWithType(SymbolFlags.ClassMember, getFunctionOverload); + fields.set('__get__', getSymbol); + + // Fill in the getter, setter and deleter methods. + ['getter', 'setter', 'deleter'].forEach((accessorName) => { + const accessorFunction = FunctionType.createInstance(accessorName, '', '', FunctionTypeFlags.SynthesizedMethod); + FunctionType.addParameter(accessorFunction, { + category: ParameterCategory.Simple, + name: 'self', + type: AnyType.create(), + hasDeclaredType: true, + }); + FunctionType.addParameter(accessorFunction, { + category: ParameterCategory.Simple, + name: 'accessor', + type: AnyType.create(), + hasDeclaredType: true, + }); + accessorFunction.details.declaredReturnType = propertyObject; + const accessorSymbol = Symbol.createWithType(SymbolFlags.ClassMember, accessorFunction); + fields.set(accessorName, accessorSymbol); + }); + + return propertyObject; +} + +export function clonePropertyWithSetter( + evaluator: TypeEvaluator, + prop: Type, + fset: FunctionType, + errorNode: FunctionNode +): Type { + if (!isProperty(prop)) { + return prop; + } + + const classType = prop as ClassType; + const flagsToClone = classType.details.flags; + let isAsymmetricDescriptor = !!classType.isAsymmetricDescriptor; + + // Verify parameters for fset. + // We'll skip this test if the diagnostic rule is disabled because it + // can be somewhat expensive, especially in code that is not annotated. + const fileInfo = getFileInfo(errorNode); + if (errorNode.parameters.length >= 2) { + const typeAnnotation = evaluator.getTypeAnnotationForParameter(errorNode, 1); + if (typeAnnotation) { + // Verify consistency of the type. 
+ const fgetType = evaluator.getGetterTypeFromProperty(classType, /* inferTypeIfNeeded */ false); + if (fgetType && !isAnyOrUnknown(fgetType)) { + const fsetType = evaluator.getTypeOfAnnotation(typeAnnotation, { + associateTypeVarsWithScope: true, + disallowRecursiveTypeAlias: true, + }); + + // The setter type should be assignable to the getter type. + if (fileInfo.diagnosticRuleSet.reportPropertyTypeMismatch !== 'none') { + const diag = new DiagnosticAddendum(); + if (!evaluator.canAssignType(fgetType, fsetType, diag)) { + evaluator.addDiagnostic( + fileInfo.diagnosticRuleSet.reportPropertyTypeMismatch, + DiagnosticRule.reportPropertyTypeMismatch, + Localizer.Diagnostic.setterGetterTypeMismatch() + diag.getString(), + typeAnnotation + ); + } + } + + if (!isTypeSame(fgetType, fsetType)) { + isAsymmetricDescriptor = true; + } + } + } + } + + const propertyClass = ClassType.createInstantiable( + classType.details.name, + classType.details.fullName, + classType.details.moduleName, + getFileInfo(errorNode).filePath, + flagsToClone, + classType.details.typeSourceId, + classType.details.declaredMetaclass, + classType.details.effectiveMetaclass + ); + propertyClass.details.typeVarScopeId = classType.details.typeVarScopeId; + computeMroLinearization(propertyClass); + + const propertyObject = ClassType.cloneAsInstance(propertyClass); + propertyClass.isAsymmetricDescriptor = isAsymmetricDescriptor; + + // Clone the symbol table of the old class type. + const fields = propertyClass.details.fields; + classType.details.fields.forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + fields.set(name, symbol); + } + }); + + // Fill in the fset method. + const fsetSymbol = Symbol.createWithType(SymbolFlags.ClassMember, fset); + fields.set('fset', fsetSymbol); + + // Fill in the __set__ method. 
+ const setFunction = FunctionType.createInstance('__set__', '', '', FunctionTypeFlags.SynthesizedMethod); + FunctionType.addParameter(setFunction, { + category: ParameterCategory.Simple, + name: 'self', + type: prop, + hasDeclaredType: true, + }); + let objType = fset.details.parameters.length > 0 ? fset.details.parameters[0].type : AnyType.create(); + if (isTypeVar(objType) && objType.details.isSynthesizedSelf) { + objType = evaluator.makeTopLevelTypeVarsConcrete(objType); + } + FunctionType.addParameter(setFunction, { + category: ParameterCategory.Simple, + name: 'obj', + type: combineTypes([objType, NoneType.createInstance()]), + hasDeclaredType: true, + }); + setFunction.details.declaredReturnType = NoneType.createInstance(); + let setParamType: Type = UnknownType.create(); + if ( + fset.details.parameters.length >= 2 && + fset.details.parameters[1].category === ParameterCategory.Simple && + fset.details.parameters[1].name + ) { + setParamType = fset.details.parameters[1].type; + } + FunctionType.addParameter(setFunction, { + category: ParameterCategory.Simple, + name: 'value', + type: setParamType, + hasDeclaredType: true, + }); + const setSymbol = Symbol.createWithType(SymbolFlags.ClassMember, setFunction); + fields.set('__set__', setSymbol); + + return propertyObject; +} + +export function clonePropertyWithDeleter( + evaluator: TypeEvaluator, + prop: Type, + fdel: FunctionType, + errorNode: FunctionNode +): Type { + if (!isProperty(prop)) { + return prop; + } + + const classType = prop as ClassType; + const propertyClass = ClassType.createInstantiable( + classType.details.name, + classType.details.fullName, + classType.details.moduleName, + getFileInfo(errorNode).filePath, + classType.details.flags, + classType.details.typeSourceId, + classType.details.declaredMetaclass, + classType.details.effectiveMetaclass + ); + propertyClass.details.typeVarScopeId = classType.details.typeVarScopeId; + computeMroLinearization(propertyClass); + + const propertyObject = 
ClassType.cloneAsInstance(propertyClass); + propertyClass.isAsymmetricDescriptor = classType.isAsymmetricDescriptor ?? false; + + // Clone the symbol table of the old class type. + const fields = propertyClass.details.fields; + classType.details.fields.forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + fields.set(name, symbol); + } + }); + + // Fill in the fdel method. + const fdelSymbol = Symbol.createWithType(SymbolFlags.ClassMember, fdel); + fields.set('fdel', fdelSymbol); + + // Fill in the __delete__ method. + const delFunction = FunctionType.createInstance('__delete__', '', '', FunctionTypeFlags.SynthesizedMethod); + FunctionType.addParameter(delFunction, { + category: ParameterCategory.Simple, + name: 'self', + type: prop, + hasDeclaredType: true, + }); + let objType = fdel.details.parameters.length > 0 ? fdel.details.parameters[0].type : AnyType.create(); + if (isTypeVar(objType) && objType.details.isSynthesizedSelf) { + objType = evaluator.makeTopLevelTypeVarsConcrete(objType); + } + FunctionType.addParameter(delFunction, { + category: ParameterCategory.Simple, + name: 'obj', + type: combineTypes([objType, NoneType.createInstance()]), + hasDeclaredType: true, + }); + delFunction.details.declaredReturnType = NoneType.createInstance(); + const delSymbol = Symbol.createWithType(SymbolFlags.ClassMember, delFunction); + fields.set('__delete__', delSymbol); + + return propertyObject; +} + +export function canAssignProperty( + evaluator: TypeEvaluator, + destPropertyType: ClassType, + srcPropertyType: ClassType, + srcClass: ClassType, + diag: DiagnosticAddendum | undefined, + typeVarMap?: TypeVarMap, + recursionCount = 0 +): boolean { + const objectToBind = ClassType.cloneAsInstance(srcClass); + let isAssignable = true; + const accessors: { name: string; missingDiagMsg: () => string; incompatibleDiagMsg: () => string }[] = [ + { + name: 'fget', + missingDiagMsg: Localizer.DiagnosticAddendum.missingGetter, + incompatibleDiagMsg: 
Localizer.DiagnosticAddendum.incompatibleGetter, + }, + { + name: 'fset', + missingDiagMsg: Localizer.DiagnosticAddendum.missingSetter, + incompatibleDiagMsg: Localizer.DiagnosticAddendum.incompatibleSetter, + }, + { + name: 'fdel', + missingDiagMsg: Localizer.DiagnosticAddendum.missingDeleter, + incompatibleDiagMsg: Localizer.DiagnosticAddendum.incompatibleDeleter, + }, + ]; + + accessors.forEach((accessorInfo) => { + const destAccessSymbol = destPropertyType.details.fields.get(accessorInfo.name); + const destAccessType = destAccessSymbol ? evaluator.getDeclaredTypeOfSymbol(destAccessSymbol) : undefined; + + if (destAccessType && isFunction(destAccessType)) { + const srcAccessSymbol = srcPropertyType.details.fields.get(accessorInfo.name); + const srcAccessType = srcAccessSymbol ? evaluator.getDeclaredTypeOfSymbol(srcAccessSymbol) : undefined; + + if (!srcAccessType || !isFunction(srcAccessType)) { + if (diag) { + diag.addMessage(accessorInfo.missingDiagMsg()); + } + isAssignable = false; + return; + } + + const boundDestAccessType = evaluator.bindFunctionToClassOrObject( + objectToBind, + destAccessType, + /* memberClass */ undefined, + /* errorNode */ undefined, + recursionCount + ); + const boundSrcAccessType = evaluator.bindFunctionToClassOrObject( + objectToBind, + srcAccessType, + /* memberClass */ undefined, + /* errorNode */ undefined, + recursionCount + ); + + if ( + !boundDestAccessType || + !boundSrcAccessType || + !evaluator.canAssignType( + boundDestAccessType, + boundSrcAccessType, + diag?.createAddendum(), + typeVarMap, + CanAssignFlags.Default, + recursionCount + ) + ) { + if (diag) { + diag.addMessage('getter type is incompatible'); + } + isAssignable = false; + return; + } + } + }); + + return isAssignable; +} diff --git a/packages/pyright-internal/src/analyzer/scope.ts b/packages/pyright-internal/src/analyzer/scope.ts index d1b2a2e05b6e..b70a484c7700 100644 --- a/packages/pyright-internal/src/analyzer/scope.ts +++ 
b/packages/pyright-internal/src/analyzer/scope.ts @@ -54,8 +54,8 @@ export interface SymbolWithScope { // Indicates that the recursion needed to proceed // to a scope that is beyond the current execution - // scope. An execution scope is defined as a function - // or a module. Classes are not considered execution + // scope. An execution scope is defined as a function, + // module, or lambda. Classes are not considered execution // scopes because they are "executed" immediately as // part of the scope in which they are contained. isBeyondExecutionScope: boolean; diff --git a/packages/pyright-internal/src/analyzer/scopeUtils.ts b/packages/pyright-internal/src/analyzer/scopeUtils.ts index 3df858266632..d7d489be9db8 100644 --- a/packages/pyright-internal/src/analyzer/scopeUtils.ts +++ b/packages/pyright-internal/src/analyzer/scopeUtils.ts @@ -30,3 +30,52 @@ export function getScopeForNode(node: ParseNode): Scope | undefined { const scopeNode = getEvaluationScopeNode(node); return getScope(scopeNode); } + +// Returns a list of scopes associated with the node and its ancestor nodes. +// If stopScope is provided, the search will stop at that scope. +// Returns undefined if stopScope is not found. +export function getScopeHierarchy(node: ParseNode, stopScope?: Scope): Scope[] | undefined { + const scopeHierarchy: Scope[] = []; + let curNode: ParseNode | undefined = node; + + while (curNode) { + const curScope = getScopeForNode(curNode); + + if (!curScope) { + return undefined; + } + + if (scopeHierarchy.length === 0 || scopeHierarchy[scopeHierarchy.length - 1] !== curScope) { + scopeHierarchy.push(curScope); + } + + if (curScope === stopScope) { + return scopeHierarchy; + } + + curNode = curNode.parent; + } + + return stopScope ? undefined : scopeHierarchy; +} + +// Walks up the parse tree from the specified node to find the top-most node +// that is within specified scope. 
+export function findTopNodeInScope(node: ParseNode, scope: Scope): ParseNode | undefined { + let curNode: ParseNode | undefined = node; + let prevNode: ParseNode | undefined; + let foundScope = false; + + while (curNode) { + if (getScope(curNode) === scope) { + foundScope = true; + } else if (foundScope) { + return prevNode; + } + + prevNode = curNode; + curNode = curNode.parent; + } + + return undefined; +} diff --git a/packages/pyright-internal/src/analyzer/service.ts b/packages/pyright-internal/src/analyzer/service.ts index e0f40f215352..2eb46bb039d8 100644 --- a/packages/pyright-internal/src/analyzer/service.ts +++ b/packages/pyright-internal/src/analyzer/service.ts @@ -25,7 +25,7 @@ import { MarkupKind, } from 'vscode-languageserver-types'; -import { BackgroundAnalysisBase } from '../backgroundAnalysisBase'; +import { BackgroundAnalysisBase, IndexOptions } from '../backgroundAnalysisBase'; import { CancellationProvider, DefaultCancellationProvider } from '../common/cancellationUtils'; import { CommandLineOptions } from '../common/commandLineOptions'; import { ConfigOptions } from '../common/configOptions'; @@ -44,6 +44,8 @@ import { getFileSpec, getFileSystemEntries, isDirectory, + isFile, + makeDirectories, normalizePath, normalizeSlashes, stripFileExtension, @@ -52,7 +54,7 @@ import { } from '../common/pathUtils'; import { DocumentRange, Position, Range } from '../common/textRange'; import { timingStats } from '../common/timing'; -import { AbbreviationMap, CompletionOptions, CompletionResults } from '../languageService/completionProvider'; +import { AbbreviationMap, CompletionOptions, CompletionResultsList } from '../languageService/completionProvider'; import { DefinitionFilter } from '../languageService/definitionProvider'; import { IndexResults, WorkspaceSymbolCallback } from '../languageService/documentSymbolProvider'; import { HoverResults } from '../languageService/hoverProvider'; @@ -60,7 +62,7 @@ import { ReferenceCallback } from 
'../languageService/referencesProvider'; import { SignatureHelpResults } from '../languageService/signatureHelpProvider'; import { AnalysisCompleteCallback } from './analysis'; import { BackgroundAnalysisProgram, BackgroundAnalysisProgramFactory } from './backgroundAnalysisProgram'; -import { ImportedModuleDescriptor, ImportResolver, ImportResolverFactory } from './importResolver'; +import { createImportedModuleDescriptor, ImportResolver, ImportResolverFactory } from './importResolver'; import { MaxAnalysisTime } from './program'; import { findPythonSearchPaths } from './pythonPathUtils'; import { TypeEvaluator } from './typeEvaluatorTypes'; @@ -75,6 +77,10 @@ const _userActivityBackoffTimeInMs = 250; const _gitDirectory = normalizeSlashes('/.git/'); const _includeFileRegex = /\.pyi?$/; +// How long since the last library activity should we wait until +// re-analyzing the libraries? (10min) +const _libraryActivityBackoffTimeInMs = 60 * 1000 * 10; + export class AnalyzerService { private _hostFactory: HostFactory; private _instanceName: string; @@ -148,10 +154,10 @@ export class AnalyzerService { ); } - clone(instanceName: string, backgroundAnalysis?: BackgroundAnalysisBase): AnalyzerService { - return new AnalyzerService( + clone(instanceName: string, backgroundAnalysis?: BackgroundAnalysisBase, fs?: FileSystem): AnalyzerService { + const service = new AnalyzerService( instanceName, - this._fs, + fs ?? this._fs, this._console, this._hostFactory, this._importResolverFactory, @@ -162,6 +168,20 @@ export class AnalyzerService { this._backgroundAnalysisProgramFactory, this._cancellationProvider ); + + // Make sure we keep editor content (open file) which could be different than one in the file system. 
+ for (const fileInfo of this.backgroundAnalysisProgram.program.getOpened()) { + const version = fileInfo.sourceFile.getClientVersion(); + if (version !== undefined) { + service.setFileOpened( + fileInfo.sourceFile.getFilePath(), + version, + fileInfo.sourceFile.getOpenFileContents()! + ); + } + } + + return service; } dispose() { @@ -187,7 +207,7 @@ export class AnalyzerService { this._backgroundAnalysisProgram.setCompletionCallback(callback); } - setOptions(commandLineOptions: CommandLineOptions, reanalyze = true): void { + setOptions(commandLineOptions: CommandLineOptions): void { this._commandLineOptions = commandLineOptions; const host = this._hostFactory(); @@ -205,17 +225,47 @@ export class AnalyzerService { this._executionRootPath = normalizePath( combinePaths(commandLineOptions.executionRoot, configOptions.projectRoot) ); - this._applyConfigOptions(host, reanalyze); + this._applyConfigOptions(host); } - setFileOpened(path: string, version: number | null, contents: string) { - this._backgroundAnalysisProgram.setFileOpened(path, version, contents, this._isTracked(path)); - this._scheduleReanalysis(false); + isTracked(filePath: string): boolean { + for (const includeSpec of this._configOptions.include) { + if (this._matchIncludeFileSpec(includeSpec.regExp, this._configOptions.exclude, filePath)) { + return true; + } + } + + return false; } - updateOpenFileContents(path: string, version: number | null, contents: TextDocumentContentChangeEvent[]) { - this._backgroundAnalysisProgram.updateOpenFileContents(path, version, contents, this._isTracked(path)); - this._scheduleReanalysis(false); + setFileOpened( + path: string, + version: number | null, + contents: string, + ipythonMode = false, + chainedFilePath?: string + ) { + this._backgroundAnalysisProgram.setFileOpened(path, version, contents, { + isTracked: this.isTracked(path), + ipythonMode, + chainedFilePath, + }); + this._scheduleReanalysis(/*requireTrackedFileUpdate*/ false); + } + + updateOpenFileContents( 
+ path: string, + version: number | null, + contents: TextDocumentContentChangeEvent[], + ipythonMode = false, + chainedFilePath?: string + ) { + this._backgroundAnalysisProgram.updateOpenFileContents(path, version, contents, { + isTracked: this.isTracked(path), + ipythonMode, + chainedFilePath, + }); + this._scheduleReanalysis(/*requireTrackedFileUpdate*/ false); } test_setIndexing( @@ -225,8 +275,8 @@ export class AnalyzerService { this._backgroundAnalysisProgram.test_setIndexing(workspaceIndices, libraryIndices); } - startIndexing() { - this._backgroundAnalysisProgram.startIndexing(); + startIndexing(indexOptions: IndexOptions) { + this._backgroundAnalysisProgram.startIndexing(indexOptions); } setFileClosed(path: string) { @@ -272,6 +322,14 @@ export class AnalyzerService { return this._program.getDefinitionsForPosition(filePath, position, filter, token); } + getTypeDefinitionForPosition( + filePath: string, + position: Position, + token: CancellationToken + ): DocumentRange[] | undefined { + return this._program.getTypeDefinitionsForPosition(filePath, position, token); + } + reportReferencesForPosition( filePath: string, position: Position, @@ -327,7 +385,7 @@ export class AnalyzerService { options: CompletionOptions, nameMap: AbbreviationMap | undefined, token: CancellationToken - ): Promise { + ): Promise { return this._program.getCompletionsForPosition( filePath, position, @@ -369,6 +427,10 @@ export class AnalyzerService { return this._program.performQuickAction(filePath, command, args, token); } + renameModule(filePath: string, newFilePath: string, token: CancellationToken): FileEditAction[] | undefined { + return this._program.renameModule(filePath, newFilePath, token); + } + renameSymbolAtPosition( filePath: string, position: Position, @@ -512,7 +574,7 @@ export class AnalyzerService { } const configOptions = new ConfigOptions(projectRoot, this._typeCheckingMode); - const defaultExcludes = ['**/node_modules', '**/__pycache__', '.git']; + const 
defaultExcludes = ['**/node_modules', '**/__pycache__', '**/.*']; if (commandLineOptions.pythonPath) { this._console.info( @@ -596,6 +658,8 @@ export class AnalyzerService { configOptions.applyDiagnosticOverrides(commandLineOptions.diagnosticSeverityOverrides); } + configOptions.analyzeUnannotatedFunctions = commandLineOptions.analyzeUnannotatedFunctions ?? true; + const reportDuplicateSetting = (settingName: string, configValue: number | string | boolean) => { const settingSource = commandLineOptions.fromVsCodeExtension ? 'the client settings' @@ -719,7 +783,7 @@ export class AnalyzerService { const typingsSubdirPath = this._getTypeStubFolder(); this._program.writeTypeStub( - this._typeStubTargetPath!, + this._typeStubTargetPath ?? '', this._typeStubTargetIsSingleFile, typingsSubdirPath, token @@ -730,7 +794,7 @@ export class AnalyzerService { const typingsSubdirPath = this._getTypeStubFolder(); return this._backgroundAnalysisProgram.writeTypeStub( - this._typeStubTargetPath!, + this._typeStubTargetPath ?? '', this._typeStubTargetIsSingleFile, typingsSubdirPath, token @@ -740,9 +804,17 @@ export class AnalyzerService { // This is called after a new type stub has been created. It allows // us to invalidate caches and force reanalysis of files that potentially // are affected by the appearance of a new type stub. - invalidateAndForceReanalysis(rebuildLibraryIndexing = true) { + invalidateAndForceReanalysis( + rebuildUserFileIndexing = true, + rebuildLibraryIndexing = true, + updateTrackedFileList = false + ) { + if (updateTrackedFileList) { + this._updateTrackedFileList(/* markFilesDirtyUnconditionally */ false); + } + // Mark all files with one or more errors dirty. 
- this._backgroundAnalysisProgram.invalidateAndForceReanalysis(rebuildLibraryIndexing); + this._backgroundAnalysisProgram.invalidateAndForceReanalysis(rebuildUserFileIndexing, rebuildLibraryIndexing); } // Forces the service to stop all analysis, discard all its caches, @@ -796,6 +868,7 @@ export class AnalyzerService { this._console.error(errMsg); throw new Error(errMsg); } + if (!stubPath) { // We should never get here because we always generate a // default typings path if none was specified. @@ -803,6 +876,7 @@ export class AnalyzerService { this._console.info(errMsg); throw new Error(errMsg); } + const typeStubInputTargetParts = this._typeStubTargetImportName.split('.'); if (typeStubInputTargetParts[0].length === 0) { // We should never get here because the import resolution @@ -811,6 +885,7 @@ export class AnalyzerService { this._console.error(errMsg); throw new Error(errMsg); } + try { // Generate a new typings directory if necessary. if (!this._fs.existsSync(stubPath)) { @@ -821,18 +896,22 @@ export class AnalyzerService { this._console.error(errMsg); throw new Error(errMsg); } - // Generate a typings subdirectory. + + // Generate a typings subdirectory hierarchy. const typingsSubdirPath = combinePaths(stubPath, typeStubInputTargetParts[0]); + const typingsSubdirHierarchy = combinePaths(stubPath, ...typeStubInputTargetParts); + try { // Generate a new typings subdirectory if necessary. 
- if (!this._fs.existsSync(typingsSubdirPath)) { - this._fs.mkdirSync(typingsSubdirPath); + if (!this._fs.existsSync(typingsSubdirHierarchy)) { + makeDirectories(this._fs, typingsSubdirHierarchy, stubPath); } } catch (e: any) { - const errMsg = `Could not create typings subdirectory '${typingsSubdirPath}'`; + const errMsg = `Could not create typings subdirectory '${typingsSubdirHierarchy}'`; this._console.error(errMsg); throw new Error(errMsg); } + return typingsSubdirPath; } @@ -880,7 +959,7 @@ export class AnalyzerService { throw e; } - this._console.error(`Pyproject file "${pyprojectPath}" is missing "[tool.pyright] section.`); + this._console.error(`Pyproject file "${pyprojectPath}" is missing "[tool.pyright]" section.`); return undefined; }); } @@ -952,12 +1031,7 @@ export class AnalyzerService { // for a different set of files. if (this._typeStubTargetImportName) { const execEnv = this._configOptions.findExecEnvironment(this._executionRootPath); - const moduleDescriptor: ImportedModuleDescriptor = { - leadingDots: 0, - nameParts: this._typeStubTargetImportName.split('.'), - importedSymbols: [], - }; - + const moduleDescriptor = createImportedModuleDescriptor(this._typeStubTargetImportName); const importResult = this._backgroundAnalysisProgram.importResolver.resolveImport( '', execEnv, @@ -967,38 +1041,43 @@ export class AnalyzerService { if (importResult.isImportFound) { const filesToImport: string[] = []; - // Namespace packages resolve to a directory name, so - // don't include those. - const resolvedPath = importResult.resolvedPaths[importResult.resolvedPaths.length - 1]; + // Determine the directory that contains the root package. 
+ const finalResolvedPath = importResult.resolvedPaths[importResult.resolvedPaths.length - 1]; + const isFinalPathFile = isFile(this._fs, finalResolvedPath); + const isFinalPathInitFile = + isFinalPathFile && stripFileExtension(getFileName(finalResolvedPath)) === '__init__'; + + let rootPackagePath = finalResolvedPath; + + if (isFinalPathFile) { + // If the module is a __init__.pyi? file, use its parent directory instead. + rootPackagePath = getDirectoryPath(rootPackagePath); + } - // Get the directory that contains the root package. - let targetPath = getDirectoryPath(resolvedPath); - let prevResolvedPath = resolvedPath; for (let i = importResult.resolvedPaths.length - 2; i >= 0; i--) { - const resolvedPath = importResult.resolvedPaths[i]; - if (resolvedPath) { - targetPath = getDirectoryPath(resolvedPath); - prevResolvedPath = resolvedPath; + if (importResult.resolvedPaths[i]) { + rootPackagePath = importResult.resolvedPaths[i]; } else { // If there was no file corresponding to this portion // of the name path, assume that it's contained // within its parent directory. - targetPath = getDirectoryPath(prevResolvedPath); - prevResolvedPath = targetPath; + rootPackagePath = getDirectoryPath(rootPackagePath); } } - if (isDirectory(this._fs, targetPath)) { - this._typeStubTargetPath = targetPath; + if (isDirectory(this._fs, rootPackagePath)) { + this._typeStubTargetPath = rootPackagePath; + } else if (isFile(this._fs, rootPackagePath)) { + // This can occur if there is a "dir/__init__.py" at the same level as a + // module "dir/module.py" that is specifically targeted for stub generation. 
+ this._typeStubTargetPath = getDirectoryPath(rootPackagePath); } - if (!resolvedPath) { + if (!finalResolvedPath) { this._typeStubTargetIsSingleFile = false; } else { - filesToImport.push(resolvedPath); - this._typeStubTargetIsSingleFile = - importResult.resolvedPaths.length === 1 && - stripFileExtension(getFileName(importResult.resolvedPaths[0])) !== '__init__'; + filesToImport.push(finalResolvedPath); + this._typeStubTargetIsSingleFile = importResult.resolvedPaths.length === 1 && !isFinalPathInitFile; } // Add the implicit import paths. @@ -1202,7 +1281,16 @@ export class AnalyzerService { if (!isTemporaryFile) { // Added/deleted/renamed files impact imports, // clear the import resolver cache and reanalyze everything. - this.invalidateAndForceReanalysis(/* rebuildLibraryIndexing */ false); + // + // Here we don't need to rebuild any indexing since this kind of change can't affect + // indices. For library, since the changes are on workspace files, it won't affect library + // indices. For user file, since user file indices don't contains import alias symbols, + // it won't affect those indices. we only need to rebuild user file indices when symbols + // defined in the file are changed. ex) user modified the file. + this.invalidateAndForceReanalysis( + /* rebuildUserFileIndexing */ false, + /* rebuildLibraryIndexing */ false + ); this._scheduleReanalysis(/* requireTrackedFileUpdate */ true); } } @@ -1250,7 +1338,7 @@ export class AnalyzerService { } if (this._verboseOutput) { - this._console.info(`LibraryFile: Received fs event '${event}' for path '${path}'}'`); + this._console.info(`LibraryFile: Received fs event '${event}' for path '${path}'`); } if (isIgnored(path)) { @@ -1289,9 +1377,9 @@ export class AnalyzerService { // Invalidate import resolver, mark all files dirty unconditionally, // and reanalyze. 
- this.invalidateAndForceReanalysis(); + this.invalidateAndForceReanalysis(/* rebuildUserFileIndexing */ false); this._scheduleReanalysis(false); - }, 1000); + }, _libraryActivityBackoffTimeInMs); } private _removeConfigFileWatcher() { @@ -1373,7 +1461,7 @@ export class AnalyzerService { } } - private _applyConfigOptions(host: Host, reanalyze = true) { + private _applyConfigOptions(host: Host) { // Allocate a new import resolver because the old one has information // cached based on the previous config options. const importResolver = this._importResolverFactory( @@ -1400,9 +1488,7 @@ export class AnalyzerService { this._updateSourceFileWatchers(); this._updateTrackedFileList(true); - if (reanalyze) { - this._scheduleReanalysis(false); - } + this._scheduleReanalysis(false); } private _clearReanalysisTimer() { @@ -1413,7 +1499,7 @@ export class AnalyzerService { } private _scheduleReanalysis(requireTrackedFileUpdate: boolean) { - if (this._disposed) { + if (this._disposed || !this._commandLineOptions?.enableAmbientAnalysis) { // already disposed return; } @@ -1493,14 +1579,4 @@ export class AnalyzerService { return false; } - - private _isTracked(filePath: string): boolean { - for (const includeSpec of this._configOptions.include) { - if (this._matchIncludeFileSpec(includeSpec.regExp, this._configOptions.exclude, filePath)) { - return true; - } - } - - return false; - } } diff --git a/packages/pyright-internal/src/analyzer/sourceFile.ts b/packages/pyright-internal/src/analyzer/sourceFile.ts index 677d340d022f..31666eb3953d 100644 --- a/packages/pyright-internal/src/analyzer/sourceFile.ts +++ b/packages/pyright-internal/src/analyzer/sourceFile.ts @@ -28,6 +28,7 @@ import { TextEditAction } from '../common/editAction'; import { FileSystem } from '../common/fileSystem'; import { LogTracker } from '../common/logTracker'; import { getFileName, normalizeSlashes, stripFileExtension } from '../common/pathUtils'; +import { convertOffsetsToRange } from 
'../common/positionUtils'; import * as StringUtils from '../common/stringUtils'; import { DocumentRange, getEmptyRange, Position, TextRange } from '../common/textRange'; import { TextRangeCollection } from '../common/textRangeCollection'; @@ -43,7 +44,7 @@ import { performQuickAction } from '../languageService/quickActions'; import { ReferenceCallback, ReferencesProvider, ReferencesResult } from '../languageService/referencesProvider'; import { SignatureHelpProvider, SignatureHelpResults } from '../languageService/signatureHelpProvider'; import { Localizer } from '../localization/localize'; -import { ModuleNode } from '../parser/parseNodes'; +import { ModuleNode, NameNode } from '../parser/parseNodes'; import { ModuleImport, ParseOptions, Parser, ParseResults } from '../parser/parser'; import { Token } from '../parser/tokenizerTypes'; import { AnalyzerFileInfo, ImportLookup } from './analyzerFileInfo'; @@ -64,15 +65,14 @@ import { TypeEvaluator } from './typeEvaluatorTypes'; // Limit the number of import cycles tracked per source file. const _maxImportCyclesPerFile = 4; -// Allow files up to 32MB in length. -const _maxSourceFileSize = 32 * 1024 * 1024; +// Allow files up to 50MB in length, same as VS Code. 
+// https://github.com/microsoft/vscode/blob/1e750a7514f365585d8dab1a7a82e0938481ea2f/src/vs/editor/common/model/textModel.ts#L194 +const _maxSourceFileSize = 50 * 1024 * 1024; interface ResolveImportResult { imports: ImportResult[]; builtinsImportResult?: ImportResult | undefined; - typingModulePath?: string | undefined; - typeshedModulePath?: string | undefined; - collectionsModulePath?: string | undefined; + ipythonDisplayImportResult?: ImportResult | undefined; } export class SourceFile { @@ -146,8 +146,8 @@ export class SourceFile { private _parseDiagnostics: Diagnostic[] = []; private _bindDiagnostics: Diagnostic[] = []; private _checkerDiagnostics: Diagnostic[] = []; - private _typeIgnoreLines: { [line: number]: boolean } = {}; - private _typeIgnoreAll = false; + private _typeIgnoreLines = new Map(); + private _typeIgnoreAll: TextRange | undefined; // Settings that control which diagnostics should be output. private _diagnosticRuleSet = getBasicDiagnosticRuleSet(); @@ -167,12 +167,13 @@ export class SourceFile { // Do we need to perform an indexing step? private _indexingNeeded = true; + // Indicate whether this file is for ipython or not. + private _ipythonMode = false; + // Information about implicit and explicit imports from this file. 
private _imports: ImportResult[] | undefined; private _builtinsImport: ImportResult | undefined; - private _typingModulePath: string | undefined; - private _typeshedModulePath: string | undefined; - private _collectionsModulePath: string | undefined; + private _ipythonDisplayImport: ImportResult | undefined; private _logTracker: LogTracker; readonly fileSystem: FileSystem; @@ -184,7 +185,8 @@ export class SourceFile { isThirdPartyImport: boolean, isThirdPartyPyTypedPresent: boolean, console?: ConsoleInterface, - logTracker?: LogTracker + logTracker?: LogTracker, + ipythonMode = false ) { this.fileSystem = fs; this._console = console || new StandardConsole(); @@ -219,6 +221,7 @@ export class SourceFile { // 'FG' or 'BG' based on current thread. this._logTracker = logTracker ?? new LogTracker(console, isMainThread() ? 'FG' : 'BG'); + this._ipythonMode = ipythonMode; } getFilePath(): string { @@ -253,16 +256,18 @@ export class SourceFile { includeWarningsAndErrors = false; } - let diagList: Diagnostic[] = []; - diagList = diagList.concat(this._parseDiagnostics, this._bindDiagnostics, this._checkerDiagnostics); + let diagList = [...this._parseDiagnostics, ...this._bindDiagnostics, ...this._checkerDiagnostics]; + const prefilteredDiagList = diagList; + const typeIgnoreLinesClone = new Map(this._typeIgnoreLines); // Filter the diagnostics based on "type: ignore" lines. 
- if (options.diagnosticRuleSet.enableTypeIgnoreComments) { - if (Object.keys(this._typeIgnoreLines).length > 0) { + if (this._diagnosticRuleSet.enableTypeIgnoreComments) { + if (this._typeIgnoreLines.size > 0) { diagList = diagList.filter((d) => { - if (d.category !== DiagnosticCategory.UnusedCode) { + if (d.category !== DiagnosticCategory.UnusedCode && d.category !== DiagnosticCategory.Deprecated) { for (let line = d.range.start.line; line <= d.range.end.line; line++) { - if (this._typeIgnoreLines[line]) { + if (this._typeIgnoreLines.has(line)) { + typeIgnoreLinesClone.delete(line); return false; } } @@ -273,8 +278,51 @@ export class SourceFile { } } - if (options.diagnosticRuleSet.reportImportCycles !== 'none' && this._circularDependencies.length > 0) { - const category = convertLevelToCategory(options.diagnosticRuleSet.reportImportCycles); + const unnecessaryTypeIgnoreDiags: Diagnostic[] = []; + + if (this._diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment !== 'none') { + const diagCategory = convertLevelToCategory(this._diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment); + + const prefilteredErrorList = prefilteredDiagList.filter( + (diag) => + diag.category === DiagnosticCategory.Error || + diag.category === DiagnosticCategory.Warning || + diag.category === DiagnosticCategory.Information + ); + + if (prefilteredErrorList.length === 0 && this._typeIgnoreAll !== undefined) { + unnecessaryTypeIgnoreDiags.push( + new Diagnostic( + diagCategory, + Localizer.Diagnostic.unnecessaryTypeIgnore(), + convertOffsetsToRange( + this._typeIgnoreAll.start, + this._typeIgnoreAll.start + this._typeIgnoreAll.length, + this._parseResults!.tokenizerOutput.lines! 
+ ) + ) + ); + } + + typeIgnoreLinesClone.forEach((textRange) => { + if (this._parseResults?.tokenizerOutput.lines) { + unnecessaryTypeIgnoreDiags.push( + new Diagnostic( + diagCategory, + Localizer.Diagnostic.unnecessaryTypeIgnore(), + convertOffsetsToRange( + textRange.start, + textRange.start + textRange.length, + this._parseResults!.tokenizerOutput.lines! + ) + ) + ); + } + }); + } + + if (this._diagnosticRuleSet.reportImportCycles !== 'none' && this._circularDependencies.length > 0) { + const category = convertLevelToCategory(this._diagnosticRuleSet.reportImportCycles); this._circularDependencies.forEach((cirDep) => { diagList.push( @@ -308,18 +356,29 @@ export class SourceFile { } // If there is a "type: ignore" comment at the top of the file, clear - // the diagnostic list. - if (options.diagnosticRuleSet.enableTypeIgnoreComments) { - if (this._typeIgnoreAll) { - diagList = []; + // the diagnostic list of all error, warning, and information diagnostics. + if (this._diagnosticRuleSet.enableTypeIgnoreComments) { + if (this._typeIgnoreAll !== undefined) { + diagList = diagList.filter( + (diag) => + diag.category !== DiagnosticCategory.Error && + diag.category !== DiagnosticCategory.Warning && + diag.category !== DiagnosticCategory.Information + ); } } + // Now add in the "unnecessary type ignore" diagnostics. + diagList.push(...unnecessaryTypeIgnoreDiags); + // If we're not returning any diagnostics, filter out all of // the errors and warnings, leaving only the unreachable code - // diagnostics. + // and deprecated diagnostics. 
if (!includeWarningsAndErrors) { - diagList = diagList.filter((diag) => diag.category === DiagnosticCategory.UnusedCode); + diagList = diagList.filter( + (diag) => + diag.category === DiagnosticCategory.UnusedCode || diag.category === DiagnosticCategory.Deprecated + ); } return diagList; @@ -333,6 +392,10 @@ export class SourceFile { return this._builtinsImport; } + getIPythonDisplayImport(): ImportResult | undefined { + return this._ipythonDisplayImport; + } + getModuleSymbolTable(): SymbolTable | undefined { return this._moduleSymbolTable; } @@ -381,16 +444,16 @@ export class SourceFile { this._isBindingNeeded = true; } - markDirty(): void { + markDirty(indexingNeeded = true): void { this._fileContentsVersion++; this._isCheckingNeeded = true; this._isBindingNeeded = true; - this._indexingNeeded = true; + this._indexingNeeded = indexingNeeded; this._moduleSymbolTable = undefined; this._cachedIndexResults = undefined; } - markReanalysisRequired(): void { + markReanalysisRequired(forceRebinding: boolean): void { // Keep the parse info, but reset the analysis to the beginning. this._isCheckingNeeded = true; @@ -399,13 +462,15 @@ export class SourceFile { if (this._parseResults) { if ( this._parseResults.containsWildcardImport || - AnalyzerNodeInfo.getDunderAllInfo(this._parseResults.parseTree) !== undefined + AnalyzerNodeInfo.getDunderAllInfo(this._parseResults.parseTree) !== undefined || + forceRebinding ) { + // We don't need to rebuild index data since wildcard + // won't affect user file indices. User file indices + // don't contain import alias info. 
this._parseTreeNeedsCleaning = true; this._isBindingNeeded = true; - this._indexingNeeded = true; this._moduleSymbolTable = undefined; - this._cachedIndexResults = undefined; } } } @@ -414,10 +479,35 @@ export class SourceFile { return this._clientDocument?.version; } - getFileContents() { + getOpenFileContents() { return this._clientDocument?.getText(); } + getFileContent(): string | undefined { + // Get current buffer content if the file is opened. + const openFileContent = this.getOpenFileContents(); + if (openFileContent) { + return openFileContent; + } + + // Otherwise, get content from file system. + try { + // Check the file's length before attempting to read its full contents. + const fileStat = this.fileSystem.statSync(this._filePath); + if (fileStat.size > _maxSourceFileSize) { + this._console.error( + `File length of "${this._filePath}" is ${fileStat.size} ` + + `which exceeds the maximum supported file size of ${_maxSourceFileSize}` + ); + throw new Error('File larger than max'); + } + + return this.fileSystem.readFileSync(this._filePath, 'utf8'); + } catch (error) { + return undefined; + } + } + setClientVersion(version: number | null, contents: TextDocumentContentChangeEvent[]): void { if (version === null) { this._clientDocument = undefined; @@ -437,6 +527,7 @@ export class SourceFile { this._lastFileContentLength = fileContents.length; this._lastFileContentHash = contentsHash; + this._isFileDeleted = false; } } @@ -522,23 +613,16 @@ export class SourceFile { } const diagSink = new DiagnosticSink(); - let fileContents = this.getFileContents(); + let fileContents = this.getOpenFileContents(); if (fileContents === undefined) { try { const startTime = timingStats.readFileTime.totalTime; timingStats.readFileTime.timeOperation(() => { - // Check the file's length before attempting to read its full contents. 
- const fileStat = this.fileSystem.statSync(this._filePath); - if (fileStat.size > _maxSourceFileSize) { - this._console.error( - `File length of "${this._filePath}" is ${fileStat.size} ` + - `which exceeds the maximum supported file size of ${_maxSourceFileSize}` - ); - throw new Error('File larger than max'); - } - // Read the file's contents. - fileContents = content ?? this.fileSystem.readFileSync(this._filePath, 'utf8'); + fileContents = content ?? this.getFileContent(); + if (fileContents === undefined) { + throw new Error("Can't get file content"); + } // Remember the length and hash for comparison purposes. this._lastFileContentLength = fileContents.length; @@ -560,6 +644,7 @@ export class SourceFile { const execEnvironment = configOptions.findExecEnvironment(this._filePath); const parseOptions = new ParseOptions(); + parseOptions.ipythonMode = this._ipythonMode; if (this._filePath.endsWith('pyi')) { parseOptions.isStubFile = true; } @@ -585,9 +670,7 @@ export class SourceFile { this._imports = importResult.imports; this._builtinsImport = importResult.builtinsImportResult; - this._typingModulePath = importResult.typingModulePath; - this._typeshedModulePath = importResult.typeshedModulePath; - this._collectionsModulePath = importResult.collectionsModulePath; + this._ipythonDisplayImport = importResult.ipythonDisplayImportResult; this._parseDiagnostics = diagSink.fetchAndClear(); }); @@ -620,16 +703,18 @@ export class SourceFile { tokenizerOutput: { tokens: new TextRangeCollection([]), lines: new TextRangeCollection([]), - typeIgnoreAll: false, - typeIgnoreLines: {}, + typeIgnoreAll: undefined, + typeIgnoreLines: new Map(), predominantEndOfLineSequence: '\n', predominantTabSequence: ' ', predominantSingleQuoteCharacter: "'", }, containsWildcardImport: false, + typingSymbolAliases: new Map(), }; this._imports = undefined; this._builtinsImport = undefined; + this._ipythonDisplayImport = undefined; const diagSink = new DiagnosticSink(); diagSink.addError( @@ 
-700,6 +785,43 @@ export class SourceFile { ); } + getTypeDefinitionsForPosition( + sourceMapper: SourceMapper, + position: Position, + evaluator: TypeEvaluator, + filePath: string, + token: CancellationToken + ): DocumentRange[] | undefined { + // If we have no completed analysis job, there's nothing to do. + if (!this._parseResults) { + return undefined; + } + + return DefinitionProvider.getTypeDefinitionsForPosition( + sourceMapper, + this._parseResults, + position, + evaluator, + filePath, + token + ); + } + + getDeclarationForNode( + sourceMapper: SourceMapper, + node: NameNode, + evaluator: TypeEvaluator, + reporter: ReferenceCallback | undefined, + token: CancellationToken + ): ReferencesResult | undefined { + // If we have no completed analysis job, there's nothing to do. + if (!this._parseResults) { + return undefined; + } + + return ReferencesProvider.getDeclarationForNode(sourceMapper, this._filePath, node, evaluator, reporter, token); + } + getDeclarationForPosition( sourceMapper: SourceMapper, position: Position, @@ -847,7 +969,7 @@ export class SourceFile { // This command should be called only for open files, in which // case we should have the file contents already loaded. 
- const fileContents = this.getFileContents(); + const fileContents = this.getOpenFileContents(); if (fileContents === undefined) { return undefined; } @@ -888,7 +1010,7 @@ export class SourceFile { completionItem: CompletionItem, token: CancellationToken ) { - const fileContents = this.getFileContents(); + const fileContents = this.getOpenFileContents(); if (!this._parseResults || fileContents === undefined) { return; } @@ -933,10 +1055,10 @@ export class SourceFile { } bind(configOptions: ConfigOptions, importLookup: ImportLookup, builtinsScope: Scope | undefined) { - assert(!this.isParseRequired()); - assert(this.isBindingRequired()); - assert(!this._isBindingInProgress); - assert(this._parseResults !== undefined); + assert(!this.isParseRequired(), 'Bind called before parsing'); + assert(this.isBindingRequired(), 'Bind called unnecessarily'); + assert(!this._isBindingInProgress, 'Bind called while binding in progress'); + assert(this._parseResults !== undefined, 'Parse results not available'); return this._logTracker.log(`binding: ${this._getPathForLogging(this._filePath)}`, () => { try { @@ -965,7 +1087,7 @@ export class SourceFile { this._bindDiagnostics = fileInfo.diagnosticSink.fetchAndClear(); const moduleScope = AnalyzerNodeInfo.getScope(this._parseResults!.parseTree); - assert(moduleScope !== undefined); + assert(moduleScope !== undefined, 'Module scope not returned by binder'); this._moduleSymbolTable = moduleScope!.symbolTable; }); } catch (e: any) { @@ -999,11 +1121,11 @@ export class SourceFile { } check(evaluator: TypeEvaluator) { - assert(!this.isParseRequired()); - assert(!this.isBindingRequired()); - assert(!this._isBindingInProgress); - assert(this.isCheckingRequired()); - assert(this._parseResults !== undefined); + assert(!this.isParseRequired(), 'Check called before parsing'); + assert(!this.isBindingRequired(), 'Check called before binding'); + assert(!this._isBindingInProgress, 'Check called while binding in progress'); + 
assert(this.isCheckingRequired(), 'Check called unnecessarily'); + assert(this._parseResults !== undefined, 'Parse results not available'); return this._logTracker.log(`checking: ${this._getPathForLogging(this._filePath)}`, () => { try { @@ -1048,27 +1170,29 @@ export class SourceFile { }); } + test_enableIPythonMode(enable: boolean) { + this._ipythonMode = enable; + } + private _buildFileInfo( configOptions: ConfigOptions, fileContents: string, importLookup: ImportLookup, builtinsScope?: Scope ) { - assert(this._parseResults !== undefined); + assert(this._parseResults !== undefined, 'Parse results not available'); const analysisDiagnostics = new TextRangeDiagnosticSink(this._parseResults!.tokenizerOutput.lines); const fileInfo: AnalyzerFileInfo = { importLookup, futureImports: this._parseResults!.futureImports, builtinsScope, - typingModulePath: this._typingModulePath, - typeshedModulePath: this._typeshedModulePath, - collectionsModulePath: this._collectionsModulePath, diagnosticSink: analysisDiagnostics, executionEnvironment: configOptions.findExecEnvironment(this._filePath), diagnosticRuleSet: this._diagnosticRuleSet, fileContents, lines: this._parseResults!.tokenizerOutput.lines, + typingSymbolAliases: this._parseResults!.typingSymbolAliases, filePath: this._filePath, moduleName: this._moduleName, isStubFile: this._isStubFile, @@ -1076,6 +1200,7 @@ export class SourceFile { isTypingExtensionsStubFile: this._isTypingExtensionsStubFile, isBuiltInStubFile: this._isBuiltInStubFile, isInPyTypedPackage: this._isThirdPartyPyTypedPresent, + isIPythonMode: this._ipythonMode, accessedSymbolMap: new Map(), }; return fileInfo; @@ -1098,57 +1223,42 @@ export class SourceFile { ): ResolveImportResult { const imports: ImportResult[] = []; - // Always include an implicit import of the builtins module. 
- let builtinsImportResult: ImportResult | undefined = importResolver.resolveImport(this._filePath, execEnv, { - leadingDots: 0, - nameParts: ['builtins'], - importedSymbols: undefined, - }); + const resolveAndAddIfNotSelf = (nameParts: string[], skipMissingImport = false) => { + const importResult = importResolver.resolveImport(this._filePath, execEnv, { + leadingDots: 0, + nameParts, + importedSymbols: undefined, + }); - // Avoid importing builtins from the builtins.pyi file itself. - if ( - builtinsImportResult.resolvedPaths.length === 0 || - builtinsImportResult.resolvedPaths[0] !== this.getFilePath() - ) { - imports.push(builtinsImportResult); - } else { - builtinsImportResult = undefined; - } + if (skipMissingImport && !importResult.isImportFound) { + return undefined; + } - // Always include an implicit import of the typing module. - const typingImportResult: ImportResult | undefined = importResolver.resolveImport(this._filePath, execEnv, { - leadingDots: 0, - nameParts: ['typing'], - importedSymbols: undefined, - }); + // Avoid importing module from the module file itself. + if (importResult.resolvedPaths.length === 0 || importResult.resolvedPaths[0] !== this._filePath) { + imports.push(importResult); + return importResult; + } - // Avoid importing typing from the typing.pyi file itself. - let typingModulePath: string | undefined; - if ( - typingImportResult.resolvedPaths.length === 0 || - typingImportResult.resolvedPaths[0] !== this.getFilePath() - ) { - imports.push(typingImportResult); - typingModulePath = typingImportResult.resolvedPaths[0]; - } + return undefined; + }; - // Always include an implicit import of the _typeshed module. - const typeshedImportResult: ImportResult | undefined = importResolver.resolveImport(this._filePath, execEnv, { - leadingDots: 0, - nameParts: ['_typeshed'], - importedSymbols: undefined, - }); + // Always include an implicit import of the builtins module. 
+ let builtinsImportResult: ImportResult | undefined; - let typeshedModulePath: string | undefined; - if ( - typeshedImportResult.resolvedPaths.length === 0 || - typeshedImportResult.resolvedPaths[0] !== this.getFilePath() - ) { - imports.push(typeshedImportResult); - typeshedModulePath = typeshedImportResult.resolvedPaths[0]; + // If this is a project source file (not a stub), try to resolve + // the __builtins__ stub first. + if (!this._isThirdPartyImport && !this._isStubFile) { + builtinsImportResult = resolveAndAddIfNotSelf(['__builtins__'], /*skipMissingImport*/ true); } - let collectionsModulePath: string | undefined; + if (!builtinsImportResult) { + builtinsImportResult = resolveAndAddIfNotSelf(['builtins']); + } + + const ipythonDisplayImportResult = this._ipythonMode + ? resolveAndAddIfNotSelf(['IPython', 'display']) + : undefined; for (const moduleImport of moduleImports) { const importResult = importResolver.resolveImport(this._filePath, execEnv, { @@ -1157,15 +1267,6 @@ export class SourceFile { importedSymbols: moduleImport.importedSymbols, }); - // If the file imports the stdlib 'collections' module, stash - // away its file path. The type analyzer may need this to - // access types defined in the collections module. 
- if (importResult.isImportFound && importResult.isTypeshedFile) { - if (moduleImport.nameParts.length >= 1 && moduleImport.nameParts[0] === 'collections') { - collectionsModulePath = importResult.resolvedPaths[importResult.resolvedPaths.length - 1]; - } - } - imports.push(importResult); // Associate the import results with the module import @@ -1177,9 +1278,7 @@ export class SourceFile { return { imports, builtinsImportResult, - typingModulePath, - typeshedModulePath, - collectionsModulePath, + ipythonDisplayImportResult, }; } diff --git a/packages/pyright-internal/src/analyzer/sourceMapper.ts b/packages/pyright-internal/src/analyzer/sourceMapper.ts index dfea8cb84d85..0e4beb53125f 100644 --- a/packages/pyright-internal/src/analyzer/sourceMapper.ts +++ b/packages/pyright-internal/src/analyzer/sourceMapper.ts @@ -20,7 +20,7 @@ import { isClassDeclaration, isFunctionDeclaration, isParameterDeclaration, - isSpecialBuiltInClassDeclarations, + isSpecialBuiltInClassDeclaration, isVariableDeclaration, ParameterDeclaration, SpecialBuiltInClassDeclaration, @@ -45,7 +45,8 @@ export class SourceMapper { private _evaluator: TypeEvaluator, private _fileBinder: ShadowFileBinder, private _boundSourceGetter: BoundSourceGetter, - private _mapCompiled: boolean + private _mapCompiled: boolean, + private _preferStubs: boolean ) {} findModules(stubFilePath: string): ModuleNode[] { @@ -62,7 +63,7 @@ export class SourceMapper { return this._findVariableDeclarations(stubDecl); } else if (isParameterDeclaration(stubDecl)) { return this._findParameterDeclarations(stubDecl); - } else if (isSpecialBuiltInClassDeclarations(stubDecl)) { + } else if (isSpecialBuiltInClassDeclaration(stubDecl)) { return this._findSpecialBuiltInClassDeclarations(stubDecl); } @@ -243,7 +244,7 @@ export class SourceMapper { variableName, (decl, cache, result) => { if (isVariableDeclaration(decl)) { - if (this._isStubFile(decl.path)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.path)) { for (const 
implDecl of this._findVariableDeclarations(decl, cache)) { if (isVariableDeclaration(implDecl)) { result.push(implDecl); @@ -282,7 +283,7 @@ export class SourceMapper { functionName, (decl, cache, result) => { if (isFunctionDeclaration(decl)) { - if (this._isStubFile(decl.path)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.path)) { result.push(...this._findFunctionOrTypeAliasDeclarations(decl, cache)); } else { result.push(decl); @@ -424,7 +425,7 @@ export class SourceMapper { recursiveDeclCache: Set ) { if (isVariableDeclaration(decl)) { - if (this._isStubFile(decl.path)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.path)) { result.push(...this._findVariableDeclarations(decl, recursiveDeclCache)); } else { result.push(decl); @@ -447,13 +448,13 @@ export class SourceMapper { recursiveDeclCache: Set ) { if (isClassDeclaration(decl)) { - if (this._isStubFile(decl.path)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.path)) { result.push(...this._findClassOrTypeAliasDeclarations(decl, recursiveDeclCache)); } else { result.push(decl); } } else if (isFunctionDeclaration(decl)) { - if (this._isStubFile(decl.path)) { + if (this._isStubThatShouldBeMappedToImplementation(decl.path)) { result.push(...this._findFunctionOrTypeAliasDeclarations(decl, recursiveDeclCache)); } else { result.push(decl); @@ -493,30 +494,22 @@ export class SourceMapper { result: ClassOrFunctionOrVariableDeclaration[], recursiveDeclCache: Set ) { - const importResult = this._importResolver.resolveImport(originated, this._execEnv, { - leadingDots: 0, - nameParts: type.details.moduleName.split('.'), - importedSymbols: [], - }); - - if (importResult.isImportFound && importResult.resolvedPaths.length > 0) { - const filePath = importResult.resolvedPaths[importResult.resolvedPaths.length - 1]; - const sourceFiles = this._getSourceFiles(filePath); - - const fullClassName = type.details.fullName.substring( - type.details.moduleName.length + 1 /* +1 for trailing dot 
*/ - ); + const filePath = type.details.filePath; + const sourceFiles = this._getSourceFiles(filePath); - for (const sourceFile of sourceFiles) { - result.push(...this._findClassDeclarationsByName(sourceFile, fullClassName, recursiveDeclCache)); - } + const fullClassName = type.details.fullName.substring( + type.details.moduleName.length + 1 /* +1 for trailing dot */ + ); + + for (const sourceFile of sourceFiles) { + result.push(...this._findClassDeclarationsByName(sourceFile, fullClassName, recursiveDeclCache)); } } private _getSourceFiles(filePath: string) { const sourceFiles: SourceFile[] = []; - if (this._isStubFile(filePath)) { + if (this._isStubThatShouldBeMappedToImplementation(filePath)) { sourceFiles.push(...this._getBoundSourceFilesFromStubFile(filePath)); } else { const sourceFile = this._boundSourceGetter(filePath); @@ -634,7 +627,11 @@ export class SourceMapper { return paths.map((fp) => this._fileBinder(stubFilePath, fp)).filter(isDefined); } - private _isStubFile(filePath: string): boolean { + private _isStubThatShouldBeMappedToImplementation(filePath: string): boolean { + if (this._preferStubs) { + return false; + } + const stub = isStubFile(filePath); if (!stub) { return false; diff --git a/packages/pyright-internal/src/analyzer/staticExpressions.ts b/packages/pyright-internal/src/analyzer/staticExpressions.ts index d30338920123..939e37468545 100644 --- a/packages/pyright-internal/src/analyzer/staticExpressions.ts +++ b/packages/pyright-internal/src/analyzer/staticExpressions.ts @@ -20,6 +20,10 @@ export function evaluateStaticBoolExpression( typingImportAliases?: string[], sysImportAliases?: string[] ): boolean | undefined { + if (node.nodeType === ParseNodeType.AssignmentExpression) { + return evaluateStaticBoolExpression(node.rightExpression, execEnv, typingImportAliases, sysImportAliases); + } + if (node.nodeType === ParseNodeType.UnaryOperation) { if (node.operator === OperatorType.Or || node.operator === OperatorType.And) { const value = 
evaluateStaticBoolLikeExpression( @@ -156,11 +160,15 @@ function _convertTupleToVersion(node: TupleNode): number | undefined { ) { const majorVersion = node.expressions[0]; const minorVersion = node.expressions[1]; - comparisonVersion = majorVersion.value * 256 + minorVersion.value; + if (typeof majorVersion.value === 'number' && typeof minorVersion.value === 'number') { + comparisonVersion = majorVersion.value * 256 + minorVersion.value; + } } } else if (node.expressions.length === 1) { const majorVersion = node.expressions[0] as NumberNode; - comparisonVersion = majorVersion.value * 256; + if (typeof majorVersion.value === 'number') { + comparisonVersion = majorVersion.value * 256; + } } return comparisonVersion; @@ -168,10 +176,13 @@ function _convertTupleToVersion(node: TupleNode): number | undefined { function _evaluateNumericBinaryOperation( operatorType: OperatorType, - leftValue: number | undefined, - rightValue: number | undefined + leftValue: number | bigint | undefined, + rightValue: number | bigint | undefined ): any | undefined { if (leftValue !== undefined && rightValue !== undefined) { + leftValue = BigInt(leftValue); + rightValue = BigInt(rightValue); + if (operatorType === OperatorType.LessThan) { return leftValue < rightValue; } else if (operatorType === OperatorType.LessThanOrEqual) { diff --git a/packages/pyright-internal/src/analyzer/symbol.ts b/packages/pyright-internal/src/analyzer/symbol.ts index 4b6ba40aaf55..0469abcadc14 100644 --- a/packages/pyright-internal/src/analyzer/symbol.ts +++ b/packages/pyright-internal/src/analyzer/symbol.ts @@ -49,6 +49,9 @@ export const enum SymbolFlags { // Indicates that the symbol is a private import in a py.typed module. PrivatePyTypedImport = 1 << 9, + + // Indicates that the symbol is an InitVar as specified in PEP 557. 
+ InitVar = 1 << 10, } let nextSymbolId = 1; @@ -132,6 +135,14 @@ export class Symbol { return !!(this._flags & SymbolFlags.ClassVar); } + setIsInitVar() { + this._flags |= SymbolFlags.InitVar; + } + + isInitVar() { + return !!(this._flags & SymbolFlags.InitVar); + } + setIsInDunderAll() { this._flags |= SymbolFlags.InDunderAll; } diff --git a/packages/pyright-internal/src/analyzer/testWalker.ts b/packages/pyright-internal/src/analyzer/testWalker.ts index 52962d449386..6062828e817a 100644 --- a/packages/pyright-internal/src/analyzer/testWalker.ts +++ b/packages/pyright-internal/src/analyzer/testWalker.ts @@ -64,7 +64,7 @@ export class TestWalker extends ParseTreeWalker { if (!skipCheck) { // Make sure the child is contained within the parent. if (child.start < node.start || TextRange.getEnd(child) > TextRange.getEnd(node)) { - fail(`Child node ${child.nodeType} is not ` + `contained within its parent ${node.nodeType}`); + fail(`Child node ${child.nodeType} is not contained within its parent ${node.nodeType}`); } if (prevNode) { // Make sure the child is after the previous child. 
@@ -92,7 +92,7 @@ export class NameTypeWalker extends ParseTreeWalker { override visitName(node: NameNode) { if (node.parent?.nodeType !== ParseNodeType.ImportFromAs && node.parent?.nodeType !== ParseNodeType.ImportAs) { - if (this._evaluator.isNodeReachable(node)) { + if (this._evaluator.isNodeReachable(node, /* sourceNode */ undefined)) { this._evaluator.getType(node); } } diff --git a/packages/pyright-internal/src/analyzer/tracePrinter.ts b/packages/pyright-internal/src/analyzer/tracePrinter.ts index e5c879a2db79..c7e21174aca1 100644 --- a/packages/pyright-internal/src/analyzer/tracePrinter.ts +++ b/packages/pyright-internal/src/analyzer/tracePrinter.ts @@ -10,6 +10,7 @@ import { isNumber, isString } from '../common/core'; import { assertNever } from '../common/debug'; import { ensureTrailingDirectorySeparator, stripFileExtension } from '../common/pathUtils'; import { isExpressionNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { AbsoluteModuleDescriptor } from './analyzerFileInfo'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; import { Declaration, DeclarationType } from './declaration'; import * as ParseTreeUtils from './parseTreeUtils'; @@ -20,7 +21,7 @@ export type PrintableType = ParseNode | Declaration | Symbol | Type | undefined; export interface TracePrinter { print(o: PrintableType): string; - printFileOrModuleName(filePath: string): string; + printFileOrModuleName(filePathOrModule: string | AbsoluteModuleDescriptor): string; } export function createTracePrinter(roots: string[]): TracePrinter { @@ -37,16 +38,20 @@ export function createTracePrinter(roots: string[]): TracePrinter { .reverse(); const separatorRegExp = /[\\/]/g; - function printFileOrModuleName(filePath: string | undefined) { - if (filePath) { - for (const root of roots) { - if (filePath.startsWith(root)) { - const subFile = filePath.substring(root.length); - return stripFileExtension(subFile).replace(separatorRegExp, '.'); + function 
printFileOrModuleName(filePathOrModule: string | AbsoluteModuleDescriptor | undefined) { + if (filePathOrModule) { + if (typeof filePathOrModule === 'string') { + for (const root of roots) { + if (filePathOrModule.startsWith(root)) { + const subFile = filePathOrModule.substring(root.length); + return stripFileExtension(subFile).replace(separatorRegExp, '.'); + } } - } - return filePath; + return filePathOrModule; + } else { + return filePathOrModule.nameParts.join('.'); + } } return ''; } diff --git a/packages/pyright-internal/src/analyzer/typeCache.ts b/packages/pyright-internal/src/analyzer/typeCache.ts index 0689d6d5c2f6..7dcfdd173752 100644 --- a/packages/pyright-internal/src/analyzer/typeCache.ts +++ b/packages/pyright-internal/src/analyzer/typeCache.ts @@ -40,6 +40,9 @@ export interface IncompleteType { // attempting to compute a type. recursiveVisitCount?: number; + // Number of times this type has been evaluated as the same. + recursiveConvergenceCount?: number; + // Tracks whether something has changed since this cache entry // was written that might change the incomplete type; if this // doesn't match the global "incomplete generation count", this diff --git a/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts b/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts index a5bea7c9f15a..d23d075332be 100644 --- a/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts +++ b/packages/pyright-internal/src/analyzer/typeDocStringUtils.ts @@ -100,12 +100,12 @@ export function getFunctionDocStringInherited( } } - return docString; + return docString || type.details.docString; } export function getOverloadedFunctionDocStringsInherited( type: OverloadedFunctionType, - resolvedDecl: Declaration | undefined, + resolvedDecls: Declaration[], sourceMapper: SourceMapper, evaluator: TypeEvaluator, classType?: ClassType @@ -116,9 +116,11 @@ export function getOverloadedFunctionDocStringsInherited( // they typically not helpful (and object's __init__ 
doc causes issues // with our current docstring traversal). if (!isInheritedFromBuiltin(type, classType)) { - docStrings = _getOverloadedFunctionDocStrings(type, resolvedDecl, sourceMapper); - if (docStrings && docStrings.length > 0) { - return docStrings; + for (const resolvedDecl of resolvedDecls) { + docStrings = _getOverloadedFunctionDocStrings(type, resolvedDecl, sourceMapper); + if (docStrings && docStrings.length > 0) { + return docStrings; + } } } diff --git a/packages/pyright-internal/src/analyzer/typeEvaluator.ts b/packages/pyright-internal/src/analyzer/typeEvaluator.ts index b88eb24a13c2..e4c0e171f1ed 100644 --- a/packages/pyright-internal/src/analyzer/typeEvaluator.ts +++ b/packages/pyright-internal/src/analyzer/typeEvaluator.ts @@ -24,7 +24,7 @@ import { AddMissingOptionalToParamAction, DiagnosticAddendum } from '../common/d import { DiagnosticRule } from '../common/diagnosticRules'; import { convertOffsetsToRange } from '../common/positionUtils'; import { PythonVersion } from '../common/pythonVersion'; -import { getEmptyRange, TextRange } from '../common/textRange'; +import { TextRange } from '../common/textRange'; import { Localizer } from '../localization/localize'; import { ArgumentCategory, @@ -47,6 +47,7 @@ import { IndexNode, isExpressionNode, LambdaNode, + ListComprehensionForIfNode, ListComprehensionNode, ListNode, MatchNode, @@ -73,37 +74,23 @@ import { KeywordType, OperatorType, StringTokenFlags } from '../parser/tokenizer import * as DeclarationUtils from './aliasDeclarationUtils'; import { AnalyzerFileInfo, ImportLookup } from './analyzerFileInfo'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; +import { CodeFlowAnalyzer, FlowNodeTypeResult, getCodeFlowEngine } from './codeFlowEngine'; import { CodeFlowReferenceExpressionNode, createKeyForReference, - createKeysForReferenceSubexpressions, - FlowAssignment, - FlowAssignmentAlias, - FlowBranchLabel, - FlowCall, - FlowCondition, - FlowExhaustedMatch, - FlowFlags, - FlowLabel, - 
FlowNarrowForPattern, FlowNode, - FlowPostContextManagerLabel, - FlowPostFinally, - FlowPreFinallyGate, - FlowVariableAnnotation, - FlowWildcardImport, isCodeFlowSupportedForReference, -} from './codeFlow'; +} from './codeFlowTypes'; +import { applyConstructorTransform } from './constructorTransform'; import { + applyDataClassClassBehaviorOverrides, applyDataClassDecorator, applyDataClassDefaultBehaviors, - applyDataClassMetaclassBehaviorOverrides, getDataclassDecoratorBehaviors, synthesizeDataClassMethods, validateDataClassTransformDecorator, } from './dataClasses'; import { - AliasDeclaration, ClassDeclaration, Declaration, DeclarationType, @@ -111,27 +98,33 @@ import { ModuleLoaderActions, VariableDeclaration, } from './declaration'; -import { isExplicitTypeAliasDeclaration, isPossibleTypeAliasDeclaration } from './declarationUtils'; +import { + createSynthesizedAliasDeclaration, + getDeclarationsWithUsesLocalNameRemoved, + isExplicitTypeAliasDeclaration, + isFinalVariableDeclaration, + isPossibleTypeAliasDeclaration, +} from './declarationUtils'; +import { applyFunctionTransform } from './functionTransform'; import { createNamedTupleType } from './namedTuples'; import * as ParseTreeUtils from './parseTreeUtils'; import { assignTypeToPatternTargets, narrowTypeBasedOnPattern } from './patternMatching'; -import { Scope, ScopeType } from './scope'; +import { + canAssignProperty, + clonePropertyWithDeleter, + clonePropertyWithSetter, + createProperty, + validatePropertyMethod, +} from './properties'; +import { Scope, ScopeType, SymbolWithScope } from './scope'; import * as ScopeUtils from './scopeUtils'; import { evaluateStaticBoolExpression } from './staticExpressions'; import { indeterminateSymbolId, Symbol, SymbolFlags } from './symbol'; -import { isConstantName, isPrivateOrProtectedName, isSingleDunderName } from './symbolNameUtils'; +import { isConstantName, isPrivateName, isPrivateOrProtectedName, isSingleDunderName } from './symbolNameUtils'; import { 
getLastTypedDeclaredForSymbol, isFinalVariable } from './symbolUtils'; +import { CachedType, IncompleteTypeTracker, isIncompleteType, SpeculativeTypeTracker, TypeCache } from './typeCache'; import { - CachedType, - IncompleteSubtypeInfo, - IncompleteType, - IncompleteTypeTracker, - isIncompleteType, - SpeculativeTypeTracker, - TypeCache, -} from './typeCache'; -import { - canAssignToTypedDict, + assignToTypedDict, canAssignTypedDict, createTypedDictType, getTypedDictMembersForClass, @@ -147,13 +140,14 @@ import { EffectiveTypeResult, EvaluatorFlags, EvaluatorUsage, + ExpectedTypeResult, FunctionArgument, FunctionTypeResult, + TypeArgumentResult, TypeEvaluator, TypeResult, ValidateArgTypeParams, } from './typeEvaluatorTypes'; -import { getTypeNarrowingCallback } from './typeGuards'; import * as TypePrinter from './typePrinter'; import { AnyType, @@ -175,15 +169,18 @@ import { isInstantiableClass, isModule, isNever, - isNone, + isNoneInstance, + isNoneTypeClass, isOverloadedFunction, isParamSpec, - isPossiblyUnbound, isTypeSame, isTypeVar, isUnbound, isUnion, isUnknown, + isUnpacked, + isUnpackedClass, + isUnpackedVariadicTypeVar, isVariadicTypeVar, LiteralValue, maxTypeRecursionCount, @@ -194,14 +191,14 @@ import { ParamSpecEntry, removeNoneFromUnion, removeUnbound, - removeUnknownFromUnion, + TupleTypeArgument, Type, TypeBase, TypeCategory, TypeCondition, TypedDictEntry, - TypeSourceId, TypeVarScopeId, + TypeVarScopeType, TypeVarType, UnboundType, UnionType, @@ -216,46 +213,48 @@ import { areTypesSame, buildTypeVarMapFromSpecializedClass, CanAssignFlags, - canBeFalsy, - canBeTruthy, ClassMember, ClassMemberLookupFlags, combineSameSizedTuples, computeMroLinearization, containsLiteralType, containsUnknown, + convertParamSpecValueToType, convertToInstance, convertToInstantiable, derivesFromClassRecursive, doForEachSubtype, explodeGenericClass, getDeclaredGeneratorReturnType, - getDeclaredGeneratorSendType, getGeneratorTypeArgs, + getLiteralTypeClassName, + 
getParameterListDetails, getSpecializedTupleType, getTypeCondition, getTypeVarArgumentsRecursive, getTypeVarScopeId, + getUnionSubtypeCount, + isEffectivelyInstantiable, isEllipsisType, isLiteralType, isLiteralTypeOrUnion, - isNoReturnType, - isOpenEndedTupleClass, isOptionalType, isPartlyUnknown, isProperty, isTupleClass, isTypeAliasPlaceholder, isTypeAliasRecursive, + isUnboundedTupleClass, isUnionableType, lookUpClassMember, lookUpObjectMember, mapSubtypes, + ParameterListDetails, + ParameterSource, partiallySpecializeType, - removeFalsinessFromType, - removeNoReturnFromUnion, + populateTypeVarMapForSelfType, + removeParamSpecVariadicsFromFunction, removeParamSpecVariadicsFromSignature, - removeTruthinessFromType, requiresSpecialization, requiresTypeArguments, setTypeArgumentsRecursive, @@ -266,6 +265,7 @@ import { synthesizeTypeVarForSelfCls, transformExpectedTypeForConstructor, transformPossibleRecursiveTypeAlias, + VirtualParameterDetails, } from './typeUtils'; import { TypeVarMap } from './typeVarMap'; @@ -305,6 +305,17 @@ const enum MemberAccessFlags { // Do not include the class itself, only base classes. SkipOriginalClass = 1 << 7, + + // Do not include the "type" base class in the search. + SkipTypeBaseClass = 1 << 8, +} + +interface ValidateTypeArgsOptions { + allowEmptyTuple?: boolean; + allowVariadicTypeVar?: boolean; + allowParamSpec?: boolean; + allowTypeArgList?: boolean; + allowUnpackedTuples?: boolean; } interface EffectiveTypeCacheEntry { @@ -314,25 +325,52 @@ interface EffectiveTypeCacheEntry { } interface MatchArgsToParamsResult { + overload: FunctionType; + overloadIndex: number; + argumentErrors: boolean; + isTypeIncomplete: boolean; argParams: ValidateArgTypeParams[]; activeParam?: FunctionParameter | undefined; paramSpecTarget?: TypeVarType | undefined; paramSpecArgList?: FunctionArgument[] | undefined; + + // A higher relevance means that it should be considered + // first, before lower relevance overloads. 
+ relevance: number; } interface ArgResult { isCompatible: boolean; isTypeIncomplete?: boolean | undefined; + skippedOverloadArg?: boolean; } interface ClassMemberLookup { - // Type of value. + symbol: Symbol | undefined; + + // Type of symbol. type: Type; isTypeIncomplete: boolean; // True if class member, false otherwise. isClassMember: boolean; + + // The class that declares the accessed member. + classType?: ClassType | UnknownType; + + // True if the member is explicitly declared as ClassVar + // within a Protocol. + isClassVar: boolean; + + // Is member a descriptor object that is asymmetric with respect + // to __get__ and __set__ types? + isAsymmetricDescriptor: boolean; +} + +export interface DescriptorTypeResult { + type: Type; + isAsymmetricDescriptor: boolean; } interface AliasMapEntry { @@ -347,10 +385,24 @@ interface ParamAssignmentInfo { } interface CallResult { + // Specialized return type of call returnType?: Type | undefined; + + // Is return type incomplete? isTypeIncomplete?: boolean | undefined; + + // Were any errors discovered when evaluating argument types? argumentErrors: boolean; + + // The parameter associated with the "active" argument (used + // for signature help provider) activeParam?: FunctionParameter | undefined; + + // If the call is to an __init__ with an annotated self parameter, + // this field indicates the specialized type of that self type; this + // is used for overloaded constructors where the arguments to the + // constructor influence the specialized type of the constructed object. + specializedInitSelfType?: Type | undefined; } // Maps binary operators to the magic methods that implement them. 
@@ -419,26 +471,6 @@ const typePromotions: Map = new Map([ ['builtins.bytes', ['builtins.bytearray', 'builtins.memoryview']], ]); -interface CodeFlowAnalyzer { - getTypeFromCodeFlow: ( - flowNode: FlowNode, - reference: CodeFlowReferenceExpressionNode | undefined, - targetSymbolId: number | undefined, - initialType: Type | undefined, - isInitialTypeIncomplete: boolean - ) => FlowNodeTypeResult; -} - -interface FlowNodeTypeResult { - type: Type | undefined; - usedOuterScopeAlias: boolean; - isIncomplete: boolean; - generationCount?: number | undefined; - incompleteType?: Type | undefined; - incompleteSubtypes?: IncompleteSubtypeInfo[] | undefined; - recursiveVisitCount?: number; -} - interface SymbolResolutionStackEntry { // The symbol ID and declaration being resolved. symbolId: number; @@ -458,11 +490,16 @@ interface ReturnTypeInferenceContext { codeFlowAnalyzer: CodeFlowAnalyzer; } +interface ProtocolAssignmentStackEntry { + srcType: ClassType; + destType: ClassType; +} + // How many levels deep should we attempt to infer return // types based on call-site argument types? The deeper we go, // the more types we may be able to infer, but the worse the // performance. -const maxReturnTypeInferenceStackSize = 3; +const maxReturnTypeInferenceStackSize = 2; // What is the max number of input arguments we should allow // for call-site return type inference? We've found that large, @@ -470,6 +507,11 @@ const maxReturnTypeInferenceStackSize = 3; // analyze. const maxReturnTypeInferenceArgumentCount = 6; +// What is the max complexity of the code flow graph that +// we will analyze to determine the return type of a function +// when its parameters are unannotated? +const maxReturnTypeInferenceCodeFlowComplexity = 15; + // How many entries in a list, set, or dict should we examine // when inferring the type? We need to cut it off at some point // to avoid excessive computation. @@ -483,44 +525,45 @@ const maxSubtypesForInferredType = 64; // when resolving an overload. 
const maxOverloadUnionExpansionCount = 64; -// Maximum number of times a loop flow node will be evaluated -// with incomplete results before we give up. -const maxFlowNodeLoopVisitCount = 64; - -// Maximum number of times getTypeFromFlowNode can be called -// recursively within loop or branch processing before we give up. -const maxCodeFlowInvocationsPerLoop = 16 * 1024; +// This switch enables a special debug mode that attempts to catch +// bugs due to inconsistent evaluation flags used when reading types +// from the type cache. +const verifyTypeCacheEvaluatorFlags = false; export interface EvaluatorOptions { - disableInferenceForPyTypedSources: boolean; printTypeFlags: TypePrinter.PrintTypeFlags; logCalls: boolean; minimumLoggingThreshold: number; + analyzeUnannotatedFunctions: boolean; + evaluateUnknownImportsAsAny: boolean; + verifyTypeCacheEvaluatorFlags: boolean; } export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions: EvaluatorOptions): TypeEvaluator { const symbolResolutionStack: SymbolResolutionStackEntry[] = []; - const isReachableRecursionMap = new Map(); const functionRecursionMap = new Map(); - const callIsNoReturnCache = new Map(); - const isExceptionContextManagerCache = new Map(); const codeFlowAnalyzerCache = new Map(); const typeCache: TypeCache = new Map(); + const typeCacheFlags = new Map(); + const asymmetricDescriptorAssignmentCache = new Set(); + const expectedTypeCache = new Map(); const speculativeTypeTracker = new SpeculativeTypeTracker(); const effectiveTypeCache = new Map(); const suppressedNodeStack: ParseNode[] = []; const incompleteTypeTracker = new IncompleteTypeTracker(); + const protocolAssignmentStack: ProtocolAssignmentStackEntry[] = []; let cancellationToken: CancellationToken | undefined; - let codeFlowInvocations = 0; - let flowIncompleteGeneration = 1; let isBasicTypesInitialized = false; let noneType: Type | undefined; + let unionType: Type | undefined; let objectType: Type | undefined; let 
typeClassType: Type | undefined; + let functionObj: Type | undefined; let tupleClassType: Type | undefined; let boolClassType: Type | undefined; let strClassType: Type | undefined; let dictClassType: Type | undefined; + let typedDictClassType: Type | undefined; let incompleteTypeCache: TypeCache | undefined; const returnTypeInferenceContextStack: ReturnTypeInferenceContext[] = []; @@ -545,7 +588,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return typeCache.size; } - function readTypeCache(node: ParseNode): Type | undefined { + function isTypeCached(node: ParseNode) { + let cachedType: CachedType | undefined; + + if (returnTypeInferenceTypeCache && isNodeInReturnTypeInferenceContext(node)) { + cachedType = returnTypeInferenceTypeCache.get(node.id); + } else { + cachedType = typeCache.get(node.id); + } + + return cachedType !== undefined; + } + + function readTypeCache(node: ParseNode, flags: EvaluatorFlags | undefined): Type | undefined { let cachedType: CachedType | undefined; // Should we use a temporary cache associated with a contextual @@ -560,6 +615,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return undefined; } + if (evaluatorOptions.verifyTypeCacheEvaluatorFlags || verifyTypeCacheEvaluatorFlags) { + if (flags !== undefined) { + const expectedFlags = typeCacheFlags.get(node.id); + if (expectedFlags !== undefined && flags !== expectedFlags) { + fail( + `Type cache flag mismatch for node type ${node.nodeType}: ` + + `cached flags = ${expectedFlags}, access flags = ${flags}` + ); + } + } + } + assert(!isIncompleteType(cachedType)); return cachedType as Type; } @@ -567,6 +634,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function writeTypeCache( node: ParseNode, type: Type, + flags: EvaluatorFlags | undefined, isIncomplete: boolean, expectedType?: Type, allowSpeculativeCaching = false @@ -587,6 +655,12 @@ export function 
createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeCacheToUse.set(node.id, type); + if (evaluatorOptions.verifyTypeCacheEvaluatorFlags || verifyTypeCacheEvaluatorFlags) { + if (typeCacheToUse === typeCache && flags !== undefined) { + typeCacheFlags.set(node.id, flags); + } + } + // If the entry is located within a part of the parse tree that is currently being // "speculatively" evaluated, track it so we delete the cached entry when we leave // this speculative context. @@ -609,6 +683,22 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeCacheToUse.delete(node.id); } + function setTypeForNode(node: ParseNode, type: Type = UnknownType.create(), flags = EvaluatorFlags.None) { + writeTypeCache(node, type, flags, /* isIncomplete */ false); + } + + function setAsymmetricDescriptorAssignment(node: ParseNode) { + if (speculativeTypeTracker.isSpeculative(undefined)) { + return; + } + + asymmetricDescriptorAssignmentCache.add(node.id); + } + + function isAsymmetricDescriptorAssignment(node: ParseNode) { + return asymmetricDescriptorAssignmentCache.has(node.id); + } + // Determines whether the specified node is contained within // the function node corresponding to the function that we // are currently analyzing in the context of parameter types @@ -694,6 +784,28 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions })?.type; } + // Determines the expected type of a specified node based on surrounding + // context. For example, if it's a subexpression of an argument expression, + // the associated parameter type might inform the expected type. 
+ function getExpectedType(node: ExpressionNode): ExpectedTypeResult | undefined { + evaluateTypesForExpressionInContext(node); + + let curNode: ParseNode | undefined = node; + while (curNode !== undefined) { + const expectedType = expectedTypeCache.get(curNode.id); + if (expectedType) { + return { + type: expectedType, + node: curNode, + }; + } + + curNode = curNode.parent; + } + + return undefined; + } + function initializedBasicTypes(node: ParseNode) { if (!isBasicTypesInitialized) { // Some of these types have cyclical dependencies on each other, @@ -702,6 +814,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions objectType = getBuiltInObject(node, 'object'); typeClassType = getBuiltInType(node, 'type'); + functionObj = getBuiltInObject(node, 'function'); // Initialize and cache "Collection" to break a cyclical dependency // that occurs when resolving tuple below. @@ -712,12 +825,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions boolClassType = getBuiltInType(node, 'bool'); strClassType = getBuiltInType(node, 'str'); dictClassType = getBuiltInType(node, 'dict'); + typedDictClassType = getTypingType(node, '_TypedDict'); } } function getTypeOfExpression(node: ExpressionNode, expectedType?: Type, flags = EvaluatorFlags.None): TypeResult { // Is this type already cached? - const cachedType = readTypeCache(node); + const cachedType = readTypeCache(node, flags); if (cachedType) { return { type: cachedType, node }; } else { @@ -755,7 +869,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Cache the type information in the member name node as well. 
if (!isTypeAliasPlaceholder(typeResult.type)) { - writeTypeCache(node.memberName, typeResult.type, !!typeResult.isIncomplete); + writeTypeCache(node.memberName, typeResult.type, flags, !!typeResult.isIncomplete); } break; } @@ -798,23 +912,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions (flags & EvaluatorFlags.EvaluateStringLiteralAsType) !== 0 && !isAnnotationLiteralValue(node); if (expectingType) { + let updatedFlags = flags | EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.ExpectingType; + + // In most cases, annotations within a string are not parsed by the interpreter. + // There are a few exceptions (e.g. the "bound" value for a TypeVar constructor). + if ((flags & EvaluatorFlags.InterpreterParsesStringLiteral) === 0) { + updatedFlags |= EvaluatorFlags.NotParsedByInterpreter; + } + if (node.typeAnnotation) { - typeResult = getTypeOfExpression( - node.typeAnnotation, - undefined, - flags | EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.ExpectingType - ); + typeResult = getTypeOfExpression(node.typeAnnotation, undefined, updatedFlags); } else if (!node.typeAnnotation && node.strings.length === 1) { // We didn't know at parse time that this string node was going // to be evaluated as a forward-referenced type. We need // to re-invoke the parser at this stage. const expr = parseStringAsTypeAnnotation(node); if (expr) { - typeResult = getTypeOfExpression( - expr, - undefined, - flags | EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.ExpectingType - ); + typeResult = getTypeOfExpression(expr, /* expectedType */ undefined, updatedFlags); } } @@ -920,11 +1034,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } case ParseNodeType.Await: { - typeResult = getTypeOfExpression(node.expression, undefined, flags); + const effectiveExpectedType = expectedType + ? 
createAwaitableReturnType(node, expectedType, /* isGenerator */ false) + : undefined; + + const exprTypeResult = getTypeOfExpression(node.expression, effectiveExpectedType, flags); typeResult = { - type: getTypeFromAwaitable(typeResult.type, node.expression), + type: getTypeFromAwaitable(exprTypeResult.type, node.expression), node, }; + + if (exprTypeResult.isIncomplete) { + typeResult.isIncomplete = true; + } break; } @@ -955,7 +1077,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeResult.type, /* isTypeIncomplete */ false, node.rightExpression, - /* ignoreEmptyContainers */ true + /* ignoreEmptyContainers */ true, + /* allowAssignmentToFinalVar */ true ); break; } @@ -997,7 +1120,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - const iterType = getTypeOfExpression(node.expression, iterExpectedType, flags).type; + const iterTypeResult = getTypeOfExpression(node.expression, iterExpectedType, flags); + const iterType = iterTypeResult.type; if ( (flags & EvaluatorFlags.TypeVarTupleDisallowed) === 0 && isVariadicTypeVar(iterType) && @@ -1005,8 +1129,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) { typeResult = { type: TypeVarType.cloneForUnpacked(iterType), node }; } else { - const type = getTypeFromIterator(iterType, /* isAsync */ false, node) || UnknownType.create(); - typeResult = { type, unpackedType: iterType, node }; + if ( + (flags & EvaluatorFlags.AllowUnpackedTupleOrTypeVarTuple) !== 0 && + isInstantiableClass(iterType) && + ClassType.isBuiltIn(iterType, 'tuple') + ) { + typeResult = { type: ClassType.cloneForUnpacked(iterType), node }; + } else { + const type = getTypeFromIterator(iterType, /* isAsync */ false, node) || UnknownType.create(); + typeResult = { type, unpackedType: iterType, node, isIncomplete: iterTypeResult.isIncomplete }; + } } break; } @@ -1063,6 +1195,7 @@ export function createTypeEvaluator(importLookup: 
ImportLookup, evaluatorOptions if (!isEmptyVariadic) { addExpectedClassDiagnostic(typeResult.type, node); + typeResult.type = UnknownType.create(); } } } @@ -1080,10 +1213,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions writeTypeCache( node, typeResult.type, + flags, !!typeResult.isIncomplete, expectedType, /* allowSpeculativeCaching */ true ); + + if (expectedType && !isAnyOrUnknown(expectedType) && !isNever(expectedType)) { + expectedTypeCache.set(node.id, expectedType); + } } return typeResult; @@ -1097,6 +1235,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); } + function getTypeOfParameterAnnotation(paramTypeNode: ExpressionNode, paramCategory: ParameterCategory) { + return getTypeOfAnnotation(paramTypeNode, { + associateTypeVarsWithScope: true, + allowTypeVarTuple: paramCategory === ParameterCategory.VarArgList, + disallowRecursiveTypeAlias: true, + allowUnpackedTypedDict: paramCategory === ParameterCategory.VarArgDictionary, + allowUnpackedTuple: paramCategory === ParameterCategory.VarArgList, + }); + } + function getTypeOfAnnotation(node: ExpressionNode, options?: AnnotationTypeOptions): Type { const fileInfo = AnalyzerNodeInfo.getFileInfo(node); @@ -1113,8 +1261,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions EvaluatorFlags.ExpectingType | EvaluatorFlags.ExpectingTypeAnnotation | EvaluatorFlags.ConvertEllipsisToAny | - EvaluatorFlags.EvaluateStringLiteralAsType | - EvaluatorFlags.ParamSpecDisallowed; + EvaluatorFlags.EvaluateStringLiteralAsType; if (options?.isVariableAnnotation) { evaluatorFlags |= EvaluatorFlags.VariableTypeAnnotation; @@ -1130,6 +1277,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (!options?.allowTypeVarTuple) { evaluatorFlags |= EvaluatorFlags.TypeVarTupleDisallowed; + } else { + evaluatorFlags |= EvaluatorFlags.AllowUnpackedTupleOrTypeVarTuple; + } + + if 
(!options?.allowParamSpec) { + evaluatorFlags |= EvaluatorFlags.ParamSpecDisallowed; } if (options?.associateTypeVarsWithScope) { @@ -1142,6 +1295,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions evaluatorFlags |= EvaluatorFlags.DisallowRecursiveTypeAliasPlaceholder; } + if (options?.allowUnpackedTypedDict) { + evaluatorFlags |= EvaluatorFlags.AllowUnpackedTypedDict; + } + + if (options?.allowUnpackedTuple) { + evaluatorFlags |= EvaluatorFlags.AllowUnpackedTupleOrTypeVarTuple; + } + + if (options?.notParsedByInterpreter) { + evaluatorFlags |= EvaluatorFlags.NotParsedByInterpreter; + } + if (isAnnotationEvaluationPostponed(fileInfo)) { evaluatorFlags |= EvaluatorFlags.AllowForwardReferences; } @@ -1149,31 +1314,41 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If the annotation is part of a comment, allow forward references // even if it's not enclosed in quotes. if (node?.parent?.nodeType === ParseNodeType.Assignment && node.parent.typeAnnotationComment === node) { - evaluatorFlags |= EvaluatorFlags.AllowForwardReferences; + evaluatorFlags |= EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.NotParsedByInterpreter; } else if (node?.parent?.nodeType === ParseNodeType.FunctionAnnotation) { if (node.parent.returnTypeAnnotation === node || node.parent.paramTypeAnnotations.some((n) => n === node)) { - evaluatorFlags |= EvaluatorFlags.AllowForwardReferences; + evaluatorFlags |= EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.NotParsedByInterpreter; } } else if (node?.parent?.nodeType === ParseNodeType.Parameter) { if (node.parent.typeAnnotationComment === node) { - evaluatorFlags |= EvaluatorFlags.AllowForwardReferences; + evaluatorFlags |= EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.NotParsedByInterpreter; } } - const classType = getTypeOfExpression(node, /* expectedType */ undefined, evaluatorFlags).type; + const annotationType = getTypeOfExpression(node, /* 
expectedType */ undefined, evaluatorFlags).type; + + if (isModule(annotationType)) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.moduleAsType(), + node + ); + } - return convertToInstance(classType); + return convertToInstance(annotationType); } function getTypeFromDecorator(node: DecoratorNode, functionOrClassType: Type): Type { - // Evaluate the type of the decorator expression. Do not specialize - // if it's not a call expression because it could evaluate to a generic - // class that we are instantiating. - const decoratorTypeResult = getTypeOfExpression( - node.expression, - /* expectedType */ undefined, - node.expression.nodeType === ParseNodeType.Call ? EvaluatorFlags.None : EvaluatorFlags.DoNotSpecialize - ); + // Evaluate the type of the decorator expression. + let flags = AnalyzerNodeInfo.getFileInfo(node).isStubFile + ? EvaluatorFlags.AllowForwardReferences + : EvaluatorFlags.None; + if (node.expression.nodeType !== ParseNodeType.Call) { + flags |= EvaluatorFlags.DoNotSpecialize; + } + + const decoratorTypeResult = getTypeOfExpression(node.expression, /* expectedType */ undefined, flags); // Special-case the combination of a classmethod decorator applied // to a property. 
This is allowed in Python 3.9, but it's not reflected @@ -1243,6 +1418,219 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return returnType; } + function canBeFalsy(type: Type, recursionCount = 0): boolean { + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + switch (type.category) { + case TypeCategory.Unbound: + case TypeCategory.Unknown: + case TypeCategory.Any: + case TypeCategory.Never: + case TypeCategory.None: { + return true; + } + + case TypeCategory.Union: { + return findSubtype(type, (subtype) => canBeFalsy(subtype, recursionCount)) !== undefined; + } + + case TypeCategory.Function: + case TypeCategory.OverloadedFunction: + case TypeCategory.Module: + case TypeCategory.TypeVar: { + return false; + } + + case TypeCategory.Class: { + if (TypeBase.isInstantiable(type)) { + return false; + } + + // Handle tuples specially. + if (isTupleClass(type) && type.tupleTypeArguments) { + return isUnboundedTupleClass(type) || type.tupleTypeArguments.length === 0; + } + + // Check for Literal[False] and Literal[True]. + if (ClassType.isBuiltIn(type, 'bool') && type.literalValue !== undefined) { + return type.literalValue === false; + } + + // If this is a protocol class, don't make any assumptions about the absence + // of specific methods. These could be provided by a class that conforms + // to the protocol. + if (ClassType.isProtocolClass(type)) { + return true; + } + + const lenMethod = lookUpObjectMember(type, '__len__'); + if (lenMethod) { + return true; + } + + const boolMethod = lookUpObjectMember(type, '__bool__'); + if (boolMethod) { + const boolMethodType = getTypeOfMember(boolMethod); + + // If the __bool__ function unconditionally returns True, it can never be falsy. 
+ if (isFunction(boolMethodType) && boolMethodType.details.declaredReturnType) { + const returnType = boolMethodType.details.declaredReturnType; + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, 'bool') && + returnType.literalValue === true + ) { + return false; + } + } + + return true; + } + + return false; + } + } + } + + function canBeTruthy(type: Type, recursionCount = 0): boolean { + if (recursionCount > maxTypeRecursionCount) { + return true; + } + recursionCount++; + + switch (type.category) { + case TypeCategory.Unknown: + case TypeCategory.Function: + case TypeCategory.OverloadedFunction: + case TypeCategory.Module: + case TypeCategory.TypeVar: + case TypeCategory.Never: + case TypeCategory.Any: { + return true; + } + + case TypeCategory.Union: { + return findSubtype(type, (subtype) => canBeTruthy(subtype, recursionCount)) !== undefined; + } + + case TypeCategory.Unbound: + case TypeCategory.None: { + return false; + } + + case TypeCategory.Class: { + if (TypeBase.isInstantiable(type)) { + return true; + } + + // Check for Tuple[()] (an empty tuple). + if (isTupleClass(type)) { + if (type.tupleTypeArguments && type.tupleTypeArguments!.length === 0) { + return false; + } + } + + // Check for Literal[False], Literal[0], Literal[""]. + if ( + type.literalValue === false || + type.literalValue === 0 || + type.literalValue === BigInt(0) || + type.literalValue === '' + ) { + return false; + } + + // If this is a protocol class, don't make any assumptions about the absence + // of specific methods. These could be provided by a class that conforms + // to the protocol. + if (ClassType.isProtocolClass(type)) { + return true; + } + + const boolMethod = lookUpObjectMember(type, '__bool__'); + if (boolMethod) { + const boolMethodType = getTypeOfMember(boolMethod); + + // If the __bool__ function unconditionally returns False, it can never be truthy. 
+ if (isFunction(boolMethodType) && boolMethodType.details.declaredReturnType) { + const returnType = boolMethodType.details.declaredReturnType; + if ( + isClassInstance(returnType) && + ClassType.isBuiltIn(returnType, 'bool') && + returnType.literalValue === false + ) { + return false; + } + } + } + + return true; + } + } + } + + // Filters a type such that that no part of it is definitely + // truthy. For example, if a type is a union of None + // and a custom class "Foo" that has no __len__ or __nonzero__ + // method, this method would strip off the "Foo" + // and return only the "None". + function removeTruthinessFromType(type: Type): Type { + return mapSubtypes(type, (subtype) => { + if (isClassInstance(subtype)) { + if (subtype.literalValue !== undefined) { + // If the object is already definitely falsy, it's fine to + // include, otherwise it should be removed. + return !subtype.literalValue ? subtype : undefined; + } + + // If the object is a bool, make it "false", since + // "true" is a truthy value. + if (ClassType.isBuiltIn(subtype, 'bool')) { + return ClassType.cloneWithLiteral(subtype, /* value */ false); + } + } + + // If it's possible for the type to be falsy, include it. + if (canBeFalsy(subtype)) { + return subtype; + } + + return undefined; + }); + } + + // Filters a type such that that no part of it is definitely + // falsy. For example, if a type is a union of None + // and an "int", this method would strip off the "None" + // and return only the "int". + function removeFalsinessFromType(type: Type): Type { + return mapSubtypes(type, (subtype) => { + if (isClassInstance(subtype)) { + if (subtype.literalValue !== undefined) { + // If the object is already definitely truthy, it's fine to + // include, otherwise it should be removed. + return subtype.literalValue ? subtype : undefined; + } + + // If the object is a bool, make it "true", since + // "false" is a falsy value. 
+ if (ClassType.isBuiltIn(subtype, 'bool')) { + return ClassType.cloneWithLiteral(subtype, /* value */ true); + } + } + + // If it's possible for the type to be truthy, include it. + if (canBeTruthy(subtype)) { + return subtype; + } + + return undefined; + }); + } + // Gets a member type from an object and if it's a function binds // it to the object. If bindToClass is undefined, the binding is done // using the objectType parameter. Callers can specify these separately @@ -1253,7 +1641,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions objectType: ClassType, memberName: string, usage: EvaluatorUsage = { method: 'get' }, - diag: DiagnosticAddendum = new DiagnosticAddendum(), + diag: DiagnosticAddendum | undefined = undefined, memberAccessFlags = MemberAccessFlags.None, bindToType?: ClassType | TypeVarType ): TypeResult | undefined { @@ -1268,7 +1656,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); if (memberInfo) { - return { node: errorNode, type: memberInfo.type, isIncomplete: !!memberInfo.isTypeIncomplete }; + return { + node: errorNode, + type: memberInfo.type, + isIncomplete: !!memberInfo.isTypeIncomplete, + isAsymmetricDescriptor: memberInfo.isAsymmetricDescriptor, + }; } return undefined; } @@ -1280,7 +1673,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions classType: ClassType, memberName: string, usage: EvaluatorUsage = { method: 'get' }, - diag: DiagnosticAddendum = new DiagnosticAddendum(), + diag: DiagnosticAddendum | undefined = undefined, memberAccessFlags = MemberAccessFlags.None, bindToType?: ClassType | TypeVarType ): TypeResult | undefined { @@ -1308,6 +1701,29 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); } + // If this is a protocol class X and we're accessing a non ClassVar, + // emit an error. 
+ if ( + memberInfo && + memberInfo.classType && + memberInfo.symbol && + isClass(memberInfo.classType) && + ClassType.isProtocolClass(memberInfo.classType) + ) { + const primaryDecl = getLastTypedDeclaredForSymbol(memberInfo.symbol); + if (primaryDecl && primaryDecl.type === DeclarationType.Variable && !memberInfo.isClassVar) { + addDiagnostic( + AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.protocolMemberNotClassVar().format({ + memberName, + className: memberInfo.classType.details.name, + }), + errorNode + ); + } + } + // If it wasn't found on the class, see if it's part of the metaclass. if (!memberInfo) { const metaclass = classType.details.effectiveMetaclass; @@ -1317,7 +1733,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions metaclass, memberName, usage, - new DiagnosticAddendum(), + /* diag */ undefined, memberAccessFlags, classType ); @@ -1325,8 +1741,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (memberInfo) { - return { node: errorNode, type: memberInfo.type, isIncomplete: !!memberInfo.isTypeIncomplete }; + return { + node: errorNode, + type: memberInfo.type, + isIncomplete: !!memberInfo.isTypeIncomplete, + isAsymmetricDescriptor: memberInfo.isAsymmetricDescriptor, + }; } + return undefined; } @@ -1490,13 +1912,34 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // more type information than __new__. methodType = getBoundMethod(subtype, '__init__'); - if ( - !methodType || - (isFunction(methodType) && FunctionType.isSkipConstructorCheck(methodType)) - ) { - // If there was no __init__ method, use the __new__ method - // instead. - methodType = getBoundMethod(subtype, '__new__', /* treatConstructorAsClassMember */ true); + // Is this the __init__ method provided by the object class? 
+ const isObjectInit = + !!methodType && + isFunction(methodType) && + methodType.details.fullName === 'builtins.object.__init__'; + const isSkipConstructor = + !!methodType && isFunction(methodType) && FunctionType.isSkipConstructorCheck(methodType); + + // If there was no `__init__` or the only `__init__` that was found + // was form the `object` class, see if we can find a better `__new__` + // method. + if (!methodType || isObjectInit || isSkipConstructor) { + const constructorType = getBoundMethod( + subtype, + '__new__', + /* treatConstructorAsClassMember */ true + ); + + if (constructorType) { + // Is this the __new__ method provided by the object class? + const isObjectNew = + isFunction(constructorType) && + constructorType.details.fullName === 'builtins.object.__new__'; + + if (!isObjectNew) { + methodType = constructorType; + } + } } if (methodType) { @@ -1548,6 +1991,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let symbol: Symbol | undefined; let classOrObjectBase: ClassType | undefined; let memberAccessClass: Type | undefined; + let bindFunction = true; switch (expression.nodeType) { case ParseNodeType.Name: { @@ -1587,7 +2031,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } case ParseNodeType.MemberAccess: { - const baseType = makeTopLevelTypeVarsConcrete(getTypeOfExpression(expression.leftExpression).type); + const baseType = makeTopLevelTypeVarsConcrete( + getTypeOfExpression( + expression.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type + ); let classMemberInfo: ClassMember | undefined; if (isClassInstance(baseType)) { @@ -1598,6 +2048,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); classOrObjectBase = baseType; memberAccessClass = classMemberInfo?.classType; + + // If this is an instance member (e.g. a dataclass field), don't + // bind it to the object if it's a function. 
+ if (classMemberInfo?.isInstanceMember) { + bindFunction = false; + } } else if (isInstantiableClass(baseType)) { classMemberInfo = lookUpClassMember( baseType, @@ -1674,12 +2130,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (isFunction(declaredType) || isOverloadedFunction(declaredType)) { - declaredType = bindFunctionToClassOrObject( - classOrObjectBase, - declaredType, - /* memberClass */ undefined, - expression - ); + if (bindFunction) { + declaredType = bindFunctionToClassOrObject( + classOrObjectBase, + declaredType, + /* memberClass */ undefined, + expression + ); + } } } @@ -1691,10 +2149,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Applies an "await" operation to the specified type and returns - // the result. According to PEP 492, await operates on: - // 1) a generator object - // 2) an Awaitable (object that provides an __await__ that - // returns a generator object) + // the result. According to PEP 492, await operates on an Awaitable + // (object that provides an __await__ that returns a generator object). // If errorNode is undefined, no errors are reported. 
function getTypeFromAwaitable(type: Type, errorNode?: ParseNode): Type { return mapSubtypes(type, (subtype) => { @@ -1702,11 +2158,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return subtype; } - const generatorReturnType = getReturnTypeFromGenerator(subtype); - if (generatorReturnType) { - return generatorReturnType; - } - if (isClassInstance(subtype)) { const awaitReturnType = getSpecializedReturnType(subtype, '__await__', errorNode); if (awaitReturnType) { @@ -1780,7 +2231,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions subtype.tupleTypeArguments && subtype.tupleTypeArguments.length === 0 ) { - return NeverType.create(); + return NeverType.createNever(); } iterReturnType = getSpecializedReturnType(subtype, iterMethodName, errorNode); @@ -1936,21 +2387,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } function getTypingType(node: ParseNode, symbolName: string): Type | undefined { - const fileInfo = AnalyzerNodeInfo.getFileInfo(node); - return getTypeFromTypeshedModule(symbolName, fileInfo.typingModulePath); + return getTypeFromModule(node, symbolName, ['typing']); } function getTypeshedType(node: ParseNode, symbolName: string): Type | undefined { - const fileInfo = AnalyzerNodeInfo.getFileInfo(node); - return getTypeFromTypeshedModule(symbolName, fileInfo.typeshedModulePath); + return getTypeFromModule(node, symbolName, ['_typeshed']); } - function getTypeFromTypeshedModule(symbolName: string, importPath: string | undefined) { - if (!importPath) { - return undefined; - } + function getTypeFromModule(node: ParseNode, symbolName: string, nameParts: string[]) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + const lookupResult = importLookup({ nameParts, importingFilePath: fileInfo.filePath }); - const lookupResult = importLookup(importPath); if (!lookupResult) { return undefined; } @@ -1963,13 +2410,18 @@ export function 
createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return getEffectiveTypeOfSymbol(symbol); } - function isNodeReachable(node: ParseNode): boolean { + function isNodeReachable(node: ParseNode, sourceNode?: ParseNode): boolean { const flowNode = AnalyzerNodeInfo.getFlowNode(node); if (!flowNode) { + if (node.parent) { + return isNodeReachable(node.parent, sourceNode); + } return false; } - if (!isFlowNodeReachable(flowNode)) { + const sourceFlowNode = sourceNode ? AnalyzerNodeInfo.getFlowNode(sourceNode) : undefined; + + if (!codeFlowEngine.isFlowNodeReachable(flowNode, sourceFlowNode)) { return false; } @@ -1982,7 +2434,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return false; } - if (!isFlowNodeReachable(returnFlowNode)) { + if (!codeFlowEngine.isFlowNodeReachable(returnFlowNode)) { return false; } @@ -2010,17 +2462,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Determines whether there is a code flow path from sourceNode to sinkNode. 
- function isFlowPathBetweenNodes(sourceNode: ParseNode, sinkNode: ParseNode) { + function isFlowPathBetweenNodes(sourceNode: ParseNode, sinkNode: ParseNode, allowSelf = true) { const sourceFlowNode = AnalyzerNodeInfo.getFlowNode(sourceNode); const sinkFlowNode = AnalyzerNodeInfo.getFlowNode(sinkNode); if (!sourceFlowNode || !sinkFlowNode) { return false; } if (sourceFlowNode === sinkFlowNode) { - return true; + return allowSelf; } - return isFlowNodeReachable(sinkFlowNode, sourceFlowNode); + return codeFlowEngine.isFlowNodeReachable(sinkFlowNode, sourceFlowNode); } // Determines whether the specified string literal is part @@ -2058,6 +2510,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + function addDeprecated(message: string, node: ParseNode) { + if (!isDiagnosticSuppressedForNode(node)) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); + fileInfo.diagnosticSink.addDeprecatedWithTextRange(message, node); + } + } + function addDiagnosticWithSuppressionCheck( diagLevel: DiagnosticLevel, message: string, @@ -2136,6 +2595,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type: Type, isTypeIncomplete: boolean, srcExpression?: ParseNode, + allowAssignmentToFinalVar = false, expectedTypeDiagAddendum?: DiagnosticAddendum ) { const nameValue = nameNode.value; @@ -2205,14 +2665,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // version by stripping off the literal. 
const scope = ScopeUtils.getScopeForNode(nameNode); if (scope?.type === ScopeType.Class) { - const isConstant = isConstantName(nameValue); - const isPrivate = isPrivateOrProtectedName(nameValue); - if ( TypeBase.isInstance(destType) && - !isConstant && - (!isPrivate || - AnalyzerNodeInfo.getFileInfo(nameNode).diagnosticRuleSet.reportPrivateUsage === 'none') + !isConstantName(nameValue) && + !isFinalVariable(symbolWithScope.symbol) ) { destType = stripLiteralValue(destType); } @@ -2233,12 +2689,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions nameNode ); } + } else if (varDecl.isFinal && !allowAssignmentToFinalVar) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.finalReassigned().format({ name: nameValue }), + nameNode + ); } } writeTypeCache( nameNode, destType, + EvaluatorFlags.None, isTypeIncomplete, /* expectedType */ undefined, /* allowSpeculativeCaching */ false @@ -2252,7 +2716,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions srcExpr?: ExpressionNode, expectedTypeDiagAddendum?: DiagnosticAddendum ) { - const baseTypeResult = getTypeOfExpression(target.leftExpression); + const baseTypeResult = getTypeOfExpression( + target.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ); const baseType = makeTopLevelTypeVarsConcrete(baseTypeResult.type); // Handle member accesses (e.g. self.x or cls.y). 
@@ -2266,11 +2734,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (classTypeResults && isInstantiableClass(classTypeResults.classType)) { if (isClassInstance(baseType)) { if (ClassType.isSameGenericClass(baseType, classTypeResults.classType)) { - assignTypeToMemberVariable(target, type, isTypeIncomplete, true, srcExpr); + assignTypeToMemberVariable( + target, + type, + isTypeIncomplete, + /* isInstanceMember */ true, + srcExpr + ); } } else if (isInstantiableClass(baseType)) { if (ClassType.isSameGenericClass(baseType, classTypeResults.classType)) { - assignTypeToMemberVariable(target, type, isTypeIncomplete, false, srcExpr); + assignTypeToMemberVariable( + target, + type, + isTypeIncomplete, + /* isInstanceMember */ false, + srcExpr + ); } } @@ -2291,16 +2771,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - getTypeFromMemberAccessWithBaseType( + const setTypeResult = getTypeFromMemberAccessWithBaseType( target, baseTypeResult, { method: 'set', setType: type, setErrorNode: srcExpr, setExpectedTypeDiag: expectedTypeDiagAddendum }, EvaluatorFlags.None ); + if (setTypeResult.isAsymmetricDescriptor) { + setAsymmetricDescriptorAssignment(target); + } + writeTypeCache( target.memberName, type, + EvaluatorFlags.None, isTypeIncomplete, /* expectedType */ undefined, /* allowSpeculativeCaching */ false @@ -2308,6 +2793,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions writeTypeCache( target, type, + EvaluatorFlags.None, isTypeIncomplete, /* expectedType */ undefined, /* allowSpeculativeCaching */ false @@ -2372,9 +2858,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assert(symbol !== undefined); const typedDecls = symbol.getDeclarations(); - let isFinalVar = isFinalVariable(symbol); - // Check for an attempt to overwrite a constant or final member variable. 
+ // Check for an attempt to overwrite a constant member variable. if ( typedDecls.length > 0 && typedDecls[0].type === DeclarationType.Variable && @@ -2389,20 +2874,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.memberName ); } - - // If a Final instance variable is declared in the class body but is - // being assigned within an __init__ method, it's allowed. - const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(node); - if (enclosingFunctionNode && enclosingFunctionNode.name.value === '__init__') { - isFinalVar = false; - } - - if (isFinalVar) { - addError( - Localizer.Diagnostic.finalReassigned().format({ name: node.memberName.value }), - node.memberName - ); - } } } else { // Is the target a property? @@ -2441,105 +2912,133 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - function assignTypeToTupleNode(target: TupleNode, type: Type, isTypeIncomplete: boolean, srcExpr: ExpressionNode) { + function assignTypeToTupleOrListNode( + target: TupleNode | ListNode, + type: Type, + isTypeIncomplete: boolean, + srcExpr: ExpressionNode + ) { + const targetExpressions = target.nodeType === ParseNodeType.List ? target.entries : target.expressions; + // Initialize the array of target types, one for each target. - const targetTypes: Type[][] = new Array(target.expressions.length); - for (let i = 0; i < target.expressions.length; i++) { + const targetTypes: Type[][] = new Array(targetExpressions.length); + for (let i = 0; i < targetExpressions.length; i++) { targetTypes[i] = []; } + const targetUnpackIndex = targetExpressions.findIndex((expr) => expr.nodeType === ParseNodeType.Unpack); // Do any of the targets use an unpack operator? If so, it will consume all of the // entries at that location. 
- const unpackIndex = target.expressions.findIndex((expr) => expr.nodeType === ParseNodeType.Unpack); + const unpackIndex = targetExpressions.findIndex((expr) => expr.nodeType === ParseNodeType.Unpack); + + type = makeTopLevelTypeVarsConcrete(type); + + const diagAddendum = new DiagnosticAddendum(); doForEachSubtype(type, (subtype) => { // Is this subtype a tuple? const tupleType = getSpecializedTupleType(subtype); if (tupleType && tupleType.tupleTypeArguments) { - const sourceEntryTypes = tupleType.tupleTypeArguments; - const sourceEntryCount = sourceEntryTypes.length; + const sourceEntryTypes = tupleType.tupleTypeArguments.map((t) => + addConditionToType(t.type, getTypeCondition(subtype)) + ); - // Is this a homogenous tuple of indeterminate length? - if (isOpenEndedTupleClass(tupleType)) { - for (let index = 0; index < target.expressions.length; index++) { - targetTypes[index].push(addConditionToType(sourceEntryTypes[0], getTypeCondition(subtype))); - } - } else { - let sourceIndex = 0; - let targetIndex = 0; - for (targetIndex = 0; targetIndex < target.expressions.length; targetIndex++) { - if (targetIndex === unpackIndex) { - // Consume as many source entries as necessary to - // make the remaining tuple entry counts match. - const remainingTargetEntries = target.expressions.length - targetIndex - 1; - const remainingSourceEntries = sourceEntryCount - sourceIndex; - let entriesToPack = Math.max(remainingSourceEntries - remainingTargetEntries, 0); - while (entriesToPack > 0) { - targetTypes[targetIndex].push( - addConditionToType(sourceEntryTypes[sourceIndex], getTypeCondition(subtype)) - ); - sourceIndex++; - entriesToPack--; - } - } else { - if (sourceIndex >= sourceEntryCount) { - // No more source entries to assign. 
- break; - } + const unboundedIndex = tupleType.tupleTypeArguments.findIndex((t) => t.isUnbounded); - targetTypes[targetIndex].push( - addConditionToType(sourceEntryTypes[sourceIndex], getTypeCondition(subtype)) - ); - sourceIndex++; + if (unboundedIndex >= 0) { + if (sourceEntryTypes.length > targetTypes.length) { + // Splice out the unbounded since it might be zero length. + sourceEntryTypes.splice(unboundedIndex, 1); + } else if (sourceEntryTypes.length < targetTypes.length) { + const typeToReplicate = + sourceEntryTypes.length > 0 ? sourceEntryTypes[unboundedIndex] : AnyType.create(); + + // Add elements to make the count match the target count. + while (sourceEntryTypes.length < targetTypes.length) { + sourceEntryTypes.splice(unboundedIndex, 0, typeToReplicate); } } + } - // Have we accounted for all of the targets and sources? If not, we have a size mismatch. - if (targetIndex < target.expressions.length || sourceIndex < sourceEntryCount) { - const fileInfo = AnalyzerNodeInfo.getFileInfo(target); - const expectedEntryCount = - unpackIndex >= 0 ? target.expressions.length - 1 : target.expressions.length; - addDiagnostic( - fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.tupleSizeMismatch().format({ - expected: expectedEntryCount, - received: sourceEntryCount, - }), - target + // If there's an unpack operator in the target and we have too many source elements, + // combine them to assign to the unpacked target. 
+ if (targetUnpackIndex >= 0) { + if (sourceEntryTypes.length > targetTypes.length) { + const removedEntries = sourceEntryTypes.splice( + targetUnpackIndex, + sourceEntryTypes.length - targetTypes.length + 1 ); + let combinedTypes = combineTypes(removedEntries); + if (target.nodeType === ParseNodeType.List) { + combinedTypes = stripLiteralValue(combinedTypes); + } + sourceEntryTypes.splice(targetUnpackIndex, 0, combinedTypes); + } else if (sourceEntryTypes.length === targetTypes.length - 1) { + sourceEntryTypes.splice(targetUnpackIndex, 0, NeverType.createNever()); + } + } + + sourceEntryTypes.forEach((type, targetIndex) => { + if (targetIndex < targetTypes.length) { + targetTypes[targetIndex].push(type); } + }); + + // Have we accounted for all of the targets and sources? If not, we have a size mismatch. + if (sourceEntryTypes.length !== targetExpressions.length) { + const expectedEntryCount = + unpackIndex >= 0 ? targetExpressions.length - 1 : targetExpressions.length; + const subDiag = diagAddendum.createAddendum(); + subDiag.addMessage( + (target.nodeType === ParseNodeType.List + ? Localizer.DiagnosticAddendum.listAssignmentMismatch() + : Localizer.DiagnosticAddendum.tupleAssignmentMismatch() + ).format({ + type: printType(subtype), + }) + ); + subDiag.createAddendum().addMessage( + Localizer.DiagnosticAddendum.tupleSizeMismatch().format({ + expected: expectedEntryCount, + received: sourceEntryTypes.length, + }) + ); } } else { // The assigned expression isn't a tuple, so it had better // be some iterable type. const iterableType = getTypeFromIterator(subtype, /* isAsync */ false, srcExpr) || UnknownType.create(); - for (let index = 0; index < target.expressions.length; index++) { + for (let index = 0; index < targetExpressions.length; index++) { targetTypes[index].push(addConditionToType(iterableType, getTypeCondition(subtype))); } } }); - // Assign the resulting types to the individual names in the tuple target expression. 
- target.expressions.forEach((expr, index) => { + if (!diagAddendum.isEmpty()) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(target); + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + (target.nodeType === ParseNodeType.List + ? Localizer.Diagnostic.listAssignmentMismatch() + : Localizer.Diagnostic.tupleAssignmentMismatch() + ).format({ + type: printType(type), + }) + diagAddendum.getString(), + target + ); + } + + // Assign the resulting types to the individual names in the tuple + // or list target expression. + targetExpressions.forEach((expr, index) => { const typeList = targetTypes[index]; - let targetType = typeList.length === 0 ? UnknownType.create() : combineTypes(typeList); - targetType = removeNoReturnFromUnion(targetType); - - // If the target uses an unpack operator, wrap the target type in a list. - if (index === unpackIndex) { - const listType = getBuiltInType(expr, 'list'); - if (isInstantiableClass(listType)) { - targetType = ClassType.cloneAsInstance( - ClassType.cloneForSpecialization(listType, [targetType], /* isTypeArgumentExplicit */ true) - ); - } - } + const targetType = typeList.length === 0 ? 
UnknownType.create() : combineTypes(typeList); assignTypeToExpression(expr, targetType, isTypeIncomplete, srcExpr, /* ignoreEmptyContainers */ true); }); - writeTypeCache(target, type, isTypeIncomplete); + writeTypeCache(target, type, EvaluatorFlags.None, isTypeIncomplete); } // Replaces all of the top-level TypeVars (as opposed to TypeVars @@ -2557,7 +3056,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isClassInstance(objectType) ) { return ClassType.cloneAsInstance( - specializeTupleClass(tupleClassType, [objectType, AnyType.create(/* isEllipsis */ true)]) + specializeTupleClass(tupleClassType, [{ type: objectType, isUnbounded: true }]) ); } @@ -2584,6 +3083,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + if (isVariadicTypeVar(subtype)) { + if (tupleClassType && isInstantiableClass(tupleClassType)) { + return convertToInstance( + specializeTupleClass( + tupleClassType, + [], + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true, + /* isUnpackedTuple */ true + ) + ); + } + } + if (isTypeVar(subtype) && !subtype.details.recursiveTypeAliasName) { if (subtype.details.boundType) { const boundType = TypeBase.isInstantiable(subtype) @@ -2593,7 +3106,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return subtype.details.isSynthesized ? 
boundType : addConditionToType(boundType, [ - { typeVarName: TypeVarType.getNameWithScope(subtype), constraintIndex: 0 }, + { + typeVarName: TypeVarType.getNameWithScope(subtype), + constraintIndex: 0, + isConstrainedTypeVar: false, + }, ]); } @@ -2631,6 +3148,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions { typeVarName: TypeVarType.getNameWithScope(subtype), constraintIndex, + isConstrainedTypeVar: true, }, ]) ); @@ -2639,6 +3157,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return combineTypes(typesToCombine); } + if (subtype.details.isExemptFromBoundCheck) { + return AnyType.create(); + } + // Convert to an "object" or "type" instance depending on whether // it's instantiable. if (TypeBase.isInstantiable(subtype)) { @@ -2649,6 +3171,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions { typeVarName: TypeVarType.getNameWithScope(subtype), constraintIndex: 0, + isConstrainedTypeVar: false, }, ]); } @@ -2656,7 +3179,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return subtype.details.isSynthesized ? objectType : addConditionToType(objectType, [ - { typeVarName: TypeVarType.getNameWithScope(subtype), constraintIndex: 0 }, + { + typeVarName: TypeVarType.getNameWithScope(subtype), + constraintIndex: 0, + isConstrainedTypeVar: false, + }, ]); } @@ -2682,9 +3209,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let typeChanged = false; const expandSubtype = (unexpandedType: Type) => { - const expandedType = isUnion(unexpandedType) - ? unexpandedType - : makeTopLevelTypeVarsConcrete(unexpandedType); + let expandedType = isUnion(unexpandedType) ? 
unexpandedType : makeTopLevelTypeVarsConcrete(unexpandedType); + + expandedType = transformPossibleRecursiveTypeAlias(expandedType); doForEachSubtype(expandedType, (subtype) => { if (conditionFilter) { @@ -2693,12 +3220,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - const transformedType = callback(subtype, unexpandedType); + let transformedType = callback(subtype, unexpandedType); if (transformedType !== unexpandedType) { typeChanged = true; } if (transformedType) { - newSubtypes.push(addConditionToType(transformedType, getTypeCondition(subtype))); + // Apply the type condition if it's associated with a constrained TypeVar. + const typeCondition = getTypeCondition(subtype)?.filter( + (condition) => condition.isConstrainedTypeVar + ); + if (typeCondition && typeCondition.length > 0) { + transformedType = addConditionToType(transformedType, typeCondition); + } + + newSubtypes.push(transformedType); } return undefined; }); @@ -2712,7 +3247,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions expandSubtype(type); } - return typeChanged ? combineTypes(newSubtypes) : type; + if (!typeChanged) { + return type; + } + + const newType = combineTypes(newSubtypes); + + // Do our best to retain type aliases. + if (newType.category === TypeCategory.Union) { + UnionType.addTypeAliasSource(newType, type); + } + return newType; } function markNamesAccessed(node: ParseNode, names: string[]) { @@ -2735,12 +3280,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isTypeIncomplete: boolean, srcExpr: ExpressionNode, ignoreEmptyContainers = false, + allowAssignmentToFinalVar = false, expectedTypeDiagAddendum?: DiagnosticAddendum ) { // Is the source expression a TypeVar() call? 
if (isTypeVar(type)) { if (srcExpr && srcExpr.nodeType === ParseNodeType.Call) { - const callType = getTypeOfExpression(srcExpr.leftExpression).type; + const callType = getTypeOfExpression( + srcExpr.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; if ( isInstantiableClass(callType) && (ClassType.isBuiltIn(callType, 'TypeVar') || @@ -2781,7 +3331,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); } - assignTypeToNameNode(target, type, isTypeIncomplete, srcExpr, expectedTypeDiagAddendum); + assignTypeToNameNode( + target, + type, + isTypeIncomplete, + srcExpr, + allowAssignmentToFinalVar, + expectedTypeDiagAddendum + ); break; } @@ -2809,12 +3366,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions EvaluatorFlags.None ); - writeTypeCache(target, type, isTypeIncomplete); + writeTypeCache(target, type, EvaluatorFlags.None, isTypeIncomplete); break; } + case ParseNodeType.List: case ParseNodeType.Tuple: { - assignTypeToTupleNode(target, type, isTypeIncomplete, srcExpr); + assignTypeToTupleOrListNode(target, type, isTypeIncomplete, srcExpr); break; } @@ -2822,15 +3380,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const annotationType: Type | undefined = getTypeOfAnnotation(target.typeAnnotation, { isVariableAnnotation: true, allowFinal: ParseTreeUtils.isFinalAllowedForAssignmentTarget(target.valueExpression), + allowClassVar: ParseTreeUtils.isClassVarAllowedForAssignmentTarget(target.valueExpression), }); - // Handle a bare "Final" in a special manner. - if (!isClassInstance(annotationType) || !ClassType.isBuiltIn(annotationType, 'Final')) { + // Handle a bare "Final" or "ClassVar" in a special manner. 
+ const isBareFinalOrClassVar = + isClassInstance(annotationType) && + (ClassType.isBuiltIn(annotationType, 'Final') || ClassType.isBuiltIn(annotationType, 'ClassVar')); + + if (!isBareFinalOrClassVar) { const isTypeAliasAnnotation = isClassInstance(annotationType) && ClassType.isBuiltIn(annotationType, 'TypeAlias'); if (!isTypeAliasAnnotation) { - if (canAssignType(annotationType, type, new DiagnosticAddendum())) { + if (canAssignType(annotationType, type)) { // Don't attempt to narrow based on the annotated type if the type // is a enum because the annotated type in an enum doesn't reflect // the type of the symbol. @@ -2847,6 +3410,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions /* isIncomplete */ false, srcExpr, ignoreEmptyContainers, + allowAssignmentToFinalVar, expectedTypeDiagAddendum ); break; @@ -2854,24 +3418,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions case ParseNodeType.Unpack: { if (target.expression.nodeType === ParseNodeType.Name) { - assignTypeToNameNode(target.expression, type, /* isIncomplete */ false, srcExpr); - } - break; - } - - case ParseNodeType.List: { - // The assigned expression had better be some iterable type. 
- const iteratedType = getTypeFromIterator(type, /* isAsync */ false, srcExpr) || UnknownType.create(); - - target.entries.forEach((entry) => { - assignTypeToExpression( - entry, - iteratedType, + assignTypeToNameNode( + target.expression, + getBuiltInObject(target.expression, 'list', [type]), /* isIncomplete */ false, - srcExpr, - ignoreEmptyContainers + srcExpr ); - }); + } break; } @@ -2887,13 +3440,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } default: { - const fileInfo = AnalyzerNodeInfo.getFileInfo(target); - addDiagnostic( - fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.assignmentTargetExpr(), - target - ); + addError(Localizer.Diagnostic.assignmentTargetExpr(), target); break; } } @@ -2990,7 +3537,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions case ParseNodeType.Name: { // Get the type to evaluate whether it's bound // and to mark it accessed. 
- getTypeOfExpression(node, /* expectedType */ undefined, EvaluatorFlags.SkipUnboundCheck); + getTypeOfExpression(node); break; } @@ -3000,10 +3547,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node, baseTypeResult, { method: 'del' }, - EvaluatorFlags.SkipUnboundCheck + EvaluatorFlags.None ); - writeTypeCache(node.memberName, memberType.type, /* isIncomplete */ false); - writeTypeCache(node, memberType.type, /* isIncomplete */ false); + writeTypeCache(node.memberName, memberType.type, EvaluatorFlags.None, /* isIncomplete */ false); + writeTypeCache(node, memberType.type, EvaluatorFlags.None, /* isIncomplete */ false); break; } @@ -3013,13 +3560,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions undefined, EvaluatorFlags.DoNotSpecialize ); - getTypeFromIndexWithBaseType( - node, - baseTypeResult.type, - { method: 'del' }, - EvaluatorFlags.SkipUnboundCheck - ); - writeTypeCache(node, UnboundType.create(), /* isIncomplete */ false); + getTypeFromIndexWithBaseType(node, baseTypeResult.type, { method: 'del' }, EvaluatorFlags.None); + writeTypeCache(node, UnboundType.create(), EvaluatorFlags.None, /* isIncomplete */ false); break; } @@ -3035,7 +3577,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // type information is cached for the completion handler. 
if (node.child) { suppressDiagnostics(node.child, () => { - getTypeOfExpression(node.child!, /* expectedType */ undefined, EvaluatorFlags.SkipUnboundCheck); + getTypeOfExpression(node.child!, /* expectedType */ undefined); }); } break; @@ -3120,9 +3662,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let isIncomplete = false; const allowForwardReferences = (flags & EvaluatorFlags.AllowForwardReferences) !== 0 || fileInfo.isStubFile; + if (!evaluatorOptions.analyzeUnannotatedFunctions) { + const containingFunction = ParseTreeUtils.getEnclosingFunction(node); + if (containingFunction && ParseTreeUtils.isUnannotatedFunction(containingFunction)) { + return { + node, + type: AnyType.create(), + isIncomplete: false, + }; + } + } + // Look for the scope that contains the value definition and // see if it has a declared type. - const symbolWithScope = lookUpSymbolRecursive(node, name, !allowForwardReferences); + const symbolWithScope = lookUpSymbolRecursive( + node, + name, + !allowForwardReferences, + allowForwardReferences && (flags & EvaluatorFlags.ExpectingTypeAnnotation) !== 0 + ); if (symbolWithScope) { let useCodeFlowAnalysis = !allowForwardReferences; @@ -3181,13 +3739,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isIncomplete = true; } - // If the symbol used by the code flow engine isn't the same - // as the original symbol, then an outer-scoped symbol was used, - // and we need to mark it as accessed. 
- if (codeFlowTypeResult.usedOuterScopeAlias) { - const outerScopeSymbol = symbolWithScope.scope.parent?.lookUpSymbolRecursive(name); - if (outerScopeSymbol) { - setSymbolAccessed(fileInfo, outerScopeSymbol.symbol, node); + if (!codeFlowTypeResult.type && symbolWithScope.isBeyondExecutionScope) { + const outerScopeTypeResult = getCodeFlowTypeForCapturedVariable( + node, + symbolWithScope, + effectiveType + ); + + if (outerScopeTypeResult?.type) { + type = outerScopeTypeResult.type; + } + + if (outerScopeTypeResult?.isIncomplete) { + isIncomplete = true; } } } @@ -3195,31 +3759,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Detect, report, and fill in missing type arguments if appropriate. type = reportMissingTypeArguments(node, type, flags); - // If there is a resolution cycle, don't report it as an unbound symbol - // at this time. It will be re-evaluated as the call stack unwinds, and - // its actual type will be known then. Also, if the node is unreachable - // but within a reachable statement (e.g. if False and ) then avoid - // reporting an unbound error. - if (!isIncomplete && !AnalyzerNodeInfo.isCodeUnreachable(node)) { - if ((flags & EvaluatorFlags.SkipUnboundCheck) === 0) { - if (isUnbound(type)) { - addDiagnostic( - fileInfo.diagnosticRuleSet.reportUnboundVariable, - DiagnosticRule.reportUnboundVariable, - Localizer.Diagnostic.symbolIsUnbound().format({ name }), - node - ); - } else if (isPossiblyUnbound(type)) { - addDiagnostic( - fileInfo.diagnosticRuleSet.reportUnboundVariable, - DiagnosticRule.reportUnboundVariable, - Localizer.Diagnostic.symbolIsPossiblyUnbound().format({ name }), - node - ); - } - } - } - setSymbolAccessed(fileInfo, symbol, node); if ((flags & EvaluatorFlags.ExpectingTypeAnnotation) !== 0) { @@ -3237,13 +3776,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // but was reconstituted in such a way that we lost the // typeAliasInfo. 
Avoid the false positive error by suppressing // the error when it looks like a plausible type alias type. - if (!TypeBase.isInstantiable(type)) { + if (!TypeBase.isInstantiable(type) || (flags & EvaluatorFlags.DoNotSpecialize) !== 0) { addDiagnostic( fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, Localizer.Diagnostic.typeAnnotationVariable(), node ); + type = UnknownType.create(); } } } @@ -3282,7 +3822,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if ((flags & EvaluatorFlags.ExpectingType) !== 0) { - if ((flags & EvaluatorFlags.GenericClassTypeAllowed) === 0) { + if ((flags & EvaluatorFlags.AllowGenericClassType) === 0) { if (isInstantiableClass(type) && ClassType.isBuiltIn(type, 'Generic')) { addDiagnostic( AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.reportGeneralTypeIssues, @@ -3301,6 +3841,84 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { type, node, isIncomplete }; } + // Handles the case where a variable or parameter is defined in an outer + // scope and captured by an inner scope (either a function or a lambda). + function getCodeFlowTypeForCapturedVariable( + node: NameNode, + symbolWithScope: SymbolWithScope, + effectiveType: Type + ): FlowNodeTypeResult | undefined { + // This function applies only to variables and parameters, not to other + // types of symbols. + if ( + !symbolWithScope.symbol + .getDeclarations() + .every((decl) => decl.type === DeclarationType.Variable || decl.type === DeclarationType.Parameter) + ) { + return undefined; + } + + // If the symbol is a variable captured by an inner function + // or lambda, see if we can infer the type from the outer scope. + const scopeHierarchy = ScopeUtils.getScopeHierarchy(node, symbolWithScope.scope); + + // Handle the case where all of the nested scopes are functions, + // lambdas and modules. Don't allow other types of scopes. 
+ if ( + scopeHierarchy && + scopeHierarchy.length >= 2 && + scopeHierarchy.every((s) => s.type === ScopeType.Function || s.type === ScopeType.Module) + ) { + // Find the parse node associated with the scope that is just inside of the + // scope that declares the captured variable. + const innerScopeNode = ScopeUtils.findTopNodeInScope(node, scopeHierarchy[scopeHierarchy.length - 2]); + if ( + innerScopeNode && + (innerScopeNode.nodeType === ParseNodeType.Function || innerScopeNode.nodeType === ParseNodeType.Lambda) + ) { + const innerScopeCodeFlowNode = AnalyzerNodeInfo.getFlowNode(innerScopeNode); + if (innerScopeCodeFlowNode) { + // See if any of the assignments of the symbol are reachable + // from this node. If so, we cannot apply any narrowing because + // the type could change after the capture. + if ( + symbolWithScope.symbol.getDeclarations().every((decl) => { + // Parameter declarations always start life at the beginning + // of the execution scope, so they are always safe to narrow. + if (decl.type === DeclarationType.Parameter) { + return true; + } + + const declCodeFlowNode = AnalyzerNodeInfo.getFlowNode(decl.node); + if (!declCodeFlowNode) { + return false; + } + + // Functions and lambdas do not create a new flow node, so it's + // possible that they share the flow node of the declaration. In this + // case, the declaration must come before, so it's safe. + if (declCodeFlowNode === innerScopeCodeFlowNode) { + return true; + } + + return !codeFlowEngine.isFlowNodeReachable(declCodeFlowNode, innerScopeCodeFlowNode); + }) + ) { + return getFlowTypeOfReference( + node, + symbolWithScope.symbol.id, + effectiveType, + /* isInitialTypeIncomplete */ false, + innerScopeNode + ); + } + } + } + } + + return undefined; + } + // Validates that a TypeVar is valid in this context. If so, it clones it // and provides a scope ID defined by its containing scope (class, function // or type alias). 
If not, it emits errors indicating why the TypeVar @@ -3353,7 +3971,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type = TypeVarType.cloneForScopeId( type, getScopeIdForNode(enclosingScope), - enclosingScope.name.value + enclosingScope.name.value, + enclosingScope.nodeType === ParseNodeType.Function + ? TypeVarScopeType.Function + : TypeVarScopeType.Class ); } else { fail('AssociateTypeVarsWithCurrentScope flag was set but enclosing scope not found'); @@ -3451,8 +4072,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // not change each time the file is parsed (unless, of course, the // file contents change). function getScopeIdForNode(node: ParseNode): string { + let name = ''; + if (node.nodeType === ParseNodeType.Class) { + name = node.name.value; + } else if (node.nodeType === ParseNodeType.Function) { + name = node.name.value; + } + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); - return `${fileInfo.filePath}.${node.start.toString()}`; + return `${fileInfo.filePath}.${node.start.toString()}-${name}`; } // Walks up the parse tree and finds all scopes that can provide @@ -3542,7 +4170,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions curNode = node; while (curNode) { if (curNode.nodeType === ParseNodeType.Assignment) { - const leftType = readTypeCache(curNode.leftExpression); + const leftType = readTypeCache(curNode.leftExpression, EvaluatorFlags.None); // Is this a placeholder that was temporarily written to the cache for // purposes of resolving type aliases? 
@@ -3556,7 +4184,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type: TypeVarType.cloneForScopeId( type, leftType.details.recursiveTypeAliasScopeId, - leftType.details.recursiveTypeAliasName + leftType.details.recursiveTypeAliasName, + TypeVarScopeType.TypeAlias ), foundInterveningClass: false, }; @@ -3578,6 +4207,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions EvaluatorFlags.ExpectingTypeAnnotation | EvaluatorFlags.VariableTypeAnnotation | EvaluatorFlags.AllowForwardReferences | + EvaluatorFlags.NotParsedByInterpreter | EvaluatorFlags.DisallowTypeVarsWithScopeId | EvaluatorFlags.DisallowTypeVarsWithoutScopeId | EvaluatorFlags.AssociateTypeVarsWithCurrentScope)); @@ -3595,8 +4225,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isCodeFlowSupportedForReference(node)) { // Before performing code flow analysis, update the cache to prevent recursion. - writeTypeCache(node, memberTypeResult.type, /* isIncomplete */ true); - writeTypeCache(node.memberName, memberTypeResult.type, /* isIncomplete */ true); + writeTypeCache(node, memberTypeResult.type, flags, /* isIncomplete */ true); + writeTypeCache(node.memberName, memberTypeResult.type, flags, /* isIncomplete */ true); // If the type is initially unbound, see if there's a parent class that // potentially initialized the value. @@ -3667,6 +4297,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const fileInfo = AnalyzerNodeInfo.getFileInfo(node); let type: Type | undefined; let isIncomplete = !!baseTypeResult.isIncomplete; + let isAsymmetricDescriptor: boolean | undefined; // If the base type was incomplete and unbound, don't proceed // because false positive errors will be generated. 
@@ -3674,6 +4305,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { type: UnknownType.create(), node, isIncomplete: true }; } + // Handle the special case where the expression is an actual + // UnionType special form. + if (isUnion(baseType) && TypeBase.isSpecialForm(baseType)) { + if (objectType) { + baseType = objectType; + } + } + const getTypeFromNoneBase = () => { if (noneType && isInstantiableClass(noneType)) { const typeResult = getTypeFromObjectMember( @@ -3781,11 +4420,53 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions MemberAccessFlags.None, baseTypeResult.bindToType ); + + type = typeResult?.type; + if (typeResult?.isIncomplete) { + isIncomplete = true; + } + + if (typeResult?.isAsymmetricDescriptor) { + isAsymmetricDescriptor = true; + } + } else if ( + ClassType.isBuiltIn(baseType, 'type') && + objectType && + isClassInstance(objectType) && + !baseTypeResult.isSuperCall + ) { + // Handle the case where the base type is an instance of 'type'. We'll + // treat it as an instantiable subclass of 'object'. + const typeResult = getTypeFromClassMember( + node.memberName, + ClassType.cloneAsInstantiable(objectType), + memberName, + usage, + diag, + MemberAccessFlags.None, + baseTypeResult.bindToType + ? (convertToInstance(baseTypeResult.bindToType) as ClassType | TypeVarType) + : undefined + ); + type = typeResult?.type; if (typeResult?.isIncomplete) { isIncomplete = true; } + + if (typeResult?.isAsymmetricDescriptor) { + isAsymmetricDescriptor = true; + } } else { + // Handle the special case of LiteralString. + if ( + ClassType.isBuiltIn(baseType, 'LiteralString') && + strClassType && + isInstantiableClass(strClassType) + ) { + baseType = ClassType.cloneAsInstance(strClassType); + } + // Handle the special case of 'name' and 'value' members within an enum. 
if (ClassType.isEnumClass(baseType)) { const literalValue = baseType.literalValue; @@ -3816,12 +4497,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions /* memberAccessFlags */ undefined, baseTypeResult.bindToType ); + if (typeResult) { type = addConditionToType(typeResult.type, getTypeCondition(baseType)); } + if (typeResult?.isIncomplete) { isIncomplete = true; } + + if (typeResult?.isAsymmetricDescriptor) { + isAsymmetricDescriptor = true; + } } break; } @@ -3905,7 +4592,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.memberName ); } - type = UnknownType.create(); + type = evaluatorOptions.evaluateUnknownImportsAsAny ? AnyType.create() : UnknownType.create(); } } break; @@ -3913,7 +4600,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions case TypeCategory.Union: { type = mapSubtypes(baseType, (subtype) => { - if (isNone(subtype)) { + if (isNoneInstance(subtype)) { const typeResult = getTypeFromNoneBase(); if (typeResult) { type = addConditionToType(typeResult.type, getTypeCondition(baseType)); @@ -3942,6 +4629,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions { type: subtype, node, + isIncomplete: baseTypeResult.isIncomplete, }, usage, EvaluatorFlags.None @@ -3957,14 +4645,31 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions case TypeCategory.Function: case TypeCategory.OverloadedFunction: { - const functionObj = getBuiltInObject(node, 'function'); - - // The "__defaults__" member is not currently defined in the "function" - // class, so we'll special-case it here. - if (functionObj && memberName !== '__defaults__') { - type = getTypeFromMemberAccessWithBaseType(node, { type: functionObj, node }, usage, flags).type; - } else { + if (memberName === '__defaults__') { + // The "__defaults__" member is not currently defined in the "function" + // class, so we'll special-case it here. 
type = AnyType.create(); + } else if (memberName === '__self__') { + // The "__self__" member is not currently defined in the "function" + // class, so we'll special-case it here. + const functionType = isFunction(baseType) ? baseType : baseType.overloads[0]; + if ( + functionType.preBoundFlags !== undefined && + (functionType.preBoundFlags & FunctionTypeFlags.StaticMethod) === 0 + ) { + type = functionType.boundToType; + } + } else { + if (!functionObj) { + type = AnyType.create(); + } else { + type = getTypeFromMemberAccessWithBaseType( + node, + { type: functionObj, node }, + usage, + flags + ).type; + } } break; } @@ -3986,40 +4691,43 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (!type) { - let diagMessage = Localizer.Diagnostic.memberAccess(); - if (usage.method === 'set') { - diagMessage = Localizer.Diagnostic.memberSet(); - } else if (usage.method === 'del') { - diagMessage = Localizer.Diagnostic.memberDelete(); - } - - // If there is an expected type diagnostic addendum (used for assignments), - // use that rather than the local diagnostic addendum because it will be - // more informative. - if (usage.setExpectedTypeDiag) { - diag = usage.setExpectedTypeDiag; - } - const isFunctionRule = isFunction(baseType) || isOverloadedFunction(baseType) || (isClassInstance(baseType) && ClassType.isBuiltIn(baseType, 'function')); - const [ruleSet, rule] = isFunctionRule - ? 
[fileInfo.diagnosticRuleSet.reportFunctionMemberAccess, DiagnosticRule.reportFunctionMemberAccess] - : [fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues]; - addDiagnostic( - ruleSet, - rule, - diagMessage.format({ name: memberName, type: printType(baseType) }) + diag.getString(), - node.memberName - ); + if (!baseTypeResult.isIncomplete) { + let diagMessage = Localizer.Diagnostic.memberAccess(); + if (usage.method === 'set') { + diagMessage = Localizer.Diagnostic.memberSet(); + } else if (usage.method === 'del') { + diagMessage = Localizer.Diagnostic.memberDelete(); + } - // If this is member access on a function, use "Any" so if the - // reportFunctionMemberAccess rule is disabled, we don't trigger - // additional reportUnknownMemberType diagnostics. - type = isFunctionRule ? AnyType.create() : UnknownType.create(); - } + // If there is an expected type diagnostic addendum (used for assignments), + // use that rather than the local diagnostic addendum because it will be + // more informative. + if (usage.setExpectedTypeDiag) { + diag = usage.setExpectedTypeDiag; + } + + const [ruleSet, rule] = isFunctionRule + ? [fileInfo.diagnosticRuleSet.reportFunctionMemberAccess, DiagnosticRule.reportFunctionMemberAccess] + : [fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues]; + + addDiagnostic( + ruleSet, + rule, + diagMessage.format({ name: memberName, type: printType(baseType) }) + diag.getString(), + node.memberName + ); + } + + // If this is member access on a function, use "Any" so if the + // reportFunctionMemberAccess rule is disabled, we don't trigger + // additional reportUnknownMemberType diagnostics. + type = isFunctionRule ? AnyType.create() : UnknownType.create(); + } // Should we specialize the class? 
if ((flags & EvaluatorFlags.DoNotSpecialize) === 0) { @@ -4029,24 +4737,32 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (usage.method === 'get') { + let skipPartialUnknownCheck = isIncomplete; + // Don't report an error if the type is a partially-specialized - // class. This comes up frequently in cases where a type is passed - // as an argument (e.g. "defaultdict(list)"). - if (node.parent?.nodeType !== ParseNodeType.Argument || !isInstantiableClass(type)) { - if (!isIncomplete) { - reportPossibleUnknownAssignment( - fileInfo.diagnosticRuleSet.reportUnknownMemberType, - DiagnosticRule.reportUnknownMemberType, - node.memberName, - type, - node, - /* ignoreEmptyContainers */ false - ); + // class being passed as an argument. This comes up frequently in + // cases where a type is passed as an argument (e.g. "defaultdict(list)"). + // It can also come up in cases like "isinstance(x, (list, dict))". + if (isInstantiableClass(type)) { + const argNode = ParseTreeUtils.getParentNodeOfType(node, ParseNodeType.Argument); + if (argNode && argNode?.parent?.nodeType === ParseNodeType.Call) { + skipPartialUnknownCheck = true; } } + + if (!skipPartialUnknownCheck) { + reportPossibleUnknownAssignment( + fileInfo.diagnosticRuleSet.reportUnknownMemberType, + DiagnosticRule.reportUnknownMemberType, + node.memberName, + type, + node, + /* ignoreEmptyContainers */ false + ); + } } - return { type, node, isIncomplete }; + return { type, node, isIncomplete, isAsymmetricDescriptor }; } function getTypeFromClassMemberName( @@ -4054,7 +4770,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions classType: ClassType, memberName: string, usage: EvaluatorUsage, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, flags: MemberAccessFlags, bindToType?: ClassType | TypeVarType ): ClassMemberLookup | undefined { @@ -4070,6 +4786,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, 
evaluatorOptions if (flags & MemberAccessFlags.SkipObjectBaseClass) { classLookupFlags |= ClassMemberLookupFlags.SkipObjectBaseClass; } + if (flags & MemberAccessFlags.SkipTypeBaseClass) { + classLookupFlags |= ClassMemberLookupFlags.SkipTypeBaseClass; + } if (flags & MemberAccessFlags.SkipOriginalClass) { classLookupFlags |= ClassMemberLookupFlags.SkipOriginalClass; } @@ -4091,17 +4810,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let type: Type | undefined; let isTypeIncomplete = false; - if (usage.method === 'get') { - const typeResult = getTypeOfMemberInternal(errorNode, memberInfo); - if (typeResult) { - type = typeResult.type; - if (typeResult.isIncomplete) { - isTypeIncomplete = true; - } - } else { - type = UnknownType.create(); + if (memberInfo.symbol.isInitVar()) { + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.memberIsInitVar().format({ name: memberName })); } - } else { + return undefined; + } + + if (usage.method !== 'get') { // If the usage indicates a 'set' or 'delete' and the access is within the // class definition itself, use only the declared type to avoid circular // type evaluation. @@ -4119,17 +4835,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } } + } - if (!type) { - const typeResult = getTypeOfMemberInternal(errorNode, memberInfo); - if (typeResult) { - type = typeResult.type; - if (typeResult.isIncomplete) { - isTypeIncomplete = true; - } - } else { - type = UnknownType.create(); + if (!type) { + // Determine whether to replace Self variables with a specific + // class. Avoid doing this if there's a "bindToType" specified + // because that case is used for super() calls where we want + // to leave the Self type generic (not specialized). + const selfClass = bindToType ? 
undefined : classType; + + const typeResult = getTypeOfMemberInternal(errorNode, memberInfo, selfClass); + + if (typeResult) { + type = typeResult.type; + if (typeResult.isIncomplete) { + isTypeIncomplete = true; } + } else { + type = UnknownType.create(); } } @@ -4137,7 +4860,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (ClassType.isTypedDictClass(classType)) { const typedDecls = memberInfo.symbol.getTypedDeclarations(); if (typedDecls.length > 0 && typedDecls[0].type === DeclarationType.Variable) { - diag.addMessage(Localizer.DiagnosticAddendum.memberUnknown().format({ name: memberName })); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.memberUnknown().format({ name: memberName })); + } return undefined; } } @@ -4152,7 +4877,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - const objectAccessType = applyDescriptorAccessMethod( + const descriptorResult = applyDescriptorAccessMethod( type, memberInfo, classType, @@ -4165,21 +4890,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag ); - if (!objectAccessType) { + if (!descriptorResult) { return undefined; } - type = objectAccessType; + type = descriptorResult.type; if (usage.method === 'set' && usage.setType) { // Verify that the assigned type is compatible. 
- if (!canAssignType(type, usage.setType, diag.createAddendum())) { - diag.addMessage( - Localizer.DiagnosticAddendum.memberAssignment().format({ - type: printType(usage.setType), - name: memberName, - classType: printObjectTypeForClass(classType), - }) - ); + if (!canAssignType(type, usage.setType, diag?.createAddendum())) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.memberAssignment().format({ + type: printType(usage.setType), + name: memberName, + classType: printObjectTypeForClass(classType), + }) + ); + } return undefined; } @@ -4188,19 +4915,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ClassType.isFrozenDataClass(memberInfo.classType) && (flags & MemberAccessFlags.AccessClassMembersOnly) === 0 ) { - diag.addMessage( - Localizer.DiagnosticAddendum.dataclassFrozen().format({ - name: printType(ClassType.cloneAsInstance(memberInfo.classType)), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.dataClassFrozen().format({ + name: printType(ClassType.cloneAsInstance(memberInfo.classType)), + }) + ); + } return undefined; } } return { + symbol: memberInfo.symbol, type, isTypeIncomplete, isClassMember: !memberInfo.isInstanceMember, + isClassVar: memberInfo.isClassVar, + classType: memberInfo.classType, + isAsymmetricDescriptor: descriptorResult.isAsymmetricDescriptor, }; } @@ -4211,35 +4944,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions (flags & (MemberAccessFlags.AccessClassMembersOnly | MemberAccessFlags.SkipAttributeAccessOverride)) === 0 ) { - const generalAttrType = applyAttributeAccessOverride(classType, errorNode, usage); - + const generalAttrType = applyAttributeAccessOverride(classType, errorNode, usage, memberName); if (generalAttrType) { - const objectAccessType = applyDescriptorAccessMethod( - generalAttrType, - memberInfo, - classType, - bindToType, - /* isAccessedThroughObject */ !!bindToType, - flags, - errorNode, - memberName, - 
usage, - diag - ); - - if (!objectAccessType) { - return undefined; - } - return { - type: objectAccessType, + symbol: undefined, + type: generalAttrType, isTypeIncomplete: false, isClassMember: false, + isClassVar: false, + isAsymmetricDescriptor: false, }; } } - diag.addMessage(Localizer.DiagnosticAddendum.memberUnknown().format({ name: memberName })); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.memberUnknown().format({ name: memberName })); + } + return undefined; } @@ -4256,24 +4977,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions errorNode: ExpressionNode, memberName: string, usage: EvaluatorUsage, - diag: DiagnosticAddendum - ): Type | undefined { + diag: DiagnosticAddendum | undefined + ): DescriptorTypeResult | undefined { const treatConstructorAsClassMember = (flags & MemberAccessFlags.TreatConstructorAsClassMethod) !== 0; let isTypeValid = true; + let isAsymmetricDescriptor = false; type = mapSubtypes(type, (subtype) => { - if (isClass(subtype)) { + const concreteSubtype = makeTopLevelTypeVarsConcrete(subtype); + + if (isClass(concreteSubtype)) { // If it's an object, use its class to lookup the descriptor. If it's a class, // use its metaclass instead. - let lookupClass: ClassType | undefined = subtype; + let lookupClass: ClassType | undefined = concreteSubtype; let isAccessedThroughMetaclass = false; - if (TypeBase.isInstantiable(subtype)) { - if (subtype.details.effectiveMetaclass && isInstantiableClass(subtype.details.effectiveMetaclass)) { + if (TypeBase.isInstantiable(concreteSubtype)) { + if ( + concreteSubtype.details.effectiveMetaclass && + isInstantiableClass(concreteSubtype.details.effectiveMetaclass) + ) { // When accessing a class member that is a class whose metaclass implements // a descriptor protocol, only 'get' operations are allowed. If it's accessed // through the object, all access methods are supported. 
if (isAccessedThroughObject || usage.method === 'get') { - lookupClass = convertToInstance(subtype.details.effectiveMetaclass) as ClassType; + lookupClass = convertToInstance(concreteSubtype.details.effectiveMetaclass) as ClassType; isAccessedThroughMetaclass = true; } else { lookupClass = undefined; @@ -4304,17 +5031,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (ClassType.isPropertyClass(lookupClass)) { if (usage.method === 'set') { if (!accessMethod) { - diag.addMessage( - Localizer.DiagnosticAddendum.propertyMissingSetter().format({ name: memberName }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.propertyMissingSetter().format({ + name: memberName, + }) + ); + } isTypeValid = false; return undefined; } } else if (usage.method === 'del') { if (!accessMethod) { - diag.addMessage( - Localizer.DiagnosticAddendum.propertyMissingDeleter().format({ name: memberName }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.propertyMissingDeleter().format({ + name: memberName, + }) + ); + } isTypeValid = false; return undefined; } @@ -4361,15 +5096,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Infer return types before specializing. Otherwise a generic inferred // return type won't be properly specialized. 
- if (isFunction(accessMethodType)) { - getFunctionEffectiveReturnType(accessMethodType); - } else if (isOverloadedFunction(accessMethodType)) { - accessMethodType.overloads.forEach((overload) => { - getFunctionEffectiveReturnType(overload); - }); - } + inferReturnTypeIfNecessary(accessMethodType); accessMethodType = partiallySpecializeType(accessMethodType, memberInfo.classType); + + // If the property is being accessed from a protocol class (not an instance), + // flag this as an error because a property within a protocol is meant to be + // interpreted as a read-only attribute rather than a protocol, so accessing + // it directly from the class has an ambiguous meaning. + if ( + (flags & MemberAccessFlags.AccessClassMembersOnly) !== 0 && + ClassType.isProtocolClass(baseTypeClass) + ) { + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.propertyAccessFromProtocolClass()); + } + isTypeValid = false; + } } if ( @@ -4385,13 +5128,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let bindToClass: ClassType | undefined; // The "bind-to" class depends on whether the descriptor is defined - // on the metaclass or the class. - if (TypeBase.isInstantiable(subtype)) { + // on the metaclass or the class. We handle properties specially here + // because of the way we model the __get__ logic in the property class. 
+ if (ClassType.isPropertyClass(concreteSubtype) && !isAccessedThroughMetaclass) { + if (memberInfo && isInstantiableClass(memberInfo.classType)) { + bindToClass = memberInfo.classType; + } + } else { if (isInstantiableClass(accessMethod.classType)) { bindToClass = accessMethod.classType; } - } else if (memberInfo && isInstantiableClass(memberInfo.classType)) { - bindToClass = memberInfo.classType; } const boundMethodType = bindFunctionToClassOrObject( @@ -4401,22 +5147,51 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions errorNode, /* recursionCount */ undefined, /* treatConstructorAsClassMember */ undefined, - isAccessedThroughMetaclass ? subtype : undefined + isAccessedThroughMetaclass ? concreteSubtype : undefined ); if ( boundMethodType && (isFunction(boundMethodType) || isOverloadedFunction(boundMethodType)) ) { + const typeVarMap = new TypeVarMap(getTypeVarScopeId(boundMethodType)); + if (bindToClass) { + typeVarMap.addSolveForScope(getTypeVarScopeId(bindToClass)); + } + const callResult = validateCallArguments( errorNode, argList, boundMethodType, - /* typeVarMap */ undefined, + typeVarMap, /* skipUnknownArgCheck */ true ); if (callResult.argumentErrors) { + if (usage.method === 'set') { + if ( + usage.setType && + isFunction(boundMethodType) && + boundMethodType.details.parameters.length >= 2 + ) { + const setterType = FunctionType.getEffectiveParameterType( + boundMethodType, + 1 + ); + + diag?.addMessage( + Localizer.DiagnosticAddendum.typeIncompatible().format({ + destType: printType(setterType), + sourceType: printType(usage.setType), + }) + ); + } else if (isOverloadedFunction(boundMethodType)) { + diag?.addMessage( + Localizer.Diagnostic.noOverload().format({ name: accessMethodName }) + ); + } + } + isTypeValid = false; return AnyType.create(); } @@ -4430,19 +5205,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return undefined; }); + // Determine if we're calling __set__ on an 
asymmetric descriptor or property. + if (usage.method === 'set' && isClass(accessMethod.classType)) { + if (isAsymmetricDescriptorClass(accessMethod.classType)) { + isAsymmetricDescriptor = true; + } + } + if (returnType) { return returnType; } } } } - } else if (isFunction(subtype) || isOverloadedFunction(subtype)) { + } else if (isFunction(concreteSubtype) || isOverloadedFunction(concreteSubtype)) { // If this function is an instance member (e.g. a lambda that was // assigned to an instance variable), don't perform any binding. if (!isAccessedThroughObject || (memberInfo && !memberInfo.isInstanceMember)) { return bindFunctionToClassOrObject( isAccessedThroughObject ? ClassType.cloneAsInstance(baseTypeClass) : baseTypeClass, - subtype, + concreteSubtype, memberInfo && isInstantiableClass(memberInfo.classType) ? memberInfo.classType : undefined, errorNode, /* recursionCount */ undefined, @@ -4455,11 +5237,48 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (usage.method === 'set') { if (memberInfo?.symbol.isClassVar()) { if (flags & MemberAccessFlags.DisallowClassVarWrites) { - diag.addMessage(Localizer.DiagnosticAddendum.memberSetClassVar().format({ name: memberName })); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.memberSetClassVar().format({ name: memberName }) + ); + } + isTypeValid = false; return undefined; } } + // Check for an attempt to overwrite a final member variable. + const finalTypeDecl = memberInfo?.symbol + .getDeclarations() + .find((decl) => isFinalVariableDeclaration(decl)); + + if (finalTypeDecl && !ParseTreeUtils.isNodeContainedWithin(errorNode, finalTypeDecl.node)) { + // If a Final instance variable is declared in the class body but is + // being assigned within an __init__ method, it's allowed. 
+ const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(errorNode); + if (!enclosingFunctionNode || enclosingFunctionNode.name.value !== '__init__') { + if (diag) { + diag.addMessage(Localizer.Diagnostic.finalReassigned().format({ name: memberName })); + } + isTypeValid = false; + return undefined; + } + } + + // Check for an attempt to overwrite an instance variable that is + // read-only (e.g. in a named tuple). + if ( + memberInfo?.isInstanceMember && + isClass(memberInfo.classType) && + ClassType.isReadOnlyInstanceVariables(memberInfo.classType) + ) { + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.readOnlyAttribute().format({ name: memberName })); + } + isTypeValid = false; + return undefined; + } + let enforceTargetType = false; if (memberInfo && memberInfo.symbol.hasTypedDeclarations()) { @@ -4476,16 +5295,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (enforceTargetType) { - let effectiveType = subtype; + let effectiveType = concreteSubtype; // If the code is patching a method (defined on the class) // with an object-level function, strip the "self" parameter // off the original type. This is sometimes done for test // purposes to override standard behaviors of specific methods. if (isAccessedThroughObject) { - if (!memberInfo!.isInstanceMember && isFunction(subtype)) { - if (FunctionType.isClassMethod(subtype) || FunctionType.isInstanceMethod(subtype)) { - effectiveType = FunctionType.clone(subtype, /* stripFirstParam */ true); + if (!memberInfo!.isInstanceMember && isFunction(concreteSubtype)) { + if ( + FunctionType.isClassMethod(concreteSubtype) || + FunctionType.isInstanceMethod(concreteSubtype) + ) { + effectiveType = FunctionType.clone(concreteSubtype, /* stripFirstParam */ true); } } } @@ -4497,39 +5319,100 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return subtype; }); - return isTypeValid ? 
type : undefined; + if (!isTypeValid) { + return undefined; + } + + return { type, isAsymmetricDescriptor }; + } + + function isAsymmetricDescriptorClass(classType: ClassType): boolean { + // If the value has already been cached in this type, return the cached value. + if (classType.isAsymmetricDescriptor !== undefined) { + return classType.isAsymmetricDescriptor; + } + + let isAsymmetric = false; + + const getterSymbolResult = lookUpClassMember(classType, '__get__', ClassMemberLookupFlags.SkipBaseClasses); + const setterSymbolResult = lookUpClassMember(classType, '__set__', ClassMemberLookupFlags.SkipBaseClasses); + + if (!getterSymbolResult || !setterSymbolResult) { + isAsymmetric = false; + } else { + const getterType = getEffectiveTypeOfSymbol(getterSymbolResult.symbol); + const setterType = getEffectiveTypeOfSymbol(setterSymbolResult.symbol); + + // If either the setter or getter is an overload (or some other non-function type), + // conservatively assume that it's not asymmetric. + if (isFunction(getterType) && isFunction(setterType)) { + // If there's no declared return type on the getter, assume it's symmetric. + if (setterType.details.parameters.length >= 3 && getterType.details.declaredReturnType) { + const setterValueType = FunctionType.getEffectiveParameterType(setterType, 2); + const getterReturnType = FunctionType.getSpecializedReturnType(getterType) ?? UnknownType.create(); + + if (!isTypeSame(setterValueType, getterReturnType)) { + isAsymmetric = true; + } + } + } + } + + // Cache the value for next time. + classType.isAsymmetricDescriptor = isAsymmetric; + return isAsymmetric; } // Applies the __getattr__, __setattr__ or __delattr__ method if present. function applyAttributeAccessOverride( classType: ClassType, errorNode: ExpressionNode, - usage: EvaluatorUsage + usage: EvaluatorUsage, + memberName: string ): Type | undefined { if (usage.method === 'get') { // See if the class has a "__getattribute__" or "__getattr__" method. 
// If so, arbitrary members are supported. - const getAttribType = getTypeFromClassMember( + let getAttrType = getTypeFromClassMember( errorNode, classType, '__getattribute__', { method: 'get' }, - new DiagnosticAddendum(), - MemberAccessFlags.SkipObjectBaseClass + /* diag */ undefined, + MemberAccessFlags.SkipObjectBaseClass | MemberAccessFlags.SkipAttributeAccessOverride )?.type; - if (getAttribType && isFunction(getAttribType)) { - return getFunctionEffectiveReturnType(getAttribType); + if (!getAttrType) { + getAttrType = getTypeFromClassMember( + errorNode, + classType, + '__getattr__', + { method: 'get' }, + /* diag */ undefined, + MemberAccessFlags.SkipObjectBaseClass | MemberAccessFlags.SkipAttributeAccessOverride + )?.type; + } + + // If it's an overload, it might be based on the member name. Create + // a literal str type based on the member name and find the best overload. + if (getAttrType && isOverloadedFunction(getAttrType)) { + let nameLiteralType: Type = AnyType.create(); + if (strClassType && isInstantiableClass(strClassType)) { + nameLiteralType = ClassType.cloneWithLiteral(ClassType.cloneAsInstance(strClassType), memberName); + } + + getAttrType = getBestOverloadForArguments(errorNode, getAttrType, [ + { + argumentCategory: ArgumentCategory.Simple, + type: AnyType.create(), + }, + { + argumentCategory: ArgumentCategory.Simple, + type: nameLiteralType, + }, + ]); } - const getAttrType = getTypeFromClassMember( - errorNode, - classType, - '__getattr__', - { method: 'get' }, - new DiagnosticAddendum(), - MemberAccessFlags.SkipObjectBaseClass - )?.type; if (getAttrType && isFunction(getAttrType)) { return getFunctionEffectiveReturnType(getAttrType); } @@ -4539,8 +5422,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions classType, '__setattr__', { method: 'get' }, - new DiagnosticAddendum(), - MemberAccessFlags.SkipObjectBaseClass + /* diag */ undefined, + MemberAccessFlags.SkipObjectBaseClass | 
MemberAccessFlags.SkipAttributeAccessOverride )?.type; if (setAttrType) { // The type doesn't matter for a set usage. We just need @@ -4554,8 +5437,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions classType, '__detattr__', { method: 'get' }, - new DiagnosticAddendum(), - MemberAccessFlags.SkipObjectBaseClass + /* diag */ undefined, + MemberAccessFlags.SkipObjectBaseClass | MemberAccessFlags.SkipAttributeAccessOverride )?.type; if (delAttrType) { // The type doesn't matter for a delete usage. We just need @@ -4643,7 +5526,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (baseTypeSupportsIndexNarrowing) { // Before performing code flow analysis, update the cache to prevent recursion. - writeTypeCache(node, indexTypeResult.type, /* isIncomplete */ false); + writeTypeCache(node, indexTypeResult.type, flags, /* isIncomplete */ false); // See if we can refine the type based on code flow analysis. const codeFlowTypeResult = getFlowTypeOfReference( @@ -4668,12 +5551,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions indexTypeResult.isIncomplete = true; } + // Handle "Required" and "NotRequired" specially. + if ((flags & EvaluatorFlags.RequiredAllowed) !== 0) { + if (isInstantiableClass(baseTypeResult.type)) { + if (ClassType.isBuiltIn(baseTypeResult.type, 'Required')) { + indexTypeResult.isRequired = true; + } else if (ClassType.isBuiltIn(baseTypeResult.type, 'NotRequired')) { + indexTypeResult.isNotRequired = true; + } + } + } + return indexTypeResult; } function adjustTypeArgumentsForVariadicTypeVar( typeArgs: TypeResult[], - typeParameters: TypeVarType[] + typeParameters: TypeVarType[], + errorNode: ExpressionNode ): TypeResult[] { const variadicIndex = typeParameters.findIndex((param) => isVariadicTypeVar(param)); @@ -4681,39 +5576,71 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // param at the end of the list? 
if (variadicIndex >= 0) { if (tupleClassType && isInstantiableClass(tupleClassType)) { - const variadicTypeResults = typeArgs.slice( - variadicIndex, - variadicIndex + 1 + typeArgs.length - typeParameters.length - ); + if (variadicIndex < typeArgs.length) { + const variadicTypeResults = typeArgs.slice( + variadicIndex, + variadicIndex + 1 + typeArgs.length - typeParameters.length + ); - // If the type args consist of a lone variadic type variable, don't wrap it in a tuple. - if (variadicTypeResults.length === 1 && isVariadicTypeVar(variadicTypeResults[0].type)) { - validateVariadicTypeVarIsUnpacked(variadicTypeResults[0].type, variadicTypeResults[0].node); - } else { - variadicTypeResults.forEach((arg, index) => { - validateTypeArg(arg, /* allowEmptyTuple */ index === 0, /* allowVariadicTypeVar */ true); - }); + // If the type args consist of a lone variadic type variable, don't wrap it in a tuple. + if (variadicTypeResults.length === 1 && isVariadicTypeVar(variadicTypeResults[0].type)) { + validateVariadicTypeVarIsUnpacked(variadicTypeResults[0].type, variadicTypeResults[0].node); + } else { + variadicTypeResults.forEach((arg, index) => { + validateTypeArg(arg, { + allowEmptyTuple: index === 0, + allowVariadicTypeVar: true, + allowUnpackedTuples: true, + }); + }); - const variadicTypes: Type[] = - variadicTypeResults.length === 1 && variadicTypeResults[0].isEmptyTupleShorthand - ? 
[] - : variadicTypeResults.map((typeResult) => convertToInstance(typeResult.type)); + const variadicTypes: TupleTypeArgument[] = []; + if (variadicTypeResults.length !== 1 || !variadicTypeResults[0].isEmptyTupleShorthand) { + variadicTypeResults.forEach((typeResult) => { + if (isUnpackedClass(typeResult.type) && typeResult.type.tupleTypeArguments) { + variadicTypes.push(...typeResult.type.tupleTypeArguments); + } else { + variadicTypes.push({ + type: convertToInstance(typeResult.type), + isUnbounded: false, + }); + } + }); + } - const tupleObject = convertToInstance( - specializeTupleClass( - tupleClassType, - variadicTypes, - /* isTypeArgumentExplicit */ true, - /* stripLiterals */ true, - /* isForUnpackedVariadicTypeVar */ true - ) - ); + const tupleObject = convertToInstance( + specializeTupleClass( + tupleClassType, + variadicTypes, + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true, + /* isUnpackedTuple */ true + ) + ); - typeArgs = [ - ...typeArgs.slice(0, variadicIndex), - { node: typeArgs[variadicIndex].node, type: tupleObject }, - ...typeArgs.slice(variadicIndex + 1 + typeArgs.length - typeParameters.length, typeArgs.length), - ]; + typeArgs = [ + ...typeArgs.slice(0, variadicIndex), + { node: typeArgs[variadicIndex].node, type: tupleObject }, + ...typeArgs.slice( + variadicIndex + 1 + typeArgs.length - typeParameters.length, + typeArgs.length + ), + ]; + } + } else { + // Add an empty tuple that maps to the TypeVarTuple type parameter. + typeArgs.push({ + node: errorNode, + type: convertToInstance( + specializeTupleClass( + tupleClassType, + [], + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true, + /* isUnpackedTuple */ true + ) + ), + }); } } } @@ -4737,69 +5664,149 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return true; } - function getTypeFromIndexWithBaseType( + // Handles index expressions that are providing type arguments for a + // generic type alias. 
+ function createSpecializedTypeAlias( node: IndexNode, baseType: Type, - usage: EvaluatorUsage, flags: EvaluatorFlags - ): TypeResult { - // Handle the case where we're specializing a generic type alias. + ): TypeResult | undefined { if ( - baseType.typeAliasInfo?.typeParameters && - baseType.typeAliasInfo.typeParameters.length > 0 && - !baseType.typeAliasInfo.typeArguments + !baseType.typeAliasInfo?.typeParameters || + (baseType.typeAliasInfo.typeParameters.length === 0 && baseType.typeAliasInfo.typeArguments) ) { - const typeParameters = baseType.typeAliasInfo.typeParameters; - const typeArgs = adjustTypeArgumentsForVariadicTypeVar(getTypeArgs(node, flags), typeParameters); + return undefined; + } + + // If this is not instantiable, then the index expression isn't a specialization. + if (!TypeBase.isInstantiable(baseType)) { + return undefined; + } + + // If this is already specialized, the index expression isn't a specialization. + if (baseType.typeAliasInfo.typeArguments) { + return undefined; + } + const typeParameters = baseType.typeAliasInfo.typeParameters; + let typeArgs = adjustTypeArgumentsForVariadicTypeVar(getTypeArgs(node, flags), typeParameters, node); + + // PEP 612 says that if the class has only one type parameter consisting + // of a ParamSpec, the list of arguments does not need to be enclosed in + // a list. We'll handle that case specially here. Presumably this applies to + // type aliases as well. 
+ if (typeParameters.length === 1 && typeParameters[0].details.isParamSpec && typeArgs) { if ( - typeArgs.length > typeParameters.length && - !typeParameters.some((typeVar) => typeVar.details.isVariadic) + typeArgs.every( + (typeArg) => !isEllipsisType(typeArg.type) && !typeArg.typeList && !isParamSpec(typeArg.type) + ) ) { - addError( - Localizer.Diagnostic.typeArgsTooMany().format({ - name: printType(baseType), - expected: typeParameters.length, - received: typeArgs.length, - }), - typeArgs[typeParameters.length].node - ); + typeArgs = [ + { + type: UnknownType.create(), + node: typeArgs[0].node, + typeList: typeArgs, + }, + ]; } + } - const typeVarMap = new TypeVarMap(baseType.typeAliasInfo.typeVarScopeId); - const diag = new DiagnosticAddendum(); - typeParameters.forEach((param, index) => { - const typeArgType: Type = - index < typeArgs.length ? convertToInstance(typeArgs[index].type) : UnknownType.create(); - canAssignTypeToTypeVar(param, typeArgType, diag, typeVarMap); - }); + if (typeArgs.length > typeParameters.length && !typeParameters.some((typeVar) => typeVar.details.isVariadic)) { + addError( + Localizer.Diagnostic.typeArgsTooMany().format({ + name: printType(baseType), + expected: typeParameters.length, + received: typeArgs.length, + }), + typeArgs[typeParameters.length].node + ); + } - if (!diag.isEmpty()) { - addError( - Localizer.Diagnostic.typeNotSpecializable().format({ type: printType(baseType) }) + - diag.getString(), - node - ); - } + const typeVarMap = new TypeVarMap(baseType.typeAliasInfo.typeVarScopeId); + const diag = new DiagnosticAddendum(); + typeParameters.forEach((param, index) => { + if (param.details.isParamSpec && index < typeArgs.length) { + if (typeArgs[index].typeList) { + const functionType = FunctionType.createInstantiable('', '', '', FunctionTypeFlags.ParamSpecValue); + TypeBase.setSpecialForm(functionType); + typeArgs[index].typeList!.forEach((paramType, paramIndex) => { + FunctionType.addParameter(functionType, { + category: 
ParameterCategory.Simple, + name: `__p${paramIndex}`, + isNameSynthesized: true, + type: convertToInstance(paramType.type), + hasDeclaredType: true, + }); + }); - const aliasTypeArgs: Type[] = []; - baseType.typeAliasInfo.typeParameters?.forEach((typeParam) => { - const typeVarType = isParamSpec(typeParam) - ? typeVarMap.getParamSpec(typeParam)?.paramSpec - : typeVarMap.getTypeVarType(typeParam); - aliasTypeArgs.push(typeVarType || UnknownType.create()); - }); + canAssignTypeToTypeVar(param, functionType, diag, typeVarMap); + } else if (isParamSpec(typeArgs[index].type)) { + canAssignTypeToTypeVar(param, convertToInstance(typeArgs[index].type), diag, typeVarMap); + } else if (isEllipsisType(typeArgs[index].type)) { + const functionType = FunctionType.createInstantiable( + '', + '', + '', + FunctionTypeFlags.ParamSpecValue | FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck + ); + TypeBase.setSpecialForm(functionType); + FunctionType.addDefaultParameters(functionType); + canAssignTypeToTypeVar(param, functionType, diag, typeVarMap); + } else { + addError(Localizer.Diagnostic.typeArgListExpected(), typeArgs[index].node); + } + } else { + if (index < typeArgs.length && typeArgs[index].typeList) { + addError(Localizer.Diagnostic.typeArgListNotAllowed(), typeArgs[index].node); + } - const type = TypeBase.cloneForTypeAlias( - applySolvedTypeVars(baseType, typeVarMap), - baseType.typeAliasInfo.name, - baseType.typeAliasInfo.fullName, - baseType.typeAliasInfo.typeVarScopeId, - baseType.typeAliasInfo.typeParameters, - aliasTypeArgs + const typeArgType: Type = + index < typeArgs.length ? 
convertToInstance(typeArgs[index].type) : UnknownType.create(); + canAssignTypeToTypeVar(param, typeArgType, diag, typeVarMap); + } + }); + + if (!diag.isEmpty()) { + addError( + Localizer.Diagnostic.typeNotSpecializable().format({ type: printType(baseType) }) + diag.getString(), + node ); + } - return { type, node }; + const aliasTypeArgs: Type[] = []; + baseType.typeAliasInfo.typeParameters?.forEach((typeParam) => { + let typeVarType: Type | undefined; + if (isParamSpec(typeParam)) { + const paramSpecValue = typeVarMap.getParamSpec(typeParam); + typeVarType = paramSpecValue ? convertParamSpecValueToType(paramSpecValue) : UnknownType.create(); + } else { + typeVarType = typeVarMap.getTypeVarType(typeParam); + } + aliasTypeArgs.push(typeVarType || UnknownType.create()); + }); + + const type = TypeBase.cloneForTypeAlias( + applySolvedTypeVars(baseType, typeVarMap), + baseType.typeAliasInfo.name, + baseType.typeAliasInfo.fullName, + baseType.typeAliasInfo.typeVarScopeId, + baseType.typeAliasInfo.typeParameters, + aliasTypeArgs + ); + + return { type, node }; + } + + function getTypeFromIndexWithBaseType( + node: IndexNode, + baseType: Type, + usage: EvaluatorUsage, + flags: EvaluatorFlags + ): TypeResult { + // Handle the case where we're specializing a generic type alias. 
+ const typeAliasResult = createSpecializedTypeAlias(node, baseType, flags); + if (typeAliasResult) { + return typeAliasResult; } if (isTypeAliasPlaceholder(baseType)) { @@ -4856,7 +5863,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions getIndexAccessMagicMethodName(usage), /* usage */ undefined, /* diag */ undefined, - /* memberAccessFlags */ MemberAccessFlags.ConsiderMetaclassOnly + MemberAccessFlags.SkipAttributeAccessOverride | MemberAccessFlags.ConsiderMetaclassOnly ); if (itemMethodType) { return getTypeFromIndexedObjectOrClass(node, concreteSubtype, usage).type; @@ -4903,12 +5910,29 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isInstantiableClass(concreteSubtype) && ClassType.isBuiltIn(concreteSubtype, 'Annotated'); const hasCustomClassGetItem = isInstantiableClass(concreteSubtype) && ClassType.hasCustomClassGetItem(concreteSubtype); - - let typeArgs = getTypeArgs(node, flags, isAnnotatedClass, hasCustomClassGetItem); + const isGenericClass = + concreteSubtype.details.typeParameters?.length > 0 || + ClassType.isSpecialBuiltIn(concreteSubtype) || + ClassType.isBuiltIn(concreteSubtype, 'type') || + ClassType.isPartiallyConstructed(concreteSubtype); + const isFinalAnnotation = + isInstantiableClass(concreteSubtype) && ClassType.isBuiltIn(concreteSubtype, 'Final'); + const isClassVarAnnotation = + isInstantiableClass(concreteSubtype) && ClassType.isBuiltIn(concreteSubtype, 'ClassVar'); + + let typeArgs = getTypeArgs( + node, + flags, + isAnnotatedClass, + hasCustomClassGetItem || !isGenericClass, + isFinalAnnotation, + isClassVarAnnotation + ); if (!isAnnotatedClass) { typeArgs = adjustTypeArgumentsForVariadicTypeVar( typeArgs, - concreteSubtype.details.typeParameters + concreteSubtype.details.typeParameters, + node ); } @@ -4918,6 +5942,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return concreteSubtype; } + if (concreteSubtype.typeArguments) { + 
addError( + Localizer.Diagnostic.classAlreadySpecialized().format({ + type: printType(convertToInstance(concreteSubtype), /* expandTypeAlias */ true), + }), + node.baseExpression + ); + return concreteSubtype; + } + return createSpecializedClassType(concreteSubtype, typeArgs, flags, node); } @@ -4933,7 +5967,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return UnknownType.create(); } - if (isNone(concreteSubtype)) { + if (isNoneInstance(concreteSubtype)) { addDiagnostic( AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.reportOptionalSubscript, DiagnosticRule.reportOptionalSubscript, @@ -4961,11 +5995,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // In case we didn't walk the list items above, do so now. // If we have, this information will be cached. node.items.forEach((item) => { - getTypeOfExpression( - item.valueExpression, - /* expectedType */ undefined, - flags & EvaluatorFlags.AllowForwardReferences - ); + if (!isTypeCached(item.valueExpression)) { + getTypeOfExpression( + item.valueExpression, + /* expectedType */ undefined, + flags & EvaluatorFlags.AllowForwardReferences + ); + } }); return { type, node, isIncomplete }; @@ -4973,15 +6009,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function makeTupleObject(entryTypes: Type[], isUnspecifiedLength = false) { if (tupleClassType && isInstantiableClass(tupleClassType)) { - if (isUnspecifiedLength) { - return convertToInstance( - specializeTupleClass(tupleClassType, [ - combineTypes(entryTypes), - AnyType.create(/* isEllipsis */ true), - ]) - ); - } - return convertToInstance(specializeTupleClass(tupleClassType, entryTypes)); + return convertToInstance( + specializeTupleClass( + tupleClassType, + entryTypes.map((t) => { + return { type: t, isUnbounded: isUnspecifiedLength }; + }) + ) + ); } return UnknownType.create(); @@ -5009,14 +6044,21 @@ export function 
createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const magicMethodName = getIndexAccessMagicMethodName(usage); const itemMethodType = isClassInstance(baseType) - ? getTypeFromObjectMember(node, baseType, magicMethodName)?.type + ? getTypeFromObjectMember( + node, + baseType, + magicMethodName, + /* usage */ undefined, + /* diag */ undefined, + MemberAccessFlags.SkipAttributeAccessOverride + )?.type : getTypeFromClassMember( node, baseType, magicMethodName, /* usage */ undefined, /* diag */ undefined, - /* memberAccessFlags */ MemberAccessFlags.ConsiderMetaclassOnly + MemberAccessFlags.SkipAttributeAccessOverride | MemberAccessFlags.ConsiderMetaclassOnly )?.type; if (!itemMethodType) { @@ -5047,25 +6089,28 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const index0Expr = node.items[0].valueExpression; const valueType = getTypeOfExpression(index0Expr).type; - if (isClassInstance(valueType) && ClassType.isBuiltIn(valueType, 'int') && isLiteralType(valueType)) { - const indexValue = valueType.literalValue as number; + if ( + isClassInstance(valueType) && + ClassType.isBuiltIn(valueType, 'int') && + isLiteralType(valueType) && + typeof valueType.literalValue === 'number' + ) { + const indexValue = valueType.literalValue; const tupleType = getSpecializedTupleType(baseType); - if (tupleType && tupleType.tupleTypeArguments) { - if (isOpenEndedTupleClass(tupleType)) { - return { node, type: tupleType.tupleTypeArguments[0] }; - } else if (indexValue >= 0 && indexValue < tupleType.tupleTypeArguments.length) { - return { node, type: tupleType.tupleTypeArguments[indexValue] }; + if (tupleType && tupleType.tupleTypeArguments && !isUnboundedTupleClass(tupleType)) { + if (indexValue >= 0 && indexValue < tupleType.tupleTypeArguments.length) { + return { node, type: tupleType.tupleTypeArguments[indexValue].type }; } else if (indexValue < 0 && tupleType.tupleTypeArguments.length + indexValue >= 0) { return { node, - type: 
tupleType.tupleTypeArguments[tupleType.tupleTypeArguments.length + indexValue], + type: tupleType.tupleTypeArguments[tupleType.tupleTypeArguments.length + indexValue].type, }; } } } else if (isClassInstance(valueType) && ClassType.isBuiltIn(valueType, 'slice')) { const tupleType = getSpecializedTupleType(baseType); - if (tupleType && tupleType.tupleTypeArguments && !isOpenEndedTupleClass(tupleType)) { + if (tupleType && tupleType.tupleTypeArguments && !isUnboundedTupleClass(tupleType)) { if (index0Expr.nodeType === ParseNodeType.Slice && !index0Expr.stepValue) { // Create a local helper function to evaluate the slice parameters. const getSliceParameter = (expression: ExpressionNode | undefined, defaultValue: number) => { @@ -5075,9 +6120,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if ( isClassInstance(valType) && ClassType.isBuiltIn(valType, 'int') && - isLiteralType(valType) + isLiteralType(valType) && + typeof valType.literalValue === 'number' ) { - value = valType.literalValue as number; + value = valType.literalValue; if (value < 0) { value = tupleType.tupleTypeArguments!.length + value; } @@ -5236,17 +6282,29 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node: IndexNode, flags: EvaluatorFlags, isAnnotatedClass = false, - hasCustomClassGetItem = false + hasCustomClassGetItem = false, + isFinalAnnotation = false, + isClassVarAnnotation = false ): TypeResult[] { const typeArgs: TypeResult[] = []; - let adjFlags = - flags & - ~( + let adjFlags = flags; + + if (isFinalAnnotation || isClassVarAnnotation) { + adjFlags |= EvaluatorFlags.ClassVarDisallowed | EvaluatorFlags.FinalDisallowed; + } else { + adjFlags &= ~( EvaluatorFlags.DoNotSpecialize | EvaluatorFlags.ParamSpecDisallowed | - EvaluatorFlags.TypeVarTupleDisallowed + EvaluatorFlags.TypeVarTupleDisallowed | + EvaluatorFlags.RequiredAllowed ); - adjFlags |= EvaluatorFlags.ClassVarDisallowed; + + if (!isAnnotatedClass) { + adjFlags 
|= EvaluatorFlags.ClassVarDisallowed | EvaluatorFlags.FinalDisallowed; + } + + adjFlags |= EvaluatorFlags.AllowUnpackedTupleOrTypeVarTuple; + } // Create a local function that validates a single type argument. const getTypeArgTypeResult = (expr: ExpressionNode, argIndex: number) => { @@ -5282,20 +6340,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.items[0].valueExpression.expressions.forEach((item, index) => { typeArgs.push(getTypeArgTypeResult(item, index)); }); + + // Set the node's type so it isn't reevaluated later. + setTypeForNode(node.items[0].valueExpression); } else { node.items.forEach((arg, index) => { const typeResult = getTypeArgTypeResult(arg.valueExpression, index); if (arg.argumentCategory !== ArgumentCategory.Simple) { - if ( - arg.argumentCategory === ArgumentCategory.UnpackedList && - isVariadicTypeVar(typeResult.type) && - !typeResult.type.isVariadicUnpacked - ) { - typeResult.type = TypeVarType.cloneForUnpacked(typeResult.type); - } else { - addError(Localizer.Diagnostic.unpackedArgInTypeArgument(), arg.valueExpression); - typeResult.type = UnknownType.create(); + if (arg.argumentCategory === ArgumentCategory.UnpackedList) { + if (isVariadicTypeVar(typeResult.type) && !typeResult.type.isVariadicUnpacked) { + typeResult.type = TypeVarType.cloneForUnpacked(typeResult.type); + } else if ( + isInstantiableClass(typeResult.type) && + !typeResult.type.includeSubclasses && + isTupleClass(typeResult.type) + ) { + typeResult.type = ClassType.cloneForUnpacked(typeResult.type); + } } } @@ -5318,9 +6380,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions EvaluatorFlags.ExpectingType | EvaluatorFlags.ExpectingTypeAnnotation | EvaluatorFlags.ConvertEllipsisToAny | - EvaluatorFlags.EvaluateStringLiteralAsType | - EvaluatorFlags.FinalDisallowed | - EvaluatorFlags.ClassVarDisallowed; + EvaluatorFlags.EvaluateStringLiteralAsType; const fileInfo = AnalyzerNodeInfo.getFileInfo(node); if 
(fileInfo.isStubFile) { @@ -5333,6 +6393,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeList: node.entries.map((entry) => getTypeOfExpression(entry, undefined, adjustedFlags)), node, }; + + // Set the node's type so it isn't reevaluated later. + setTypeForNode(node); } else { typeResult = getTypeOfExpression(node, /* expectedType */ undefined, adjustedFlags); @@ -5341,9 +6404,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions addError(Localizer.Diagnostic.protocolNotAllowedInTypeArgument(), node); } - // "ClassVar" is not allowed as a type argument. - if (isClass(typeResult.type) && ClassType.isBuiltIn(typeResult.type, 'ClassVar')) { - addError(Localizer.Diagnostic.protocolNotAllowedInTypeArgument(), node); + if ((flags & EvaluatorFlags.ClassVarDisallowed) !== 0) { + // "ClassVar" is not allowed as a type argument. + if (isClass(typeResult.type) && ClassType.isBuiltIn(typeResult.type, 'ClassVar')) { + addError(Localizer.Diagnostic.classVarNotAllowed(), node); + } } } @@ -5358,17 +6423,22 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If the expected type is a union, recursively call for each of the subtypes // to find one that matches. 
let effectiveExpectedType = expectedType; + let expectedTypeContainsAny = expectedType && isAny(expectedType); if (expectedType && isUnion(expectedType)) { let matchingSubtype: Type | undefined; doForEachSubtype(expectedType, (subtype) => { + if (isAny(subtype)) { + expectedTypeContainsAny = true; + } + if (!matchingSubtype) { const subtypeResult = useSpeculativeMode(node, () => { return getTypeFromTupleExpected(node, subtype); }); - if (subtypeResult) { + if (subtypeResult && canAssignType(subtype, subtypeResult.type)) { matchingSubtype = subtype; } } @@ -5384,7 +6454,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - return getTypeFromTupleInferred(node, /* useAny */ !!expectedType); + const resultType = getTypeFromTupleInferred(node); + + // If there was an expected type of Any, replace the resulting type + // with Any rather than return a type with unknowns. + if (expectedTypeContainsAny) { + resultType.type = AnyType.create(); + } + + return resultType; } function getTypeFromTupleExpected(node: TupleNode, expectedType: Type): TypeResult | undefined { @@ -5398,27 +6476,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Build an array of expected types. - const expectedTypes: Type[] = []; + let expectedTypes: Type[] = []; if (isTupleClass(expectedType) && expectedType.tupleTypeArguments) { - // Is this a homogeneous tuple of indeterminate length? If so, - // match the number of expected types to the number of entries - // in the tuple expression. 
- if (isOpenEndedTupleClass(expectedType)) { - const homogenousType = transformPossibleRecursiveTypeAlias(expectedType.tupleTypeArguments[0]); - for (let i = 0; i < node.expressions.length; i++) { - expectedTypes.push(homogenousType); + expectedTypes = expectedType.tupleTypeArguments.map((t) => transformPossibleRecursiveTypeAlias(t.type)); + const unboundedIndex = expectedType.tupleTypeArguments.findIndex((t) => t.isUnbounded); + if (unboundedIndex >= 0) { + if (expectedTypes.length > node.expressions.length) { + expectedTypes.splice(unboundedIndex, 1); + } else { + while (expectedTypes.length < node.expressions.length) { + expectedTypes.splice(unboundedIndex, 0, expectedTypes[unboundedIndex]); + } } - } else { - expectedType.tupleTypeArguments.forEach((typeArg) => { - expectedTypes.push(transformPossibleRecursiveTypeAlias(typeArg)); - }); } } else { const tupleTypeVarMap = new TypeVarMap(getTypeVarScopeId(tupleClassType)); if ( !populateTypeVarMapBasedOnExpectedType( - tupleClassType, + ClassType.cloneAsInstance(tupleClassType), expectedType, tupleTypeVarMap, getTypeVarScopesForNode(node) @@ -5456,10 +6532,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { type, node }; } - function getTypeFromTupleInferred(node: TupleNode, useAny: boolean): TypeResult { - const entryTypeResults = node.expressions.map((expr) => - getTypeOfExpression(expr, useAny ? 
AnyType.create() : undefined) - ); + function getTypeFromTupleInferred(node: TupleNode): TypeResult { + const entryTypeResults = node.expressions.map((expr) => getTypeOfExpression(expr)); const isIncomplete = entryTypeResults.some((result) => result.isIncomplete); if (!tupleClassType || !isInstantiableClass(tupleClassType)) { @@ -5471,38 +6545,45 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { type, node, isIncomplete }; } - function buildTupleTypesList(entryTypeResults: TypeResult[]): Type[] { - const entryTypes: Type[] = []; - let isOpenEnded = false; + function buildTupleTypesList(entryTypeResults: TypeResult[]): TupleTypeArgument[] { + const entryTypes: TupleTypeArgument[] = []; for (const typeResult of entryTypeResults) { + let possibleUnpackedTuple: Type | undefined; if (typeResult.unpackedType) { - // Is this an unpacked tuple? If so, we can append the individual - // unpacked entries onto the new tuple. If it's not an upacked tuple - // but some other iterator (e.g. a List), we won't know the number of - // items, so we'll need to leave the Tuple open-ended. - if (isClassInstance(typeResult.unpackedType) && isTupleClass(typeResult.unpackedType)) { - const typeArgs = typeResult.unpackedType.tupleTypeArguments; - - // If the Tuple wasn't specialized or has a "..." type parameter, we can't - // make any determination about its contents. - if (!typeArgs || isOpenEndedTupleClass(typeResult.unpackedType)) { - entryTypes.push(typeResult.type); - isOpenEnded = true; - } else { - entryTypes.push(...typeArgs); - } + possibleUnpackedTuple = typeResult.unpackedType; + } else if (isUnpacked(typeResult.type)) { + possibleUnpackedTuple = typeResult.type; + } + + // Is this an unpacked tuple? If so, we can append the individual + // unpacked entries onto the new tuple. If it's not an upacked tuple + // but some other iterator (e.g. a List), we won't know the number of + // items, so we'll need to leave the Tuple open-ended. 
+ if ( + possibleUnpackedTuple && + isClassInstance(possibleUnpackedTuple) && + possibleUnpackedTuple.tupleTypeArguments + ) { + const typeArgs = possibleUnpackedTuple.tupleTypeArguments; + + if (!typeArgs) { + entryTypes.push({ type: UnknownType.create(), isUnbounded: true }); } else { - entryTypes.push(typeResult.type); - isOpenEnded = true; + entryTypes.push(...typeArgs); } } else { - entryTypes.push(typeResult.type); + entryTypes.push({ type: typeResult.type, isUnbounded: !!typeResult.unpackedType }); } } - if (isOpenEnded) { - return [combineTypes(entryTypes), AnyType.create(/* isEllipsis */ true)]; + // If there are multiple unbounded entries, combine all of them into a single + // unbounded entry to avoid violating the invariant that there can be at most + // one unbounded entry in a tuple. + if (entryTypes.filter((t) => t.isUnbounded).length > 1) { + const firstUnboundedEntryIndex = entryTypes.findIndex((t) => t.isUnbounded); + const removedEntries = entryTypes.splice(firstUnboundedEntryIndex); + entryTypes.push({ type: combineTypes(removedEntries.map((t) => t.type)), isUnbounded: true }); } return entryTypes; @@ -5536,16 +6617,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.leftExpression.nodeType === ParseNodeType.Name && node.leftExpression.value === 'reveal_type' ) { - if ( - node.arguments.length === 1 && - node.arguments[0].argumentCategory === ArgumentCategory.Simple && - node.arguments[0].name === undefined - ) { - // Handle the special-case "reveal_type" call. - returnResult.type = getTypeFromRevealType(node); - } else { - addError(Localizer.Diagnostic.revealTypeArgs(), node); - } + // Handle the implicit "reveal_type" call. + returnResult = getTypeFromRevealType(node, expectedType); + } else if (isFunction(baseTypeResult.type) && baseTypeResult.type.details.builtInName === 'reveal_type') { + // Handle the "typing.reveal_type" call. 
+ returnResult = getTypeFromRevealType(node, expectedType); } else if ( isAnyOrUnknown(baseTypeResult.type) && node.leftExpression.nodeType === ParseNodeType.Name && @@ -5569,12 +6645,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions returnResult.type = callResult.returnType || UnknownType.create(); - // If some of the subtypes have NoReturn and others don't remove - // the NoReturn type from the union. - if (isUnion(returnResult.type)) { - returnResult.type = removeNoReturnFromUnion(returnResult.type); - } - if (callResult.argumentErrors) { returnResult.typeErrors = true; @@ -5612,9 +6682,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions AnalyzerNodeInfo.getFileInfo(node).isTypingStubFile; if (!isCyclicalTypeVarCall) { - argList.forEach((arg, index) => { - if (arg.node!.valueExpression.nodeType !== ParseNodeType.StringList) { - getTypeForArgument(arg); + argList.forEach((arg) => { + if ( + arg.valueExpression && + arg.valueExpression.nodeType !== ParseNodeType.StringList && + !isTypeCached(arg.valueExpression) + ) { + getTypeOfExpression(arg.valueExpression); } }); } @@ -5622,23 +6696,86 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return returnResult; } - function getTypeFromRevealType(node: CallNode) { - const type = getTypeOfExpression(node.arguments[0].valueExpression).type; - const exprString = ParseTreeUtils.printExpression(node.arguments[0].valueExpression); + function getTypeFromRevealType(node: CallNode, expectedType: Type | undefined): TypeResult { + let arg0Value: ExpressionNode | undefined; + let expectedRevealTypeNode: ExpressionNode | undefined; + let expectedRevealType: Type | undefined; + let expectedTextNode: ExpressionNode | undefined; + let expectedText: string | undefined; + + // Make sure there is only one positional argument passed as arg 0. 
+ node.arguments.forEach((arg, index) => { + if (index === 0) { + if (arg.argumentCategory === ArgumentCategory.Simple && !arg.name) { + arg0Value = arg.valueExpression; + } + } else if (arg.argumentCategory !== ArgumentCategory.Simple || !arg.name) { + arg0Value = undefined; + } else if (arg.name.value === 'expected_text') { + expectedTextNode = arg.valueExpression; + const expectedTextType = getTypeOfExpression(arg.valueExpression).type; + + if ( + !isClassInstance(expectedTextType) || + !ClassType.isBuiltIn(expectedTextType, 'str') || + typeof expectedTextType.literalValue !== 'string' + ) { + addError(Localizer.Diagnostic.revealTypeExpectedTextArg(), arg.valueExpression); + } else { + expectedText = expectedTextType.literalValue; + } + } else if (arg.name.value === 'expected_type') { + expectedRevealTypeNode = arg.valueExpression; + expectedRevealType = convertToInstance(getTypeForArgumentExpectingType(arg).type); + } + }); + + if (!arg0Value) { + addError(Localizer.Diagnostic.revealTypeArgs(), node); + return { node, type: UnknownType.create() }; + } + + const typeResult = getTypeOfExpression(arg0Value, expectedType); + const type = typeResult.type; + + const exprString = ParseTreeUtils.printExpression(arg0Value); const typeString = printType(type, /* expandTypeAlias */ true); + + if (expectedText !== undefined) { + if (expectedText !== typeString) { + addError( + Localizer.Diagnostic.revealTypeExpectedTextMismatch().format({ + expected: expectedText, + received: typeString, + }), + expectedTextNode ?? arg0Value + ); + } + } + + if (expectedRevealType) { + if (!isTypeSame(expectedRevealType, type)) { + const expectedRevealTypeText = printType(expectedRevealType); + addError( + Localizer.Diagnostic.revealTypeExpectedTypeMismatch().format({ + expected: expectedRevealTypeText, + received: typeString, + }), + expectedRevealTypeNode ?? 
arg0Value + ); + } + } + addInformation( Localizer.DiagnosticAddendum.typeOfSymbol().format({ name: exprString, type: typeString }), node.arguments[0] ); - // Return a literal string with the type. We can use this in unit tests - // to validate the exact type. - const strType = getBuiltInType(node, 'str'); - if (isInstantiableClass(strType)) { - return ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strType, typeString)); - } - - return AnyType.create(); + return { + node, + type, + isIncomplete: typeResult.isIncomplete, + }; } function getTypeFromRevealLocals(node: CallNode) { @@ -5797,16 +6934,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // the class itself. It depends on whether the super() call is located // within an instance method or not. let resultIsInstance = true; - const enclosingMethod = ParseTreeUtils.getEnclosingFunction(node); - if (enclosingMethod) { - const methodType = getTypeOfFunction(enclosingMethod); - if (methodType) { - if ( - FunctionType.isStaticMethod(methodType.functionType) || - FunctionType.isConstructorMethod(methodType.functionType) || - FunctionType.isClassMethod(methodType.functionType) - ) { - resultIsInstance = false; + if (node.arguments.length <= 1) { + const enclosingMethod = ParseTreeUtils.getEnclosingFunction(node); + if (enclosingMethod) { + const methodType = getTypeOfFunction(enclosingMethod); + if (methodType) { + if ( + FunctionType.isStaticMethod(methodType.functionType) || + FunctionType.isConstructorMethod(methodType.functionType) || + FunctionType.isClassMethod(methodType.functionType) + ) { + resultIsInstance = false; + } } } } @@ -5831,6 +6970,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions resultIsInstance && bindToType && isInstantiableClass(bindToType) ? 
ClassType.cloneAsInstance(bindToType) : bindToType, + isSuperCall: true, }; } } @@ -5843,6 +6983,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (targetClassType.details.mro.some((mroBase) => isAnyOrUnknown(mroBase))) { return { type: UnknownType.create(), + isSuperCall: true, node, }; } @@ -5853,6 +6994,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isInstantiableClass(baseClassType)) { return { type: resultIsInstance ? ClassType.cloneAsInstance(baseClassType) : baseClassType, + isSuperCall: true, node, }; } @@ -5861,6 +7003,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { type: UnknownType.create(), + isSuperCall: true, node, }; } @@ -5875,7 +7018,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function validateOverloadsWithExpandedTypes( errorNode: ExpressionNode, expandedArgTypes: (Type | undefined)[][], - overloads: FunctionType[], argParamMatches: MatchArgsToParamsResult[], typeVarMap: TypeVarMap | undefined, skipUnknownArgCheck: boolean, @@ -5894,8 +7036,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const argTypeOverride = expandedArgTypes[expandedTypesIndex]; const hasArgTypeOverride = argTypeOverride.some((a) => a !== undefined); - for (let overloadIndex = 0; overloadIndex < overloads.length; overloadIndex++) { - const overload = overloads[overloadIndex]; + for (let overloadIndex = 0; overloadIndex < argParamMatches.length; overloadIndex++) { + const overload = argParamMatches[overloadIndex].overload; let matchResults = argParamMatches[overloadIndex]; if (hasArgTypeOverride) { @@ -5919,10 +7061,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Use speculative mode so we don't output any diagnostics or // record any final types in the type cache. 
const callResult = useSpeculativeMode(errorNode, () => { - return validateFunctionArgumentTypes( + return validateFunctionArgumentTypesWithExpectedType( errorNode, matchResults, - overload, effectiveTypeVarMap, /* skipUnknownArgCheck */ true, expectedType @@ -5956,10 +7097,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions useSpeculativeMode(errorNode, () => { typeVarMap.addSolveForScope(getTypeVarScopeId(overload)); typeVarMap.unlock(); - return validateFunctionArgumentTypes( + return validateFunctionArgumentTypesWithExpectedType( errorNode, matchResults, - overload, typeVarMap, /* skipUnknownArgCheck */ true, expectedType @@ -5970,12 +7110,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // And run through the first expanded argument list one more time to // populate the type cache. - const firstExpansionOverload = matchedOverloads[0].overload; matchedOverloads[0].typeVarMap.unlock(); - const finalCallResult = validateFunctionArgumentTypes( + const finalCallResult = validateFunctionArgumentTypesWithExpectedType( errorNode, matchedOverloads[0].matchResults, - firstExpansionOverload, matchedOverloads[0].typeVarMap, skipUnknownArgCheck, expectedType @@ -5985,7 +7123,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isTypeIncomplete = true; } - return { argumentErrors: false, returnType: combineTypes(returnTypes), isTypeIncomplete }; + return { + argumentErrors: false, + returnType: combineTypes(returnTypes), + isTypeIncomplete, + specializedInitSelfType: finalCallResult.specializedInitSelfType, + }; } function getBestOverloadForArguments( @@ -5993,33 +7136,62 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type: OverloadedFunctionType, argList: FunctionArgument[] ): FunctionType | undefined { - let firstMatch: FunctionType | undefined; + let overloadIndex = 0; + let matches: MatchArgsToParamsResult[] = []; + // Create a 
list of potential overload matches based on arguments. type.overloads.forEach((overload) => { - if (!firstMatch) { + useSpeculativeMode(errorNode, () => { + if (FunctionType.isOverloaded(overload)) { + const matchResults = matchFunctionArgumentsToParameters( + errorNode, + argList, + overload, + overloadIndex + ); + + if (!matchResults.argumentErrors) { + matches.push(matchResults); + } + + overloadIndex++; + } + }); + }); + + matches = sortOverloadsByBestMatch(matches); + + let winningOverloadIndex: number | undefined; + + matches.forEach((match, matchIndex) => { + if (winningOverloadIndex === undefined) { useSpeculativeMode(errorNode, () => { - if (FunctionType.isOverloaded(overload)) { - const matchResults = matchFunctionArgumentsToParameters(errorNode, argList, overload); - if (!matchResults.argumentErrors) { - const callResult = validateFunctionArgumentTypes( - errorNode, - matchResults, - overload, - new TypeVarMap(getTypeVarScopeId(overload)), - /* skipUnknownArgCheck */ true, - /* expectedType */ undefined - ); + const callResult = validateFunctionArgumentTypes( + errorNode, + match, + new TypeVarMap(getTypeVarScopeId(match.overload)), + /* skipUnknownArgCheck */ true + ); - if (callResult && !callResult.argumentErrors) { - firstMatch = overload; - } - } + if (callResult && !callResult.argumentErrors) { + winningOverloadIndex = matchIndex; } }); } }); - return firstMatch; + return winningOverloadIndex === undefined ? undefined : matches[winningOverloadIndex].overload; + } + + // Sorts the list of overloads based first on "relevance" and second on order. 
+ function sortOverloadsByBestMatch(matches: MatchArgsToParamsResult[]) { + return matches.sort((a, b) => { + if (a.relevance !== b.relevance) { + return b.relevance - a.relevance; + } + + return a.overloadIndex - b.overloadIndex; + }); } function validateOverloadedFunctionArguments( @@ -6030,8 +7202,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions skipUnknownArgCheck: boolean, expectedType: Type | undefined ): CallResult { - const filteredOverloads: FunctionType[] = []; - const filteredMatchResults: MatchArgsToParamsResult[] = []; + let filteredMatchResults: MatchArgsToParamsResult[] = []; let contextFreeArgTypes: Type[] = []; // Start by evaluating the types of the arguments without any expected @@ -6040,31 +7211,100 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // speculatively because we don't want to record any types in the type // cache or record any diagnostics at this stage. useSpeculativeMode(errorNode, () => { + let overloadIndex = 0; type.overloads.forEach((overload) => { // Consider only the functions that have the @overload decorator, // not the final function that omits the overload. This is the // intended behavior according to PEP 484. if (FunctionType.isOverloaded(overload)) { - const matchResults = matchFunctionArgumentsToParameters(errorNode, argList, overload); + const matchResults = matchFunctionArgumentsToParameters( + errorNode, + argList, + overload, + overloadIndex + ); if (!matchResults.argumentErrors) { - filteredOverloads.push(overload); filteredMatchResults.push(matchResults); } + + overloadIndex++; } }); // Also evaluate the types of each argument expression without regard to // the expectedType. We'll use this to determine whether we need to do // union expansion. - contextFreeArgTypes = argList.map((arg) => - arg.type - ? arg.type - : arg.valueExpression - ? 
getTypeOfExpression(arg.valueExpression).type - : AnyType.create() - ); + contextFreeArgTypes = argList.map((arg) => { + if (arg.type) { + return arg.type; + } + + if (arg.valueExpression) { + const valueExpressionNode = arg.valueExpression; + return useSpeculativeMode(valueExpressionNode, () => { + return getTypeOfExpression(valueExpressionNode).type; + }); + } + + return AnyType.create(); + }); }); + filteredMatchResults = sortOverloadsByBestMatch(filteredMatchResults); + + // If there are no possible arg/param matches among the overloads, + // emit an error that includes the argument types. + if (filteredMatchResults.length === 0) { + // Skip the error message if we're in speculative mode because it's very + // expensive, and we're going to suppress the diagnostic anyway. + if (!isDiagnosticSuppressedForNode(errorNode)) { + const functionName = type.overloads[0].details.name || ''; + const diagAddendum = new DiagnosticAddendum(); + const argTypes = argList.map((t) => printType(getTypeForArgument(t).type)); + + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.argumentTypes().format({ types: argTypes.join(', ') }) + ); + addDiagnostic( + AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.noOverload().format({ name: functionName }) + diagAddendum.getString(), + errorNode + ); + } + + return { argumentErrors: true, isTypeIncomplete: false }; + } + + // Create a helper lambda that evaluates the overload that matches + // the arg/param lists. + const evaluateUsingLastMatchingOverload = (skipUnknownArgCheck: boolean) => { + // Find the match with the largest overload index (i.e. the last overload + // that was in the overload list). + const lastMatch = filteredMatchResults.reduce((previous, current) => { + return current.overloadIndex > previous.overloadIndex ? current : previous; + }); + + const effectiveTypeVarMap = typeVarMap ?? 
new TypeVarMap(); + effectiveTypeVarMap.addSolveForScope(getTypeVarScopeId(lastMatch.overload)); + effectiveTypeVarMap.unlock(); + + return validateFunctionArgumentTypesWithExpectedType( + errorNode, + lastMatch, + effectiveTypeVarMap, + skipUnknownArgCheck, + expectedType + ); + }; + + // If there is only one possible arg/param match among the overloads, + // use the normal type matching mechanism because it is faster and + // will provide a clearer error message. + if (filteredMatchResults.length === 1) { + return evaluateUsingLastMatchingOverload(/* skipUnknownArgCheck */ false); + } + let expandedArgTypes: (Type | undefined)[][] | undefined = [argList.map((arg) => undefined)]; let isTypeIncomplete = false; @@ -6072,7 +7312,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const callResult = validateOverloadsWithExpandedTypes( errorNode, expandedArgTypes, - filteredOverloads, filteredMatchResults, typeVarMap, skipUnknownArgCheck, @@ -6101,25 +7340,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // in speculative mode because it's very expensive, and we're going to // suppress the diagnostic anyway. 
if (!isDiagnosticSuppressedForNode(errorNode) && !isTypeIncomplete) { - const functionName = type.overloads[0].details.name || ''; - const diagAddendum = new DiagnosticAddendum(); - const argTypes = argList.map((t) => printType(getTypeForArgument(t))); + const result = evaluateUsingLastMatchingOverload(/* skipUnknownArgCheck */ true); - diagAddendum.addMessage( - Localizer.DiagnosticAddendum.argumentTypes().format({ types: argTypes.join(', ') }) - ); - if (expandedArgTypes && expandedArgTypes.length > maxOverloadUnionExpansionCount) { - diagAddendum.addMessage(Localizer.DiagnosticAddendum.overloadTooManyUnions()); - } - addDiagnostic( - AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.noOverload().format({ name: functionName }) + diagAddendum.getString(), - errorNode - ); + // Replace the result with an unknown type since we don't know + // what overload should have been used. + result.returnType = UnknownType.create(); + return result; } - return { argumentErrors: true, isTypeIncomplete }; + return { argumentErrors: true, isTypeIncomplete: false }; } // Replaces each item in the expandedArgTypes with n items where n is @@ -6189,6 +7418,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let validatedTypes = false; let returnType: Type | undefined; let reportedErrors = false; + let isTypeIncomplete = false; + let usedMetaclassCallMethod = false; // Create a helper function that determines whether we should skip argument // validation for either __init__ or __new__. 
This is required for certain @@ -6207,7 +7438,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ClassType.cloneAsInstance(type), '__init__', { method: 'get' }, - new DiagnosticAddendum(), + /* diag */ undefined, MemberAccessFlags.SkipObjectBaseClass | MemberAccessFlags.SkipAttributeAccessOverride )?.type; @@ -6221,7 +7452,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const typeVarMap = new TypeVarMap(getTypeVarScopeId(type)); if ( populateTypeVarMapBasedOnExpectedType( - type, + ClassType.cloneAsInstance(type), expectedSubType, typeVarMap, getTypeVarScopesForNode(errorNode) @@ -6242,7 +7473,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (!callResult?.argumentErrors) { // Call validateCallArguments again, this time without speculative // mode, so any errors are reported. - validateCallArguments( + const callResult = validateCallArguments( errorNode, argList, initMethodType, @@ -6250,6 +7481,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions skipUnknownArgCheck, NoneType.createInstance() ); + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + return applyExpectedSubtypeForConstructor(type, expectedSubType, typeVarMap); } } @@ -6277,7 +7513,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); if (!callResult.argumentErrors) { - returnType = applyExpectedTypeForConstructor(type, /* expectedType */ undefined, typeVarMap); + let adjustedClassType = type; + if ( + callResult.specializedInitSelfType && + isClassInstance(callResult.specializedInitSelfType) && + ClassType.isSameGenericClass(callResult.specializedInitSelfType, type) + ) { + adjustedClassType = ClassType.cloneAsInstantiable(callResult.specializedInitSelfType); + } + + returnType = applyExpectedTypeForConstructor( + adjustedClassType, + /* expectedType */ undefined, + typeVarMap + ); + + if 
(callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } } else { reportedErrors = true; } @@ -6291,117 +7544,114 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Don't report errors for __new__ if __init__ already generated errors. They're // probably going to be entirely redundant anyway. if (!reportedErrors) { - // See if there is a custom metaclass that defines a __call__ method. If so, - // we'll assume that the __new__ method on the class is not used. const metaclass = type.details.effectiveMetaclass; - let metaclassCallMethodInfo: ClassMember | undefined; - if (metaclass && isInstantiableClass(metaclass) && !ClassType.isBuiltIn(metaclass, 'type')) { - metaclassCallMethodInfo = lookUpClassMember( + let constructorMethodInfo: ClassMemberLookup | undefined; + + // See if there's a custom `__call__` method on the metaclass. If so, we'll + // use that rather than the `__new__` method on the class. + if (metaclass && isInstantiableClass(metaclass) && !ClassType.isSameGenericClass(metaclass, type)) { + constructorMethodInfo = getTypeFromClassMemberName( + errorNode, metaclass, '__call__', - ClassMemberLookupFlags.DeclaredTypesOnly | - ClassMemberLookupFlags.SkipObjectBaseClass | - ClassMemberLookupFlags.SkipInstanceVariables + { method: 'get' }, + /* diag */ undefined, + MemberAccessFlags.ConsiderMetaclassOnly | + MemberAccessFlags.SkipTypeBaseClass | + MemberAccessFlags.SkipAttributeAccessOverride, + type ); - // We're not interested in the __call__ method on the 'type' class. 
- if ( - metaclassCallMethodInfo && - isInstantiableClass(metaclassCallMethodInfo.classType) && - ClassType.isBuiltIn(metaclassCallMethodInfo.classType, 'type') - ) { - metaclassCallMethodInfo = undefined; + if (constructorMethodInfo) { + usedMetaclassCallMethod = true; } } - const constructorMethodInfo = getTypeFromClassMemberName( - errorNode, - type, - '__new__', - { method: 'get' }, - new DiagnosticAddendum(), - MemberAccessFlags.AccessClassMembersOnly | - MemberAccessFlags.SkipObjectBaseClass | - MemberAccessFlags.TreatConstructorAsClassMethod, - type - ); - if ( - !metaclassCallMethodInfo && - constructorMethodInfo && - !skipConstructorCheck(constructorMethodInfo.type) - ) { - const constructorMethodType = constructorMethodInfo.type; + if (!constructorMethodInfo) { + constructorMethodInfo = getTypeFromClassMemberName( + errorNode, + type, + '__new__', + { method: 'get' }, + /* diag */ undefined, + MemberAccessFlags.AccessClassMembersOnly | + MemberAccessFlags.SkipObjectBaseClass | + MemberAccessFlags.TreatConstructorAsClassMethod, + type + ); + } + + if (constructorMethodInfo && !skipConstructorCheck(constructorMethodInfo.type)) { const typeVarMap = new TypeVarMap(getTypeVarScopeId(type)); if (type.typeAliasInfo) { typeVarMap.addSolveForScope(type.typeAliasInfo.typeVarScopeId); } - if (constructorMethodType) { - // Skip the unknown argument check if we've already checked for __init__. - const callResult = validateCallArguments( - errorNode, - argList, - constructorMethodType, - typeVarMap, - skipUnknownArgCheck - ); + typeVarMap.addSolveForScope(getTypeVarScopeId(constructorMethodInfo.type)); - if (callResult.argumentErrors) { - reportedErrors = true; - } else { - let newReturnType = callResult.returnType; - - // If the constructor returned an object whose type matches the class of - // the original type being constructed, use the return type in case it was - // specialized. 
If it doesn't match, we'll fall back on the assumption that - // the constructed type is an instance of the class type. We need to do this - // in cases where we're inferring the return type based on a call to - // super().__new__(). - if (newReturnType) { - if (isClassInstance(newReturnType) && ClassType.isSameGenericClass(newReturnType, type)) { - // If the specialized return type derived from the __init__ - // method is "better" than the return type provided by the - // __new__ method (where "better" means that the type arguments - // are all known), stick with the __init__ result. + // Skip the unknown argument check if we've already checked for __init__. + const callResult = validateCallArguments( + errorNode, + argList, + constructorMethodInfo.type, + typeVarMap, + skipUnknownArgCheck + ); + + if (callResult.argumentErrors) { + reportedErrors = true; + } else { + let newReturnType = callResult.returnType; + + if (callResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + // If the constructor returned an object whose type matches the class of + // the original type being constructed, use the return type in case it was + // specialized. If it doesn't match, we'll fall back on the assumption that + // the constructed type is an instance of the class type. We need to do this + // in cases where we're inferring the return type based on a call to + // super().__new__(). + if (newReturnType) { + if (isClassInstance(newReturnType) && ClassType.isSameGenericClass(newReturnType, type)) { + // If the specialized return type derived from the __init__ + // method is "better" than the return type provided by the + // __new__ method (where "better" means that the type arguments + // are all known), stick with the __init__ result. + if ( + (!isPartlyUnknown(newReturnType) && !requiresSpecialization(newReturnType)) || + returnType === undefined + ) { + // Special-case the 'tuple' type specialization to use + // the homogenous arbitrary-length form. 
if ( - (!isPartlyUnknown(newReturnType) && !requiresSpecialization(newReturnType)) || - returnType === undefined + isClassInstance(newReturnType) && + ClassType.isTupleClass(newReturnType) && + !newReturnType.tupleTypeArguments && + newReturnType.typeArguments && + newReturnType.typeArguments.length === 1 ) { - // Special-case the 'tuple' type specialization to use - // the homogenous arbitrary-length form. - if ( - isClassInstance(newReturnType) && - ClassType.isTupleClass(newReturnType) && - !newReturnType.tupleTypeArguments && - newReturnType.typeArguments && - newReturnType.typeArguments.length === 1 - ) { - newReturnType = specializeTupleClass(newReturnType, [ - newReturnType.typeArguments[0], - AnyType.create(/* isEllipsis */ true), - ]); - } - - returnType = newReturnType; + newReturnType = specializeTupleClass(newReturnType, [ + { type: newReturnType.typeArguments[0], isUnbounded: true }, + ]); } - } else if (!returnType && !isUnknown(newReturnType)) { + returnType = newReturnType; } + } else if (!returnType && !isUnknown(newReturnType)) { + returnType = newReturnType; } } + } - if (!returnType) { - returnType = applyExpectedTypeForConstructor(type, expectedType, typeVarMap); - } else if ( - isClassInstance(returnType) && - isTupleClass(returnType) && - !returnType.tupleTypeArguments - ) { - returnType = applyExpectedTypeForTupleConstructor(returnType, expectedType); - } - validatedTypes = true; + if (!returnType) { + returnType = applyExpectedTypeForConstructor(type, expectedType, typeVarMap); + } else if (isClassInstance(returnType) && isTupleClass(returnType) && !returnType.tupleTypeArguments) { + returnType = applyExpectedTypeForTupleConstructor(returnType, expectedType); } + validatedTypes = true; } } @@ -6417,13 +7667,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (!validatedTypes && argList.length > 0) { // Suppress this error if the class was instantiated from a custom - // metaclass because it's likely 
that it's a false positive. + // metaclass because it's likely that it's a false positive. Also + // suppress the error if the class's metaclass has a __call__ method. const isCustomMetaclass = !!type.details.effectiveMetaclass && isInstantiableClass(type.details.effectiveMetaclass) && !ClassType.isBuiltIn(type.details.effectiveMetaclass); - if (!isCustomMetaclass) { + if (!isCustomMetaclass && !usedMetaclassCallMethod) { const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); addDiagnostic( fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, @@ -6436,21 +7687,50 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (!returnType) { // There was no __init__ or __new__ method or we couldn't match the provided - // arguments to them. Do our best to specialize the instantiated class based - // on the expected type (if provided). - const typeVarMap = new TypeVarMap(getTypeVarScopeId(type)); - if (expectedType) { - populateTypeVarMapBasedOnExpectedType( - type, - expectedType, - typeVarMap, - getTypeVarScopesForNode(errorNode) - ); + // arguments to them. + if (!expectedType && type.typeArguments) { + // If there was no expected type but the type was already specialized, + // assume that we're constructing an instance of the specialized type. + returnType = convertToInstance(type); + } else { + // Do our best to specialize the instantiated class based on the expected + // type if provided. 
+ const typeVarMap = new TypeVarMap(getTypeVarScopeId(type)); + + if (expectedType) { + populateTypeVarMapBasedOnExpectedType( + ClassType.cloneAsInstance(type), + expectedType, + typeVarMap, + getTypeVarScopesForNode(errorNode) + ); + } + + returnType = applyExpectedTypeForConstructor(type, expectedType, typeVarMap); + } + } + + if (!reportedErrors) { + const transformed = applyConstructorTransform(evaluatorInterface, errorNode, argList, type, { + argumentErrors: reportedErrors, + returnType, + isTypeIncomplete, + }); + + returnType = transformed.returnType; + + if (transformed.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (transformed.argumentErrors) { + reportedErrors = true; } - returnType = applyExpectedTypeForConstructor(type, expectedType, typeVarMap); } - return { argumentErrors: reportedErrors, returnType }; + const result: CallResult = { argumentErrors: reportedErrors, returnType, isTypeIncomplete }; + + return result; } function applyExpectedSubtypeForConstructor( @@ -6458,13 +7738,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions expectedSubtype: Type, typeVarMap: TypeVarMap ): Type | undefined { - const specializedType = applySolvedTypeVars( - ClassType.cloneAsInstance(type), - typeVarMap, - /* unknownIfNotFound */ true - ); + const specializedType = applySolvedTypeVars(ClassType.cloneAsInstance(type), typeVarMap); - if (!canAssignType(expectedSubtype, specializedType, new DiagnosticAddendum())) { + if (!canAssignType(expectedSubtype, specializedType)) { return undefined; } @@ -6524,7 +7800,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type: ClassType, expectedType: Type, typeVarMap: TypeVarMap, - liveTypeVarScopes: TypeVarScopeId[] + liveTypeVarScopes: TypeVarScopeId[] | undefined ): boolean { if (isAny(expectedType)) { type.details.typeParameters.forEach((typeParam) => { @@ -6533,7 +7809,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, 
evaluatorOptions return true; } - if (!isClassInstance(expectedType)) { + if (!isClass(expectedType)) { return false; } @@ -6542,9 +7818,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (!expectedTypeArgs) { return canAssignType( type, - ClassType.cloneAsInstantiable(expectedType), - new DiagnosticAddendum(), - typeVarMap + expectedType, + /* diag */ undefined, + typeVarMap, + CanAssignFlags.PopulatingExpectedType ); } @@ -6554,12 +7831,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const sameClassTypeVarMap = buildTypeVarMapFromSpecializedClass(expectedType); sameClassTypeVarMap.getTypeVars().forEach((entry) => { const typeVarType = sameClassTypeVarMap.getTypeVarType(entry.typeVar); - typeVarMap.setTypeVarType( - entry.typeVar, - entry.typeVar.details.variance === Variance.Covariant ? undefined : typeVarType, - entry.typeVar.details.variance === Variance.Contravariant ? undefined : typeVarType, - entry.retainLiteral - ); + + if (typeVarType) { + // Skip this if the type argument is a TypeVar defined by the class scope because + // we're potentially solving for these TypeVars. + if (!isTypeVar(typeVarType) || typeVarType.scopeId !== type.details.typeVarScopeId) { + typeVarMap.setTypeVarType( + entry.typeVar, + entry.typeVar.details.variance === Variance.Covariant ? undefined : typeVarType, + entry.typeVar.details.variance === Variance.Contravariant ? undefined : typeVarType, + entry.retainLiteral + ); + } + } }); return true; } @@ -6569,12 +7853,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const synthExpectedTypeArgs = ClassType.getTypeParameters(expectedType).map((typeParam, index) => { const typeVar = TypeVarType.createInstance(`__dest${index}`); typeVar.details.isSynthesized = true; - typeVar.details.variance = typeParam.details.variance; + + // Use invariance here so we set the narrow and wide values on the TypeVar. 
+ typeVar.details.variance = Variance.Invariant; typeVar.scopeId = expectedTypeScopeId; return typeVar; }); const genericExpectedType = ClassType.cloneForSpecialization( - ClassType.cloneAsInstantiable(expectedType), + expectedType, synthExpectedTypeArgs, /* isTypeArgumentExplicit */ true ); @@ -6584,12 +7870,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const typeVar = TypeVarType.createInstance(`__source${index}`); typeVar.details.isSynthesized = true; typeVar.details.synthesizedIndex = index; + typeVar.details.isExemptFromBoundCheck = true; return typeVar; }); const specializedType = ClassType.cloneForSpecialization(type, typeArgs, /* isTypeArgumentExplicit */ true); const syntheticTypeVarMap = new TypeVarMap(expectedTypeScopeId); - if (canAssignType(genericExpectedType, specializedType, new DiagnosticAddendum(), syntheticTypeVarMap)) { + if ( + canAssignType( + genericExpectedType, + specializedType, + /* diag */ undefined, + syntheticTypeVarMap, + CanAssignFlags.PopulatingExpectedType + ) + ) { + let isResultValid = true; + synthExpectedTypeArgs.forEach((typeVar, index) => { const synthTypeVar = syntheticTypeVarMap.getTypeVarType(typeVar); @@ -6604,23 +7901,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const targetTypeVar = ClassType.getTypeParameters(specializedType)[synthTypeVar.details.synthesizedIndex]; if (index < expectedTypeArgs.length) { - const expectedTypeArgValue = transformExpectedTypeForConstructor( - expectedTypeArgs[index], - typeVarMap, - liveTypeVarScopes - ); + let expectedTypeArgValue: Type | undefined = expectedTypeArgs[index]; + + if (liveTypeVarScopes) { + expectedTypeArgValue = transformExpectedTypeForConstructor( + expectedTypeArgValue, + typeVarMap, + liveTypeVarScopes + ); + } + if (expectedTypeArgValue) { typeVarMap.setTypeVarType( targetTypeVar, typeVar.details.variance === Variance.Covariant ? 
undefined : expectedTypeArgValue, typeVar.details.variance === Variance.Contravariant ? undefined : expectedTypeArgValue ); + } else { + isResultValid = false; } } } }); - return true; + return isResultValid; } return false; @@ -6641,12 +7945,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ): CallResult { let argumentErrors = false; let isTypeIncomplete = false; + let specializedInitSelfType: Type | undefined; if (recursionCount > maxTypeRecursionCount) { return { returnType: UnknownType.create(), argumentErrors: true }; } + recursionCount++; - if (TypeBase.isNonCallable(callType)) { + if (TypeBase.isSpecialForm(callType)) { const exprNode = errorNode.nodeType === ParseNodeType.Call ? errorNode.leftExpression : errorNode; addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, @@ -6690,21 +7996,61 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return createNamedTupleType(evaluatorInterface, errorNode, argList, false); } + let effectiveTypeVarMap = typeVarMap; + if (!effectiveTypeVarMap) { + // If a typeVarMap wasn't provided by the caller, allocate one here. + effectiveTypeVarMap = new TypeVarMap(getTypeVarScopeId(expandedSubtype)); + + // There are certain cases, such as with super().__new__(cls) calls where + // the call is a constructor but the proper TypeVar scope has been lost. + // We'll add a wildcard TypeVar scope here. This is a bit of a hack and + // we may need to revisit this in the future. 
+ if ( + !effectiveTypeVarMap.getSolveForScopes() && + FunctionType.isConstructorMethod(expandedSubtype) + ) { + effectiveTypeVarMap.addSolveForScope(WildcardTypeVarScopeId); + } + } + const functionResult = validateFunctionArguments( errorNode, argList, expandedSubtype, - typeVarMap || new TypeVarMap(getTypeVarScopeId(expandedSubtype)), + effectiveTypeVarMap, skipUnknownArgCheck, expectedType ); + if (functionResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + if (functionResult.argumentErrors) { argumentErrors = true; - } + } else { + specializedInitSelfType = functionResult.specializedInitSelfType; - if (functionResult.isTypeIncomplete) { - isTypeIncomplete = true; + // Call the function transform logic to handle special-cased functions. + const transformed = applyFunctionTransform( + evaluatorInterface, + errorNode, + argList, + expandedSubtype, + { + argumentErrors: functionResult.argumentErrors, + returnType: functionResult.returnType ?? UnknownType.create(), + isTypeIncomplete, + } + ); + + functionResult.returnType = transformed.returnType; + if (transformed.isTypeIncomplete) { + isTypeIncomplete = true; + } + if (transformed.argumentErrors) { + argumentErrors = true; + } } // Handle the NewType specially, replacing the normal return type. @@ -6724,37 +8070,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions case TypeCategory.OverloadedFunction: { // Handle the 'cast' call as a special case. - const isCast = - expandedSubtype.overloads[0].details.builtInName === 'cast' && argList.length === 2; - - if (isCast) { - // Precalculate the type of the first argument using special semantics, - // since we are expecting a type here. This allows us to support quoted - // types, etc. 
- getTypeForArgumentExpectingType(argList[0]); - } - - const functionResult = validateOverloadedFunctionArguments( - errorNode, - argList, - expandedSubtype, - typeVarMap, - skipUnknownArgCheck, - expectedType - ); - - if (functionResult.argumentErrors) { - argumentErrors = true; - } - - if (functionResult.isTypeIncomplete) { - isTypeIncomplete = true; - } - - if (isCast) { + if (expandedSubtype.overloads[0].details.builtInName === 'cast' && argList.length === 2) { // Verify that the cast is necessary. - const castToType = getTypeForArgumentExpectingType(argList[0]); - const castFromType = getTypeForArgument(argList[1]); + const castToType = getTypeForArgumentExpectingType(argList[0]).type; + const castFromType = getTypeForArgument(argList[1]).type; if (isInstantiableClass(castToType) && isClassInstance(castFromType)) { if ( isTypeSame( @@ -6777,6 +8096,46 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return convertToInstance(castToType); } + const functionResult = validateOverloadedFunctionArguments( + errorNode, + argList, + expandedSubtype, + typeVarMap, + skipUnknownArgCheck, + expectedType + ); + + if (functionResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + + if (functionResult.argumentErrors) { + argumentErrors = true; + } else { + specializedInitSelfType = functionResult.specializedInitSelfType; + + // Call the function transform logic to handle special-cased functions. + const transformed = applyFunctionTransform( + evaluatorInterface, + errorNode, + argList, + expandedSubtype, + { + argumentErrors: functionResult.argumentErrors, + returnType: functionResult.returnType ?? 
UnknownType.create(), + isTypeIncomplete, + } + ); + + functionResult.returnType = transformed.returnType; + if (transformed.isTypeIncomplete) { + isTypeIncomplete = true; + } + if (transformed.argumentErrors) { + argumentErrors = true; + } + } + return functionResult.returnType || UnknownType.create(); } @@ -6810,14 +8169,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (argList.length === 1) { // The one-parameter form of "type" returns the class // for the specified object. - const argType = getTypeForArgument(argList[0]); - if ( - isClassInstance(argType) || - (isTypeVar(argType) && TypeBase.isInstance(argType)) || - isNone(argType) - ) { - return convertToInstantiable(stripLiteralValue(argType)); - } + const argType = getTypeForArgument(argList[0]).type; + return mapSubtypes(argType, (subtype) => { + if ( + isClassInstance(subtype) || + (isTypeVar(subtype) && TypeBase.isInstance(subtype)) || + isNoneInstance(subtype) + ) { + return convertToInstantiable(stripLiteralValue(subtype)); + } + + return AnyType.create(); + }); } else if (argList.length >= 2) { // The two-parameter form of "type" returns a new class type // built from the specified base types. 
@@ -6845,6 +8208,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return createNamedTupleType(evaluatorInterface, errorNode, argList, true); } + if (className === 'NewType') { + return createNewType(errorNode, argList); + } + if ( className === 'Protocol' || className === 'Generic' || @@ -6865,6 +8232,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if ( className === 'Enum' || className === 'IntEnum' || + className === 'StrEnum' || className === 'Flag' || className === 'IntFlag' ) { @@ -6943,9 +8311,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions skipUnknownArgCheck, expectedType ); + if (constructorResult.argumentErrors) { argumentErrors = true; } + + if (constructorResult.isTypeIncomplete) { + isTypeIncomplete = true; + } + let returnType = constructorResult.returnType; // If the expandedSubtype originated from a TypeVar, convert @@ -6966,8 +8340,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isInstantiableClass(baseClass) && ClassType.isBuiltIn(baseClass, 'type') ) ) { - // We don't know the name of the new class in this case. 
- const newClassName = '__class_' + returnType.details.name; + let newClassName = '__class_' + returnType.details.name; + if (argList.length === 3) { + const firstArgType = getTypeForArgument(argList[0]).type; + if ( + isClassInstance(firstArgType) && + ClassType.isBuiltIn(firstArgType, 'str') && + typeof firstArgType.literalValue === 'string' + ) { + newClassName = firstArgType.literalValue; + } + } + const newClassType = ClassType.createInstantiable( newClassName, '', @@ -6979,15 +8363,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ClassType.cloneAsInstantiable(returnType) ); newClassType.details.baseClasses.push(getBuiltInType(errorNode, 'object')); + newClassType.details.effectiveMetaclass = expandedSubtype; computeMroLinearization(newClassType); return newClassType; } return returnType; } else { - const memberType = getTypeFromObjectMember(errorNode, expandedSubtype, '__call__')?.type; + let memberType = getTypeFromObjectMember(errorNode, expandedSubtype, '__call__')?.type; if (memberType && (isFunction(memberType) || isOverloadedFunction(memberType))) { + memberType = removeParamSpecVariadicsFromSignature(memberType); + const functionResult = validateCallArguments( errorNode, argList, @@ -6995,7 +8382,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeVarMap, skipUnknownArgCheck, expectedType, - recursionCount + 1 + recursionCount ); if (functionResult.argumentErrors) { argumentErrors = true; @@ -7041,7 +8428,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeVarMap, skipUnknownArgCheck, expectedType, - recursionCount + 1 + recursionCount ); if (callResult.argumentErrors) { @@ -7050,6 +8437,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return callResult.returnType || UnknownType.create(); } + + case TypeCategory.Module: { + addDiagnostic( + 
AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.moduleNotCallable(), + errorNode + ); + return undefined; + } } return undefined; @@ -7058,8 +8455,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { argumentErrors, - returnType: isNever(returnType) ? undefined : returnType, + returnType: isNever(returnType) && !returnType.isNoReturn ? undefined : returnType, isTypeIncomplete, + specializedInitSelfType, }; } @@ -7070,58 +8468,34 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function matchFunctionArgumentsToParameters( errorNode: ExpressionNode, argList: FunctionArgument[], - type: FunctionType + type: FunctionType, + overloadIndex: number ): MatchArgsToParamsResult { + const paramDetails = getParameterListDetails(type); let argIndex = 0; - const typeParams = type.details.parameters; - - // The last parameter might be a var arg dictionary. If so, strip it off. - const varArgDictParam = typeParams.find((param) => param.category === ParameterCategory.VarArgDictionary); + let matchedUnpackedListOfUnknownLength = false; let reportedArgError = false; - - // Is there a positional-only "/" parameter? If so, it separates the - // positional-only from positional or keyword parameters. - let positionalOnlyIndex = typeParams.findIndex( - (param) => param.category === ParameterCategory.Simple && !param.name - ); + let isTypeIncomplete = false; + let isVariadicTypeVarFullyMatched = false; // Build a map of parameters by name. const paramMap = new Map(); - typeParams.forEach((param, index) => { + paramDetails.params.forEach((paramInfo) => { + const param = paramInfo.param; if (param.name && param.category === ParameterCategory.Simple) { paramMap.set(param.name, { argsNeeded: param.category === ParameterCategory.Simple && !param.hasDefault ? 
1 : 0, argsReceived: 0, - isPositionalOnly: positionalOnlyIndex >= 0 && index < positionalOnlyIndex, + isPositionalOnly: paramInfo.source === ParameterSource.PositionOnly, }); } }); - // Is there a bare (nameless) "*" parameter? If so, it signifies the end - // of the positional parameter list. - let positionalParamCount = typeParams.findIndex( - (param) => param.category === ParameterCategory.VarArgList && !param.name - ); - - const varArgListParamIndex = typeParams.findIndex((param) => param.category === ParameterCategory.VarArgList); - const varArgDictParamIndex = typeParams.findIndex( - (param) => param.category === ParameterCategory.VarArgDictionary - ); - - // Is there a var-arg (named "*") parameter? If so, it is the last of - // the positional parameters. - if (positionalParamCount < 0) { - positionalParamCount = varArgListParamIndex; - if (positionalParamCount >= 0) { - positionalParamCount++; - } - } + let positionalOnlyLimitIndex = paramDetails.positionOnlyParamCount; + let positionParamLimitIndex = paramDetails.firstKeywordOnlyIndex ?? paramDetails.params.length; - // Is there a keyword var-arg ("**") parameter? If so, it's not included - // in the list of positional parameters. - if (positionalParamCount < 0) { - positionalParamCount = varArgDictParamIndex; - } + const varArgListParamIndex = paramDetails.argsIndex; + const varArgDictParamIndex = paramDetails.kwargsIndex; // Is this an function that uses the *args and **kwargs // from a param spec? 
If so, we need to treat all positional parameters @@ -7130,9 +8504,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let paramSpecTarget: TypeVarType | undefined; let hasParamSpecArgsKwargs = false; - if (varArgListParamIndex >= 0 && varArgDictParamIndex >= 0) { - const varArgListParam = typeParams[varArgListParamIndex]; - const varArgDictParam = typeParams[varArgDictParamIndex]; + if (varArgListParamIndex !== undefined && varArgDictParamIndex !== undefined) { + assert(paramDetails.params[varArgListParamIndex], 'varArgListParamIndex params entry is undefined'); + const varArgListParam = paramDetails.params[varArgListParamIndex].param; + assert(paramDetails.params[varArgDictParamIndex], 'varArgDictParamIndex params entry is undefined'); + const varArgDictParam = paramDetails.params[varArgDictParamIndex].param; + if ( isParamSpec(varArgListParam.type) && varArgListParam.type.paramSpecAccess === 'args' && @@ -7149,7 +8526,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions paramSpecArgList = []; paramSpecTarget = TypeVarType.cloneForParamSpecAccess(varArgListParam.type, undefined); } else { - positionalOnlyIndex = varArgListParamIndex; + positionalOnlyLimitIndex = varArgListParamIndex; } } } @@ -7159,28 +8536,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // the number of positional parameters. argList.forEach((arg) => { if (arg.name) { - const namedParamIndex = typeParams.findIndex( - (param) => param.name === arg.name!.value && param.category === ParameterCategory.Simple + const keywordParamIndex = paramDetails.params.findIndex( + (paramInfo) => + paramInfo.param.name === arg.name!.value && + paramInfo.param.category === ParameterCategory.Simple ); // Is this a parameter that can be interpreted as either a keyword or a positional? // If so, we'll treat it as a keyword parameter in this case because it's being // targeted by a keyword argument. 
- if (namedParamIndex >= 0 && namedParamIndex > positionalOnlyIndex) { - if (positionalParamCount < 0 || namedParamIndex < positionalParamCount) { - positionalParamCount = namedParamIndex; + if (keywordParamIndex >= 0 && keywordParamIndex >= positionalOnlyLimitIndex) { + if (positionParamLimitIndex < 0 || keywordParamIndex < positionParamLimitIndex) { + positionParamLimitIndex = keywordParamIndex; } } } }); // If we didn't see any special cases, then all parameters are positional. - if (positionalParamCount < 0) { - positionalParamCount = typeParams.length; + if (positionParamLimitIndex < 0) { + positionParamLimitIndex = paramDetails.params.length; } // Determine how many positional args are being passed before - // we see a named arg. + // we see a keyword arg. let positionalArgCount = argList.findIndex( (arg) => arg.argumentCategory === ArgumentCategory.UnpackedDictionary || arg.name !== undefined ); @@ -7203,15 +8582,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Map the positional args to parameters. 
let paramIndex = 0; let unpackedArgIndex = 0; - let unpackedParamIndex = 0; while (argIndex < positionalArgCount) { - if (paramIndex === positionalOnlyIndex) { - paramIndex++; - continue; - } - - if (argIndex < positionalOnlyIndex && argList[argIndex].name) { + if (argIndex < positionalOnlyLimitIndex && argList[argIndex].name) { const fileInfo = AnalyzerNodeInfo.getFileInfo(argList[argIndex].name!); addDiagnostic( fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, @@ -7222,15 +8595,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions reportedArgError = true; } - if (paramIndex >= positionalParamCount) { + if (paramIndex >= positionParamLimitIndex) { if (!foundUnpackedListArg || argList[argIndex].argumentCategory !== ArgumentCategory.UnpackedList) { addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, - positionalParamCount === 1 + positionParamLimitIndex === 1 ? Localizer.Diagnostic.argPositionalExpectedOne() : Localizer.Diagnostic.argPositionalExpectedCount().format({ - expected: positionalParamCount, + expected: positionParamLimitIndex, }), argList[argIndex].valueExpression || errorNode ); @@ -7239,16 +8612,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions break; } - const paramType = FunctionType.getEffectiveParameterType(type, paramIndex); + if (paramIndex >= paramDetails.params.length) { + break; + } + + assert(paramDetails.params[paramIndex], 'paramIndex params entry is undefined'); + const paramType = paramDetails.params[paramIndex].type; if (argList[argIndex].argumentCategory === ArgumentCategory.UnpackedList) { if (!argList[argIndex].valueExpression) { break; } const isParamVariadic = - typeParams[paramIndex].category === ParameterCategory.VarArgList && isVariadicTypeVar(paramType); + paramDetails.params[paramIndex].param.category === ParameterCategory.VarArgList && + isVariadicTypeVar(paramType); let 
isArgCompatibleWithVariadic = false; - const argType = getTypeForArgument(argList[argIndex]); + const argTypeResult = getTypeForArgument(argList[argIndex]); + const argType = argTypeResult.type; let listElementType: Type | undefined; let advanceToNextArg = false; @@ -7256,14 +8636,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // with a ParamSpec and a Concatenate operator. PEP 612 indicates that // all positional parameters specified in the Concatenate must be // filled explicitly. - if (type.details.paramSpec && paramIndex < positionalParamCount) { + if (type.details.paramSpec && paramIndex < positionParamLimitIndex) { addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, - positionalParamCount === 1 + positionParamLimitIndex === 1 ? Localizer.Diagnostic.argPositionalExpectedOne() : Localizer.Diagnostic.argPositionalExpectedCount().format({ - expected: positionalParamCount, + expected: positionParamLimitIndex, }), argList[argIndex].valueExpression || errorNode ); @@ -7274,14 +8654,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // specified types rather than using the more generic iterator // type which will be a union of all element types. const combinedTupleType = combineSameSizedTuples(makeTopLevelTypeVarsConcrete(argType), tupleClassType); + if ( !isParamVariadic && combinedTupleType && isClassInstance(combinedTupleType) && combinedTupleType.tupleTypeArguments && - combinedTupleType.tupleTypeArguments.length > 0 + combinedTupleType.tupleTypeArguments.length > 0 && + unpackedArgIndex < combinedTupleType.tupleTypeArguments.length ) { - listElementType = combinedTupleType.tupleTypeArguments[unpackedArgIndex]; + listElementType = combinedTupleType.tupleTypeArguments[unpackedArgIndex].type; // Determine if there are any more unpacked list arguments after // this one. 
If not, we'll clear this flag because this unpacked @@ -7301,12 +8683,31 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // unpacked variadic type variable. listElementType = argType; isArgCompatibleWithVariadic = true; + advanceToNextArg = true; + isVariadicTypeVarFullyMatched = true; + } else if ( + isClassInstance(argType) && + isTupleClass(argType) && + argType.tupleTypeArguments && + argType.tupleTypeArguments.length === 1 && + isVariadicTypeVar(argType.tupleTypeArguments[0].type) + ) { + // Handle the case where an unpacked variadic type var has + // been packaged into a tuple. + listElementType = argType.tupleTypeArguments[0].type; + isArgCompatibleWithVariadic = true; + advanceToNextArg = true; + isVariadicTypeVarFullyMatched = true; } else if (isParamSpec(argType) && argType.paramSpecAccess === 'args') { listElementType = undefined; } else { listElementType = getTypeFromIterator(argType, /* isAsync */ false, argList[argIndex].valueExpression!) || UnknownType.create(); + + if (paramDetails.params[paramIndex].param.category !== ParameterCategory.VarArgList) { + matchedUnpackedListOfUnknownLength = true; + } } const funcArg: FunctionArgument | undefined = listElementType @@ -7315,8 +8716,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type: listElementType, } : undefined; + if (funcArg && argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } - const paramName = typeParams[paramIndex].name; + const paramName = paramDetails.params[paramIndex].param.name; // It's not allowed to use unpacked arguments with a variadic *args // parameter unless the argument is a variadic arg as well. 
@@ -7335,102 +8739,108 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (funcArg) { validateArgTypeParams.push({ - paramCategory: typeParams[paramIndex].category, + paramCategory: paramDetails.params[paramIndex].param.category, paramType, requiresTypeVarMatching: requiresSpecialization(paramType), argument: funcArg, errorNode: argList[argIndex].valueExpression || errorNode, - paramName: typeParams[paramIndex].isNameSynthesized ? undefined : paramName, + paramName: paramDetails.params[paramIndex].param.isNameSynthesized ? undefined : paramName, }); } } - trySetActive(argList[argIndex], typeParams[paramIndex]); + trySetActive(argList[argIndex], paramDetails.params[paramIndex].param); // Note that the parameter has received an argument. if ( paramName && - typeParams[paramIndex].category === ParameterCategory.Simple && + paramDetails.params[paramIndex].param.category === ParameterCategory.Simple && paramMap.has(paramName) ) { paramMap.get(paramName)!.argsReceived++; } - if (advanceToNextArg || typeParams[paramIndex].category === ParameterCategory.VarArgList) { + if ( + advanceToNextArg || + paramDetails.params[paramIndex].param.category === ParameterCategory.VarArgList + ) { argIndex++; } - if (typeParams[paramIndex].category !== ParameterCategory.VarArgList) { + if ( + isVariadicTypeVarFullyMatched || + paramDetails.params[paramIndex].param.category !== ParameterCategory.VarArgList + ) { paramIndex++; } - } else if (typeParams[paramIndex].category === ParameterCategory.VarArgList) { - trySetActive(argList[argIndex], typeParams[paramIndex]); + } else if (paramDetails.params[paramIndex].param.category === ParameterCategory.VarArgList) { + trySetActive(argList[argIndex], paramDetails.params[paramIndex].param); if (paramSpecArgList) { paramSpecArgList.push(argList[argIndex]); + argIndex++; } else { - let paramCategory = typeParams[paramIndex].category; + let paramCategory = paramDetails.params[paramIndex].param.category; let 
effectiveParamType = paramType; - const paramName = typeParams[paramIndex].name; + const paramName = paramDetails.params[paramIndex].param.name; - // Handle the case where the target parameter is a variadic type variable - // that has been specialized with a tuple of types. if ( - isVariadicTypeVar(typeParams[paramIndex].type) && - isClassInstance(paramType) && - isTupleClass(paramType) && + isUnpackedClass(paramType) && paramType.tupleTypeArguments && - unpackedParamIndex < paramType.tupleTypeArguments.length + paramType.tupleTypeArguments.length > 0 ) { - effectiveParamType = paramType.tupleTypeArguments[unpackedParamIndex]; - paramCategory = isVariadicTypeVar(effectiveParamType) - ? ParameterCategory.VarArgList - : ParameterCategory.Simple; - - unpackedParamIndex++; - const paramsToFillCount = positionalArgCount - argIndex - 1; - const argsRemainingCount = paramType.tupleTypeArguments.length - unpackedParamIndex; - - if (unpackedParamIndex >= paramType.tupleTypeArguments.length) { - paramIndex++; - } else if (argsRemainingCount > 0 && paramsToFillCount <= 0) { + effectiveParamType = paramType.tupleTypeArguments[0].type; + } + + paramCategory = isVariadicTypeVar(effectiveParamType) + ? ParameterCategory.VarArgList + : ParameterCategory.Simple; + + const remainingArgCount = positionalArgCount - argIndex; + const remainingParamCount = positionParamLimitIndex - paramIndex - 1; + + if (remainingArgCount <= remainingParamCount) { + if (remainingArgCount < remainingParamCount) { // Have we run out of arguments and still have parameters left to fill? addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, - argsRemainingCount === 1 + remainingArgCount === 1 ? 
Localizer.Diagnostic.argMorePositionalExpectedOne() : Localizer.Diagnostic.argMorePositionalExpectedCount().format({ - expected: argsRemainingCount, + expected: remainingArgCount, }), argList[argIndex].valueExpression || errorNode ); reportedArgError = true; } - } - validateArgTypeParams.push({ - paramCategory, - paramType: effectiveParamType, - requiresTypeVarMatching: requiresSpecialization(paramType), - argument: argList[argIndex], - errorNode: argList[argIndex].valueExpression || errorNode, - paramName, - mapsToVarArgList: true, - }); + paramIndex++; + } else { + validateArgTypeParams.push({ + paramCategory, + paramType: effectiveParamType, + requiresTypeVarMatching: requiresSpecialization(paramType), + argument: argList[argIndex], + errorNode: argList[argIndex].valueExpression || errorNode, + paramName, + mapsToVarArgList: true, + }); + + argIndex++; + } } - argIndex++; } else { - const paramName = typeParams[paramIndex].name; + const paramName = paramDetails.params[paramIndex].param.name; validateArgTypeParams.push({ - paramCategory: typeParams[paramIndex].category, + paramCategory: paramDetails.params[paramIndex].param.category, paramType, requiresTypeVarMatching: requiresSpecialization(paramType), argument: argList[argIndex], errorNode: argList[argIndex].valueExpression || errorNode, - paramName: typeParams[paramIndex].isNameSynthesized ? undefined : paramName, + paramName: paramDetails.params[paramIndex].param.isNameSynthesized ? undefined : paramName, }); - trySetActive(argList[argIndex], typeParams[paramIndex]); + trySetActive(argList[argIndex], paramDetails.params[paramIndex].param); // Note that the parameter has received an argument. if (paramName && paramMap.has(paramName)) { @@ -7445,15 +8855,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Check if there weren't enough positional arguments to populate all of // the positional-only parameters. 
if ( - positionalOnlyIndex >= 0 && - positionalArgCount < positionalOnlyIndex && + positionalOnlyLimitIndex >= 0 && + paramIndex < positionalOnlyLimitIndex && (!foundUnpackedListArg || hasParamSpecArgsKwargs) ) { - const firstParamWithDefault = typeParams.findIndex((param) => param.hasDefault); + const firstParamWithDefault = paramDetails.params.findIndex((paramInfo) => paramInfo.param.hasDefault); const positionOnlyWithoutDefaultsCount = - firstParamWithDefault >= 0 && firstParamWithDefault < positionalOnlyIndex + firstParamWithDefault >= 0 && firstParamWithDefault < positionalOnlyLimitIndex ? firstParamWithDefault - : positionalOnlyIndex; + : positionalOnlyLimitIndex; const argsRemainingCount = positionOnlyWithoutDefaultsCount - positionalArgCount; if (argsRemainingCount > 0) { addDiagnostic( @@ -7479,7 +8889,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions while (argIndex < argList.length) { if (argList[argIndex].argumentCategory === ArgumentCategory.UnpackedDictionary) { // Verify that the type used in this expression is a Mapping[str, T]. 
- const argType = getTypeForArgument(argList[argIndex]); + const argType = getTypeForArgument(argList[argIndex]).type; if (isAnyOrUnknown(argType)) { unpackedDictionaryArgType = argType; } else if (isClassInstance(argType) && ClassType.isTypedDictClass(argType)) { @@ -7496,9 +8906,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } else { paramEntry.argsReceived++; - const paramInfoIndex = typeParams.findIndex((param) => param.name === name); + const paramInfoIndex = paramDetails.params.findIndex( + (paramInfo) => paramInfo.param.name === name + ); assert(paramInfoIndex >= 0); - const paramType = FunctionType.getEffectiveParameterType(type, paramInfoIndex); + const paramType = paramDetails.params[paramInfoIndex].type; validateArgTypeParams.push({ paramCategory: ParameterCategory.Simple, @@ -7512,13 +8924,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions paramName: name, }); } - } else if (varArgDictParam) { - assert(varArgDictParamIndex >= 0); - const paramType = FunctionType.getEffectiveParameterType(type, varArgDictParamIndex); + } else if (paramDetails.kwargsIndex !== undefined) { + const paramType = paramDetails.params[paramDetails.kwargsIndex].type; validateArgTypeParams.push({ paramCategory: ParameterCategory.VarArgDictionary, paramType, - requiresTypeVarMatching: requiresSpecialization(varArgDictParam.type), + requiresTypeVarMatching: requiresSpecialization(paramType), argument: { argumentCategory: ArgumentCategory.Simple, type: entry.valueType, @@ -7570,7 +8981,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( ClassType.cloneAsInstance(mappingType), argType, - new DiagnosticAddendum(), + /* diag */ undefined, mappingTypeVarMap ) ) { @@ -7580,7 +8991,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) as ClassType; const typeArgs = specializedMapping.typeArguments; if (typeArgs && typeArgs.length >= 
2) { - if (canAssignType(strObjType, typeArgs[0], new DiagnosticAddendum())) { + if (canAssignType(strObjType, typeArgs[0])) { isValidMappingType = true; } unpackedDictionaryArgType = typeArgs[1]; @@ -7625,9 +9036,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } else { paramEntry.argsReceived++; - const paramInfoIndex = typeParams.findIndex((param) => param.name === paramNameValue); + const paramInfoIndex = paramDetails.params.findIndex( + (paramInfo) => paramInfo.param.name === paramNameValue + ); assert(paramInfoIndex >= 0); - const paramType = FunctionType.getEffectiveParameterType(type, paramInfoIndex); + const paramType = paramDetails.params[paramInfoIndex].type; validateArgTypeParams.push({ paramCategory: ParameterCategory.Simple, @@ -7637,18 +9050,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions errorNode: argList[argIndex].valueExpression || errorNode, paramName: paramNameValue, }); - trySetActive(argList[argIndex], typeParams[paramInfoIndex]); + trySetActive(argList[argIndex], paramDetails.params[paramInfoIndex].param); } - } else if (varArgDictParam) { - assert(varArgDictParamIndex >= 0); + } else if (paramDetails.kwargsIndex !== undefined) { if (paramSpecArgList) { paramSpecArgList.push(argList[argIndex]); } else { - const paramType = FunctionType.getEffectiveParameterType(type, varArgDictParamIndex); + const paramType = paramDetails.params[paramDetails.kwargsIndex].type; validateArgTypeParams.push({ paramCategory: ParameterCategory.VarArgDictionary, paramType, - requiresTypeVarMatching: requiresSpecialization(varArgDictParam.type), + requiresTypeVarMatching: requiresSpecialization(paramType), argument: argList[argIndex], errorNode: argList[argIndex].valueExpression || errorNode, paramName: paramNameValue, @@ -7661,7 +9073,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isPositionalOnly: false, }); } - trySetActive(argList[argIndex], 
varArgDictParam); + assert( + paramDetails.params[paramDetails.kwargsIndex], + 'paramDetails.kwargsIndex params entry is undefined' + ); + trySetActive(argList[argIndex], paramDetails.params[paramDetails.kwargsIndex].param); } else { addDiagnostic( AnalyzerNodeInfo.getFileInfo(paramName).diagnosticRuleSet.reportGeneralTypeIssues, @@ -7672,14 +9088,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions reportedArgError = true; } } else if (argList[argIndex].argumentCategory === ArgumentCategory.Simple) { - const adjustedCount = positionalParamCount; const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); addDiagnostic( fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, - adjustedCount === 1 + positionParamLimitIndex === 1 ? Localizer.Diagnostic.argPositionalExpectedOne() - : Localizer.Diagnostic.argPositionalExpectedCount().format({ expected: adjustedCount }), + : Localizer.Diagnostic.argPositionalExpectedCount().format({ + expected: positionParamLimitIndex, + }), argList[argIndex].valueExpression || errorNode ); reportedArgError = true; @@ -7691,25 +9108,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If there are keyword-only parameters that haven't been matched but we // have an unpacked dictionary arg, assume that it applies to them. - if (unpackedDictionaryArgType && (!foundUnpackedListArg || varArgListParamIndex >= 0)) { + if (unpackedDictionaryArgType && (!foundUnpackedListArg || paramDetails.argsIndex !== undefined)) { // Don't consider any position-only parameters, since they cannot be matched to // **kwargs arguments. Consider parameters that are either positional or keyword // if there is no *args argument. - const firstKeywordArgIndex = foundUnpackedListArg - ? varArgListParamIndex + 1 - : positionalOnlyIndex >= 0 - ? 
positionalOnlyIndex + 1 - : 0; - typeParams.forEach((param, paramIndex) => { + paramDetails.params.forEach((paramInfo, paramIndex) => { + const param = paramInfo.param; if ( - paramIndex >= firstKeywordArgIndex && + paramIndex >= paramDetails.firstPositionOrKeywordIndex && param.category === ParameterCategory.Simple && param.name && !param.hasDefault && paramMap.has(param.name) && paramMap.get(param.name)!.argsReceived === 0 ) { - const paramType = FunctionType.getEffectiveParameterType(type, paramIndex); + const paramType = paramDetails.params[paramIndex].type; validateArgTypeParams.push({ paramCategory: ParameterCategory.Simple, paramType, @@ -7718,7 +9131,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions argumentCategory: ArgumentCategory.Simple, type: unpackedDictionaryArgType!, }, - errorNode: errorNode, + errorNode: + argList.find((arg) => arg.argumentCategory === ArgumentCategory.UnpackedDictionary) + ?.valueExpression ?? errorNode, paramName: param.isNameSynthesized ? undefined : param.name, }); @@ -7755,20 +9170,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // def foo(v1: _T = 'default') // and _T is a TypeVar, we need to match the TypeVar to the default // value's type if it's not provided by the caller. 
- typeParams.forEach((param, index) => { + paramDetails.params.forEach((paramInfo) => { + const param = paramInfo.param; if (param.category === ParameterCategory.Simple && param.name) { const entry = paramMap.get(param.name)!; if (entry.argsNeeded === 0 && entry.argsReceived === 0) { - const paramType = FunctionType.getEffectiveParameterType(type, index); - if ( param.defaultType && !isEllipsisType(param.defaultType) && - requiresSpecialization(paramType) + requiresSpecialization(param.type) ) { validateArgTypeParams.push({ paramCategory: param.category, - paramType: paramType, + paramType: param.type, requiresTypeVarMatching: true, argument: { argumentCategory: ArgumentCategory.Simple, @@ -7788,29 +9202,54 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // bother doing the extra work here. This occurs frequently when attempting to find the // correct overload. if (!reportedArgError || !speculativeTypeTracker.isSpeculative(undefined)) { - // If there are arguments that map to a variadic *args parameter, see - // if the type of that *args parameter is a variadic type variable. If so, - // we'll preprocess those arguments and combine them into a tuple. - if (varArgListParamIndex >= 0 && typeParams[varArgListParamIndex].hasDeclaredType) { - const paramType = FunctionType.getEffectiveParameterType(type, varArgListParamIndex); + // If there are arguments that map to a variadic *args parameter that hasn't + // already been matched, see if the type of that *args parameter is a variadic + // type variable. If so, we'll preprocess those arguments and combine them + // into a tuple. 
+ if ( + paramDetails.argsIndex !== undefined && + paramDetails.argsIndex >= 0 && + paramDetails.params[paramDetails.argsIndex].param.hasDeclaredType && + !isVariadicTypeVarFullyMatched + ) { + const paramType = paramDetails.params[paramDetails.argsIndex].type; const variadicArgs = validateArgTypeParams.filter((argParam) => argParam.mapsToVarArgList); if (isTypeVar(paramType) && paramType.details.isVariadic) { - // TODO - check whether any of the arguments in variadicArgs are - // variadic (*args). These are not allowed because we don't know - // their length. - if (tupleClassType && isInstantiableClass(tupleClassType)) { - const tupleTypeArgs = variadicArgs.map((argParam) => - stripLiteralValue(getTypeForArgument(argParam.argument)) - ); + const tupleTypeArgs: TupleTypeArgument[] = variadicArgs.map((argParam) => { + const argType = getTypeForArgument(argParam.argument).type; + const containsVariadicTypeVar = + isUnpackedVariadicTypeVar(argType) || + (isClassInstance(argType) && + isTupleClass(argType) && + argType.tupleTypeArguments && + argType.tupleTypeArguments.some((arg) => isUnpackedVariadicTypeVar(arg.type))); + + if ( + containsVariadicTypeVar && + argParam.argument.argumentCategory !== ArgumentCategory.UnpackedList + ) { + addDiagnostic( + AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.typeVarTupleMustBeUnpacked(), + argParam.argument.valueExpression ?? 
errorNode + ); + } + + return { + type: stripLiteralValue(argType), + isUnbounded: argParam.argument.argumentCategory === ArgumentCategory.UnpackedList, + }; + }); const specializedTuple = ClassType.cloneAsInstance( specializeTupleClass( tupleClassType, tupleTypeArgs, /* isTypeArgumentExplicit */ true, /* stripLiterals */ true, - /* isForUnpackedVariadicTypeVar */ true + /* isUnpackedTuple */ true ) ); @@ -7820,7 +9259,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions requiresTypeVarMatching: true, argument: { argumentCategory: ArgumentCategory.Simple, type: specializedTuple }, errorNode, - paramName: typeParams[varArgListParamIndex].name, + paramName: paramDetails.params[paramDetails.argsIndex].param.name, mapsToVarArgList: true, }; @@ -7833,36 +9272,160 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + let relevance = 0; + if (matchedUnpackedListOfUnknownLength) { + // Lower the relevance if we made assumptions about the length + // of an unpacked argument. This will favor overloads that + // associate this case with a *args parameter. + relevance--; + } + + // Special-case the builtin isinstance and issubclass functions. + if ( + ['isinstance', 'issubclass'].some((name) => name === type.details.builtInName) && + validateArgTypeParams.length === 2 + ) { + validateArgTypeParams[1].expectingType = true; + } + return { + overload: type, + overloadIndex, argumentErrors: reportedArgError, + isTypeIncomplete, argParams: validateArgTypeParams, paramSpecTarget, paramSpecArgList, activeParam, + relevance, }; } // After having matched arguments with parameters, this function evaluates the // types of each argument expression and validates that the resulting type is // compatible with the declared type of the corresponding parameter. 
- function validateFunctionArgumentTypes( + function validateFunctionArgumentTypesWithExpectedType( errorNode: ExpressionNode, matchResults: MatchArgsToParamsResult, - type: FunctionType, typeVarMap: TypeVarMap, skipUnknownArgCheck = false, - expectedType?: Type + expectedType: Type | undefined ): CallResult { - let isTypeIncomplete = false; - let argumentErrors = false; + const type = matchResults.overload; + + if ( + !expectedType || + isAnyOrUnknown(expectedType) || + isNever(expectedType) || + requiresSpecialization(expectedType) || + !type.details.declaredReturnType + ) { + return validateFunctionArgumentTypes(errorNode, matchResults, typeVarMap, skipUnknownArgCheck); + } + + const effectiveReturnType = getFunctionEffectiveReturnType(type); + let effectiveExpectedType: Type | undefined = expectedType; + let effectiveFlags = CanAssignFlags.AllowTypeVarNarrowing; + if (containsLiteralType(effectiveExpectedType, /* includeTypeArgs */ true)) { + effectiveFlags |= CanAssignFlags.RetainLiteralsForTypeVar; + } + + // If the expected type is a union, we don't know which type is expected. + // We may or may not be able to make use of the expected type. We'll evaluate + // speculatively to see if using the expected type works. + if (isUnion(expectedType)) { + let speculativeResults: CallResult | undefined; + + useSpeculativeMode(errorNode, () => { + const typeVarMapCopy = typeVarMap.clone(); + canAssignType( + effectiveReturnType, + effectiveExpectedType!, + /* diag */ undefined, + typeVarMapCopy, + effectiveFlags | CanAssignFlags.PopulatingExpectedType + ); + speculativeResults = validateFunctionArgumentTypes( + errorNode, + matchResults, + typeVarMapCopy, + skipUnknownArgCheck + ); + }); + + if (speculativeResults && speculativeResults.argumentErrors) { + effectiveExpectedType = undefined; + } + } + + if (effectiveExpectedType) { + // Prepopulate the typeVarMap based on the specialized expected type if the + // callee has a declared return type. 
This will allow us to more closely match + // the expected type if possible. We set the AllowTypeVarNarrowing and + // SkipStripLiteralForTypeVar flags so the type can be further narrowed + // and so literals are not stripped. + + // If the return type is not the same as the expected type but is + // assignable to the expected type, determine which type arguments + // are needed to match the expected type. + if ( + isClassInstance(effectiveReturnType) && + isClassInstance(effectiveExpectedType) && + !ClassType.isSameGenericClass(effectiveReturnType, effectiveExpectedType) + ) { + const tempTypeVarMap = new TypeVarMap(getTypeVarScopeId(effectiveReturnType)); + populateTypeVarMapBasedOnExpectedType( + effectiveReturnType, + effectiveExpectedType, + tempTypeVarMap, + getTypeVarScopesForNode(errorNode) + ); + + const genericReturnType = ClassType.cloneForSpecialization( + effectiveReturnType, + /* typeArguments */ undefined, + /* isTypeArgumentExplicit */ false + ); + + effectiveExpectedType = applySolvedTypeVars(genericReturnType, tempTypeVarMap); + } + + canAssignType( + effectiveReturnType, + effectiveExpectedType, + /* diag */ undefined, + typeVarMap, + effectiveFlags | CanAssignFlags.PopulatingExpectedType + ); + } + + return validateFunctionArgumentTypes(errorNode, matchResults, typeVarMap, skipUnknownArgCheck); + } + function validateFunctionArgumentTypes( + errorNode: ExpressionNode, + matchResults: MatchArgsToParamsResult, + typeVarMap: TypeVarMap, + skipUnknownArgCheck = false + ): CallResult { + const type = matchResults.overload; + let isTypeIncomplete = matchResults.isTypeIncomplete; + let argumentErrors = false; + let specializedInitSelfType: Type | undefined; const typeCondition = getTypeCondition(type); - // If the function was bound to a class or object, it's possible that - // some of that class's type variables have not yet been solved. Add - // that class's TypeVar scope ID. 
if (type.boundTypeVarScopeId) { - typeVarMap.addSolveForScope(type.boundTypeVarScopeId); + // If the function was bound to a class or object and was a constructor, a + // static method or a class method, it's possible that some of that class's + // type variables have not yet been solved. Add that class's TypeVar scope ID. + if (type.preBoundFlags !== undefined && type.boundToType && requiresSpecialization(type.boundToType)) { + if ( + type.preBoundFlags & + (FunctionTypeFlags.StaticMethod | FunctionTypeFlags.ClassMethod | FunctionTypeFlags.StaticMethod) + ) { + typeVarMap.addSolveForScope(type.boundTypeVarScopeId); + } + } // Some typeshed stubs use specialized type annotations in the "self" parameter // of an overloaded __init__ method to specify which specialized type should @@ -7879,6 +9442,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions type.strippedFirstParamType.typeArguments ) { const typeParams = type.strippedFirstParamType.details.typeParameters; + specializedInitSelfType = type.strippedFirstParamType; type.strippedFirstParamType.typeArguments.forEach((typeArg, index) => { if (index < typeParams.length) { const typeParam = typeParams[index]; @@ -7890,31 +9454,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - if ( - expectedType && - !isAnyOrUnknown(expectedType) && - !requiresSpecialization(expectedType) && - type.details.declaredReturnType - ) { - // If the expected type is a union, we don't know which type is expected, - // so avoid using the expected type. The exception is if there are literals - // in the union, where it's important to prepopulate the literals. - if (!isUnion(expectedType) || containsLiteralType(expectedType)) { - // Prepopulate the typeVarMap based on the specialized expected type if the - // callee has a declared return type. This will allow us to more closely match - // the expected type if possible. 
We set the AllowTypeVarNarrowing and - // SkipStripLiteralForTypeVar flags so the type can be further narrowed - // and so literals are not stripped. - canAssignType( - getFunctionEffectiveReturnType(type), - expectedType, - new DiagnosticAddendum(), - typeVarMap, - CanAssignFlags.AllowTypeVarNarrowing | CanAssignFlags.RetainLiteralsForTypeVar - ); - } - } - // Special-case a few built-in calls that are often used for // casting or checking for unknown types. if (['cast', 'isinstance', 'issubclass'].some((name) => name === type.details.builtInName)) { @@ -7935,21 +9474,38 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // In practice, we will limit the number of passes to 2 because it can get // very expensive to go beyond this, and we don't see generally see cases // where more than two passes are needed. - const passCount = Math.min(typeVarMatchingCount, 2); + let passCount = Math.min(typeVarMatchingCount, 2); for (let i = 0; i < passCount; i++) { useSpeculativeMode(errorNode, () => { matchResults.argParams.forEach((argParam) => { if (argParam.requiresTypeVarMatching) { + // Populate the typeVarMap for the argument. If the argument + // is an overload function, skip it during the first pass + // because the selection of the proper overload may depend + // on type arguments supplied by other function arguments. + // Set useNarrowBoundOnly to true the first time through + // the loop if we're going to go through the loop multiple + // times. const argResult = validateArgType( argParam, typeVarMap, - type.details.name, + type, skipUnknownArgCheck, + /* skipOverloadArg */ i === 0, + /* useNarrowBoundOnly */ passCount > 1 && i === 0, typeCondition ); + if (argResult.isTypeIncomplete) { isTypeIncomplete = true; } + + // If we skipped a overload arg during the first pass, + // add another pass to ensure that we handle all of the + // type variables. 
+ if (i === 0 && argResult.skippedOverloadArg) { + passCount++; + } } }); }); @@ -7964,8 +9520,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const argResult = validateArgType( argParam, typeVarMap, - type.details.name, + type, skipUnknownArgCheck, + /* skipOverloadArg */ false, + /* useNarrowBoundOnly */ false, typeCondition ); @@ -8018,6 +9576,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions curNode = typeVarScopeNode; } + // If the function is returning a callable, don't eliminate unsolved + // type vars within a union. There are legit uses for unsolved type vars + // within a callable. + if (isFunction(returnType) || isOverloadedFunction(returnType)) { + eliminateUnsolvedInUnions = false; + } + let specializedReturnType = addConditionToType( applySolvedTypeVars( returnType, @@ -8029,17 +9594,27 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeCondition ); - // Handle 'TypeGuard' specially. We'll transform the return type into a 'bool' - // object with a type argument that reflects the narrowed type. + // If the final return type is an unpacked tuple, turn it into a normal (unpacked) tuple. + if (isUnpackedClass(specializedReturnType)) { + specializedReturnType = ClassType.cloneForUnpacked(specializedReturnType, /* isUnpackedTuple */ false); + } + + // Handle 'TypeGuard' and 'StrictTypeGuard' specially. We'll transform the + // return type into a 'bool' object with a type argument that reflects the + // narrowed type. 
if ( isClassInstance(specializedReturnType) && - ClassType.isBuiltIn(specializedReturnType, 'TypeGuard') && + ClassType.isBuiltIn(specializedReturnType, ['TypeGuard', 'StrictTypeGuard']) && specializedReturnType.typeArguments && specializedReturnType.typeArguments.length > 0 ) { if (boolClassType && isInstantiableClass(boolClassType)) { specializedReturnType = ClassType.cloneAsInstance( - ClassType.cloneForTypeGuard(boolClassType, specializedReturnType.typeArguments[0]) + ClassType.cloneForTypeGuard( + boolClassType, + specializedReturnType.typeArguments[0], + ClassType.isBuiltIn(specializedReturnType, 'StrictTypeGuard') + ) ); } } @@ -8056,11 +9631,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions }; } + if (specializedInitSelfType) { + specializedInitSelfType = applySolvedTypeVars(specializedInitSelfType, typeVarMap); + } + return { argumentErrors, returnType: specializedReturnType, isTypeIncomplete, activeParam: matchResults.activeParam, + specializedInitSelfType, }; } @@ -8075,7 +9655,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions skipUnknownArgCheck = false, expectedType?: Type ): CallResult { - const matchResults = matchFunctionArgumentsToParameters(errorNode, argList, type); + const matchResults = matchFunctionArgumentsToParameters(errorNode, argList, type, 0); if (matchResults.argumentErrors) { // Evaluate types of all args. 
This will ensure that referenced symbols are @@ -8094,10 +9674,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions }; } - return validateFunctionArgumentTypes( + return validateFunctionArgumentTypesWithExpectedType( errorNode, matchResults, - type, typeVarMap, skipUnknownArgCheck, expectedType @@ -8115,7 +9694,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ): boolean { const paramSpecValue = typeVarMap.getParamSpec(paramSpec); - if (!paramSpecValue || !paramSpecValue.concrete) { + if (!paramSpecValue) { addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -8125,11 +9704,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return false; } + // If the ParamSpec was bound to a generic function, some TypeVars may + // not yet be solved. Add the TypeVar scope for the bound function. + typeVarMap.addSolveForScope(paramSpecValue.typeVarScopeId); + let reportedArgError = false; // Build a map of all named parameters. 
const paramMap = new Map(); - const paramSpecParams = paramSpecValue.concrete.parameters; + const paramSpecParams = paramSpecValue.parameters; paramSpecParams.forEach((param) => { if (param.name) { paramMap.set(param.name, param); @@ -8137,6 +9720,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions }); let positionalIndex = 0; + let positionalIndexLimit = paramSpecParams.findIndex( + (paramInfo) => paramInfo.category !== ParameterCategory.Simple + ); + if (positionalIndexLimit < 0) { + positionalIndexLimit = paramSpecParams.length; + } + const argsParam = paramSpecParams.find((paramInfo) => paramInfo.category === ParameterCategory.VarArgList); + const kwargsParam = paramSpecParams.find( + (paramInfo) => paramInfo.category === ParameterCategory.VarArgDictionary + ); + argList.forEach((arg) => { if (arg.argumentCategory === ArgumentCategory.Simple) { let paramType: Type | undefined; @@ -8146,6 +9740,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (paramInfo) { paramType = paramInfo.type; paramMap.delete(arg.name.value); + } else if (kwargsParam) { + paramType = kwargsParam.type; } else { addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, @@ -8156,12 +9752,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions reportedArgError = true; } } else { - if (positionalIndex < paramSpecParams.length) { + if (positionalIndex < positionalIndexLimit) { const paramInfo = paramSpecParams[positionalIndex]; paramType = paramInfo.type; if (paramInfo.name) { paramMap.delete(paramInfo.name); } + } else if (argsParam) { + paramType = argsParam.type; } else { addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, @@ -8190,8 +9788,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions errorNode: arg.valueExpression || errorNode, }, typeVarMap, - /* functionName 
*/ '', + /* functionType */ undefined, /* skipUnknownArgCheck */ false, + /* skipOverloadArg */ false, + /* useNarrowBoundOnly */ false, conditionFilter ) ) { @@ -8214,7 +9814,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return paramInfo.category === ParameterCategory.Simple && !paramInfo.hasDefault; }); - if (unassignedParams.length > 0) { + if (unassignedParams.length > 0 && !paramSpecValue.paramSpec) { const missingParamNames = unassignedParams.map((p) => `"${p}"`).join(', '); addDiagnostic( AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, @@ -8234,31 +9834,37 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function validateArgType( argParam: ValidateArgTypeParams, typeVarMap: TypeVarMap, - functionName: string, + functionType: FunctionType | undefined, skipUnknownCheck: boolean, + skipOverloadArg: boolean, + useNarrowBoundOnly: boolean, conditionFilter: TypeCondition[] | undefined ): ArgResult { let argType: Type | undefined; let expectedTypeDiag: DiagnosticAddendum | undefined; let isTypeIncomplete = false; let isCompatible = true; + const functionName = functionType?.details.name; if (argParam.argument.valueExpression) { // If the param type is a "bare" TypeVar, don't use it as an expected // type. This causes problems for cases where the the call expression // result can influence the type of the TypeVar, such as in // the expression "min(1, max(2, 0.5))". We set useNarrowBoundOnly - // to true here because a wide bound on a TypeVar (if a narrow bound - // has not yet been established) will unnecessarily constrain the - // expected type. - let expectedType: Type | undefined = isTypeVar(argParam.paramType) - ? 
undefined - : applySolvedTypeVars( - argParam.paramType, - typeVarMap, - /* unknownIfNotFound */ false, - /* useNarrowBoundOnly */ true - ); + // to true if this is the first pass through the parameter list because + // a wide bound on a TypeVar (if a narrow bound has not yet been established) + // will unnecessarily constrain the expected type. + let expectedType: Type | undefined = + isTypeVar(argParam.paramType) && + functionType !== undefined && + argParam.paramType.scopeId === functionType.details.typeVarScopeId + ? undefined + : applySolvedTypeVars( + argParam.paramType, + typeVarMap, + /* unknownIfNotFound */ false, + useNarrowBoundOnly + ); // If the expected type is unknown, don't use an expected type. Instead, // use default rules for evaluating the expression type. @@ -8270,15 +9876,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (argParam.argType) { argType = argParam.argType; } else { - const exprType = getTypeOfExpression(argParam.argument.valueExpression, expectedType); - argType = exprType.type; - if (exprType.isIncomplete) { + const flags = argParam.expectingType + ? 
EvaluatorFlags.EvaluateStringLiteralAsType | + EvaluatorFlags.ParamSpecDisallowed | + EvaluatorFlags.TypeVarTupleDisallowed + : EvaluatorFlags.None; + const exprTypeResult = getTypeOfExpression(argParam.argument.valueExpression, expectedType, flags); + argType = exprTypeResult.type; + if (exprTypeResult.isIncomplete) { isTypeIncomplete = true; } - if (exprType.typeErrors) { + if (exprTypeResult.typeErrors) { isCompatible = false; } - expectedTypeDiag = exprType.expectedTypeDiagAddendum; + expectedTypeDiag = exprTypeResult.expectedTypeDiagAddendum; } if ( @@ -8286,14 +9897,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions argParam.argument.name && !speculativeTypeTracker.isSpeculative(argParam.errorNode) ) { - writeTypeCache(argParam.argument.name, expectedType || argType, isTypeIncomplete); + writeTypeCache(argParam.argument.name, expectedType || argType, EvaluatorFlags.None, isTypeIncomplete); } } else { // Was the argument's type precomputed by the caller? 
if (argParam.argType) { argType = argParam.argType; + } else if (argParam.expectingType && !argParam.argument.type && argParam.argument.valueExpression) { + const argTypeResult = getTypeOfExpression( + argParam.argument.valueExpression, + /* expectedType */ undefined, + EvaluatorFlags.EvaluateStringLiteralAsType | + EvaluatorFlags.ParamSpecDisallowed | + EvaluatorFlags.TypeVarTupleDisallowed + ); + argType = argTypeResult.type; + if (argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } } else { - argType = getTypeForArgument(argParam.argument); + const argTypeResult = getTypeForArgument(argParam.argument); + argType = argTypeResult.type; + if (argTypeResult.isIncomplete) { + isTypeIncomplete = true; + } } } @@ -8321,6 +9948,35 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { isCompatible, isTypeIncomplete }; } + // If we are asked to skip overload arguments, determine whether the argument + // is an explicit overload type, an overloaded class constructor, or a + // an overloaded callback protocol. 
+ if (skipOverloadArg) { + if (isOverloadedFunction(argType)) { + return { isCompatible, isTypeIncomplete, skippedOverloadArg: true }; + } + + const concreteParamType = makeTopLevelTypeVarsConcrete(argParam.paramType); + if (isFunction(concreteParamType) || isOverloadedFunction(concreteParamType)) { + if (isInstantiableClass(argType)) { + const constructor = createFunctionFromConstructor(argType); + if (constructor && isOverloadedFunction(constructor)) { + return { isCompatible, isTypeIncomplete, skippedOverloadArg: true }; + } + } + + if (isClassInstance(argType)) { + const callMember = lookUpObjectMember(argType, '__call__'); + if (callMember) { + const memberType = getTypeOfMember(callMember); + if (isOverloadedFunction(memberType)) { + return { isCompatible, isTypeIncomplete, skippedOverloadArg: true }; + } + } + } + } + } + if (!canAssignType(argParam.paramType, argType, diag.createAddendum(), typeVarMap)) { // Mismatching parameter types are common in untyped code; don't bother spending time // printing types if the diagnostic is disabled. @@ -8378,6 +10034,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions argParam.errorNode ); } + return { isCompatible: false, isTypeIncomplete }; } @@ -8403,7 +10060,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Do not check for unknown types if the expected type is "Any". // Don't print types if reportUnknownArgumentType is disabled for performance. 
- if (fileInfo.diagnosticRuleSet.reportUnknownArgumentType !== 'none' && !isAny(argParam.paramType)) { + if ( + fileInfo.diagnosticRuleSet.reportUnknownArgumentType !== 'none' && + !isAny(argParam.paramType) && + !isTypeIncomplete + ) { if (isUnknown(simplifiedType)) { const diagAddendum = getDiagAddendum(); addDiagnostic( @@ -8412,15 +10073,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions Localizer.Diagnostic.argTypeUnknown() + diagAddendum.getString(), argParam.errorNode ); - } else if (isPartlyUnknown(simplifiedType, true)) { + } else if (isPartlyUnknown(simplifiedType, /* allowUnknownTypeArgsForClasses */ true)) { + let suppressPartialUnknown = false; + // Don't report an error if the type is a partially-specialized // class. This comes up frequently in cases where a type is passed // as an argument (e.g. "defaultdict(list)"). + if (isInstantiableClass(simplifiedType)) { + suppressPartialUnknown = true; + } // If the parameter type is also partially unknown, don't report // the error because it's likely that the partially-unknown type // arose due to bidirectional type matching. - if (!isPartlyUnknown(argParam.paramType) && !isInstantiableClass(simplifiedType)) { + if (isPartlyUnknown(argParam.paramType)) { + suppressPartialUnknown = true; + } + + // If the argument type comes from a `[]` or `{}` expression, + // don't bother reporting it. + if (isClassInstance(simplifiedType) && simplifiedType.isEmptyContainer) { + suppressPartialUnknown = true; + } + + if (!suppressPartialUnknown) { const diagAddendum = getDiagAddendum(); diagAddendum.addMessage( Localizer.DiagnosticAddendum.argumentType().format({ @@ -8480,8 +10156,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions argList[i].valueExpression || errorNode ); } else { - const argType = getTypeForArgumentExpectingType(argList[i]); - if (requiresSpecialization(argType)) { + const argType = + argList[i].type ?? 
+ getTypeForExpressionExpectingType( + argList[i].valueExpression!, + /* allowFinal */ undefined, + /* allowRequired */ undefined, + /* interpreterParsesStringLiteral */ true + ).type; + if (requiresSpecialization(argType, /* ignorePseudoGeneric */ true)) { addError(Localizer.Diagnostic.typeVarGeneric(), argList[i].valueExpression || errorNode); } typeVar.details.boundType = convertToInstance(argType); @@ -8517,8 +10200,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions argList[i].valueExpression || errorNode ); } else { - const argType = getTypeForArgumentExpectingType(argList[i]); - if (requiresSpecialization(argType)) { + const argType = + argList[i].type ?? + getTypeForExpressionExpectingType( + argList[i].valueExpression!, + /* allowFinal */ undefined, + /* allowRequired */ undefined, + /* interpreterParsesStringLiteral */ true + ).type; + + if (requiresSpecialization(argType, /* ignorePseudoGeneric */ true)) { addError(Localizer.Diagnostic.typeVarGeneric(), argList[i].valueExpression || errorNode); } TypeVarType.addConstraint(typeVar, convertToInstance(argType)); @@ -8712,6 +10403,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const declaration: VariableDeclaration = { type: DeclarationType.Variable, node: stringNode as StringListNode, + isRuntimeTypeExpression: true, path: fileInfo.filePath, range: convertOffsetsToRange( stringNode.start, @@ -8719,6 +10411,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions fileInfo.lines ), moduleName: fileInfo.moduleName, + isInExceptSuite: false, }; newSymbol.addDeclaration(declaration); classFields.set(entryName, newSymbol); @@ -8746,7 +10439,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (argList.length >= 2) { - const baseClass = getTypeForArgumentExpectingType(argList[1]); + const baseClass = getTypeForArgumentExpectingType(argList[1]).type; if 
(isInstantiableClass(baseClass)) { if (ClassType.isProtocolClass(baseClass)) { @@ -8815,13 +10508,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Implements the semantics of the multi-parameter variant of the "type" call. function createType(errorNode: ExpressionNode, argList: FunctionArgument[]): ClassType | undefined { const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); - const arg0Type = getTypeForArgument(argList[0]); + const arg0Type = getTypeForArgument(argList[0]).type; if (!isClassInstance(arg0Type) || !ClassType.isBuiltIn(arg0Type, 'str')) { return undefined; } const className = (arg0Type.literalValue as string) || '_'; - const arg1Type = getTypeForArgument(argList[1]); + const arg1Type = getTypeForArgument(argList[1]).type; if (!isClassInstance(arg1Type) || !isTupleClass(arg1Type) || arg1Type.tupleTypeArguments === undefined) { return undefined; } @@ -8836,11 +10529,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions /* declaredMetaclass */ undefined, arg1Type.details.effectiveMetaclass ); - arg1Type.tupleTypeArguments.forEach((baseClass) => { - if (isInstantiableClass(baseClass) || isAnyOrUnknown(baseClass)) { - classType.details.baseClasses.push(baseClass); + arg1Type.tupleTypeArguments.forEach((typeArg) => { + if (isInstantiableClass(typeArg.type) || isAnyOrUnknown(typeArg.type)) { + classType.details.baseClasses.push(typeArg.type); } else { - addExpectedClassDiagnostic(baseClass, argList[1].valueExpression || errorNode); + addExpectedClassDiagnostic(typeArg.type, argList[1].valueExpression || errorNode); } }); @@ -8882,7 +10575,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } function getTypeFromUnaryOperation(node: UnaryOperationNode, expectedType: Type | undefined): TypeResult { - let exprType = makeTopLevelTypeVarsConcrete(getTypeOfExpression(node.expression).type); + const exprTypeResult = getTypeOfExpression(node.expression); 
+ let exprType = makeTopLevelTypeVarsConcrete(exprTypeResult.type); + const isIncomplete = exprTypeResult.isIncomplete; + + if (isNever(exprType)) { + return { node, type: NeverType.createNever(), isIncomplete }; + } // Map unary operators to magic functions. Note that the bitwise // invert has two magic functions that are aliases of each other. @@ -8908,52 +10607,80 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - // __not__ always returns a boolean. - if (node.operator === OperatorType.Not) { - type = getBuiltInObject(node, 'bool'); - if (!type) { - type = UnknownType.create(); + // Handle certain operations on certain literal types + // using special-case math. Do not apply this if the input type + // is incomplete because we may be evaluating an expression within + // a loop, so the literal value may change each time. + if (!exprTypeResult.isIncomplete) { + const literalClassName = getLiteralTypeClassName(exprType); + if (literalClassName === 'int') { + if (node.operator === OperatorType.Add) { + type = exprType; + } else if (node.operator === OperatorType.Subtract) { + type = mapSubtypes(exprType, (subtype) => { + const classSubtype = subtype as ClassType; + return ClassType.cloneWithLiteral( + classSubtype, + -(classSubtype.literalValue as number | bigint) + ); + }); + } + } else if (literalClassName === 'bool') { + if (node.operator === OperatorType.Not) { + type = mapSubtypes(exprType, (subtype) => { + const classSubtype = subtype as ClassType; + return ClassType.cloneWithLiteral(classSubtype, !(classSubtype.literalValue as boolean)); + }); + } } - } else { - if (isAnyOrUnknown(exprType)) { - type = exprType; + } + + if (!type) { + // __not__ always returns a boolean. 
+ if (node.operator === OperatorType.Not) { + type = getBuiltInObject(node, 'bool'); + if (!type) { + type = UnknownType.create(); + } } else { - const magicMethodName = unaryOperatorMap[node.operator]; - type = getTypeFromMagicMethodReturn(exprType, [], magicMethodName, node, expectedType); - } + if (isAnyOrUnknown(exprType)) { + type = exprType; + } else { + const magicMethodName = unaryOperatorMap[node.operator]; + type = getTypeFromMagicMethodReturn(exprType, [], magicMethodName, node, expectedType); + } - if (!type) { - const fileInfo = AnalyzerNodeInfo.getFileInfo(node); - addDiagnostic( - fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.typeNotSupportUnaryOperator().format({ - operator: ParseTreeUtils.printOperator(node.operator), - type: printType(exprType), - }), - node - ); - type = UnknownType.create(); - } - } + if (!type) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(node); - // Handle the special case where the unary operator is + or -, the operand - // is a literal int, and the resulting type is an int. In these cases, we'll - // want to interpret the resulting type as a literal. - if (node.operator === OperatorType.Add || node.operator === OperatorType.Subtract) { - if ( - isClassInstance(type) && - ClassType.isBuiltIn(type, 'int') && - isClassInstance(exprType) && - ClassType.isBuiltIn(exprType, 'int') && - typeof exprType.literalValue === 'number' - ) { - const value = node.operator === OperatorType.Add ? 
exprType.literalValue : -exprType.literalValue!; - type = ClassType.cloneWithLiteral(type, value); + if (expectedType) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.typeNotSupportUnaryOperatorBidirectional().format({ + operator: ParseTreeUtils.printOperator(node.operator), + type: printType(exprType), + expectedType: printType(expectedType), + }), + node + ); + } else { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.typeNotSupportUnaryOperator().format({ + operator: ParseTreeUtils.printOperator(node.operator), + type: printType(exprType), + }), + node + ); + } + type = UnknownType.create(); + } } } - return { type, node }; + return { type, node, isIncomplete }; } function operatorSupportsComparisonChaining(op: OperatorType) { @@ -9032,6 +10759,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // For the "+" operator , use this technique only if the right operand is // a list expression. This heuristic handles the common case of `my_list + [0]`. expectedOperandType = leftType; + } else if (node.operator === OperatorType.BitwiseOr) { + // If this is a bitwise or ("|"), use the type of the left operand. This allows + // us to support the case where a TypedDict is being updated with a dict expression. + expectedOperandType = leftType; } } @@ -9050,7 +10781,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions !customMetaclassSupportsMethod(rightType, '__ror__') ) { let adjustedRightType = rightType; - if (!isNone(leftType) && isNone(rightType) && TypeBase.isInstance(rightType)) { + if (!isNoneInstance(leftType) && isNoneInstance(rightType) && TypeBase.isInstance(rightType)) { // Handle the special case where "None" is being added to the union // with something else. 
Even though "None" will normally be interpreted // as the None singleton object in contexts where a type annotation isn't @@ -9065,11 +10796,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions (flags & EvaluatorFlags.AllowForwardReferences) !== 0 || fileInfo.executionEnvironment.pythonVersion >= PythonVersion.V3_10; if (!unionNotationSupported) { - addError(Localizer.Diagnostic.unionSyntaxIllegal(), node, node.operatorToken); + // If the left type is Any, we can't say for sure whether this + // is an illegal syntax or a valid application of the "|" operator. + if (!isAnyOrUnknown(leftType)) { + addError(Localizer.Diagnostic.unionSyntaxIllegal(), node, node.operatorToken); + } + } + + const newUnion = combineTypes([leftType, adjustedRightType]); + if (isUnion(newUnion)) { + TypeBase.setSpecialForm(newUnion); } return { - type: combineTypes([leftType, adjustedRightType]), + type: newUnion, node, }; } @@ -9092,9 +10832,22 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } const diag = new DiagnosticAddendum(); - let type = validateBinaryOperation(node.operator, leftType, rightType, node, expectedType, diag); - if (!diag.isEmpty() || !type || isNever(type)) { + // Don't use literal math if either of the operand types are + // incomplete because we may be evaluating types within a loop, + // so the literal values may change each time. 
+ const isLiteralMathAllowed = !leftTypeResult.isIncomplete && !rightTypeResult.isIncomplete; + let type = validateBinaryOperation( + node.operator, + leftType, + rightType, + node, + expectedType, + diag, + isLiteralMathAllowed + ); + + if (!diag.isEmpty() || !type) { if (!isIncomplete) { const fileInfo = AnalyzerNodeInfo.getFileInfo(node); @@ -9179,10 +10932,22 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const leftTypeResult = getTypeOfExpression(node.leftExpression); const leftType = leftTypeResult.type; - const rightTypeResult = getTypeOfExpression(node.rightExpression); + + let expectedOperandType: Type | undefined; + if (node.operator === OperatorType.BitwiseOrEqual) { + // If this is a bitwise or ("|="), use the type of the left operand. This allows + // us to support the case where a TypedDict is being updated with a dict expression. + expectedOperandType = leftType; + } + + const rightTypeResult = getTypeOfExpression(node.rightExpression, expectedOperandType); const rightType = rightTypeResult.type; const isIncomplete = !!rightTypeResult.isIncomplete || !!leftTypeResult.isIncomplete; + if (isNever(leftType) || isNever(rightType)) { + return { node, type: NeverType.createNever(), isIncomplete }; + } + type = mapSubtypesExpandTypeVars( leftType, /* conditionFilter */ undefined, @@ -9235,13 +11000,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If the LHS class didn't support the magic method for augmented // assignment, fall back on the normal binary expression evaluator. const binaryOperator = operatorMap[node.operator][1]; + + // Don't use literal math if either of the operand types are + // incomplete because we may be evaluating types within a loop, + // so the literal values may change each time. 
+ const isLiteralMathAllowed = !leftTypeResult.isIncomplete && !rightTypeResult.isIncomplete; + returnType = validateBinaryOperation( binaryOperator, leftSubtypeUnexpanded, rightSubtypeUnexpanded, node, expectedType, - diag + diag, + isLiteralMathAllowed ); } @@ -9280,7 +11052,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions rightType: Type, errorNode: ExpressionNode, expectedType: Type | undefined, - diag: DiagnosticAddendum + diag: DiagnosticAddendum, + isLiteralMathAllowed: boolean ): Type | undefined { let type: Type | undefined; let concreteLeftType = makeTopLevelTypeVarsConcrete(leftType); @@ -9318,6 +11091,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions concreteLeftType = removeFalsinessFromType(concreteLeftType); } + if (isNever(leftType) || isNever(rightType)) { + return NeverType.createNever(); + } + // The "in" and "not in" operators make use of the __contains__ // magic method. if (operator === OperatorType.In || operator === OperatorType.NotIn) { @@ -9355,10 +11132,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions /* errorNode */ undefined ); - if ( - iteratorType && - canAssignType(iteratorType, leftSubtype, new DiagnosticAddendum()) - ) { + if (iteratorType && canAssignType(iteratorType, leftSubtype)) { returnType = getBuiltInObject(errorNode, 'bool'); } } @@ -9404,127 +11178,245 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); } } else if (binaryOperatorMap[operator]) { - type = mapSubtypesExpandTypeVars( - leftType, - /* conditionFilter */ undefined, - (leftSubtypeExpanded, leftSubtypeUnexpanded) => { - return mapSubtypesExpandTypeVars( - rightType, - getTypeCondition(leftSubtypeExpanded), - (rightSubtypeExpanded, rightSubtypeUnexpanded) => { - if (isAnyOrUnknown(leftSubtypeUnexpanded) || isAnyOrUnknown(rightSubtypeUnexpanded)) { - // If either type is "Unknown" (versus Any), propagate the Unknown. 
- if (isUnknown(leftSubtypeUnexpanded) || isUnknown(rightSubtypeUnexpanded)) { - return UnknownType.create(); - } else { - return AnyType.create(); - } - } + if (isNever(leftType) || isNever(rightType)) { + return NeverType.createNever(); + } - // Special-case __add__ for tuples when the types for both tuples are known. - if ( - operator === OperatorType.Add && - isClassInstance(leftSubtypeExpanded) && - isTupleClass(leftSubtypeExpanded) && - leftSubtypeExpanded.tupleTypeArguments && - !isOpenEndedTupleClass(leftSubtypeExpanded) && - isClassInstance(rightSubtypeExpanded) && - isTupleClass(rightSubtypeExpanded) && - rightSubtypeExpanded.tupleTypeArguments && - !isOpenEndedTupleClass(rightSubtypeExpanded) && - tupleClassType && - isInstantiableClass(tupleClassType) - ) { - return ClassType.cloneAsInstance( - specializeTupleClass(tupleClassType, [ - ...leftSubtypeExpanded.tupleTypeArguments, - ...rightSubtypeExpanded.tupleTypeArguments, - ]) - ); - } + // Handle certain operations on certain homogenous literal types + // using special-case math. For example, Literal[1, 2] + Literal[3, 4] + // should result in Literal[4, 5, 6]. + if (isLiteralMathAllowed) { + const leftLiteralClassName = getLiteralTypeClassName(leftType); + if (leftLiteralClassName && !getTypeCondition(leftType)) { + const rightLiteralClassName = getLiteralTypeClassName(rightType); - const magicMethodName = binaryOperatorMap[operator][0]; - let resultType = getTypeFromMagicMethodReturn( - leftSubtypeUnexpanded, - [rightSubtypeUnexpanded], - magicMethodName, - errorNode, - expectedType - ); + // If the number of subtypes starts to explode, don't use this code path. + const maxLiteralMathSubtypeCount = 64; - if (!resultType && leftSubtypeUnexpanded !== leftSubtypeExpanded) { - // Try the expanded left type. 
- resultType = getTypeFromMagicMethodReturn( - leftSubtypeExpanded, - [rightSubtypeUnexpanded], - magicMethodName, - errorNode, - expectedType - ); + if ( + leftLiteralClassName === rightLiteralClassName && + !getTypeCondition(rightType) && + getUnionSubtypeCount(leftType) * getUnionSubtypeCount(rightType) < maxLiteralMathSubtypeCount + ) { + if (leftLiteralClassName === 'str' || leftLiteralClassName === 'bytes') { + if (operator === OperatorType.Add) { + type = mapSubtypes(leftType, (leftSubtype) => { + return mapSubtypes(rightType, (rightSubtype) => { + const leftClassSubtype = leftSubtype as ClassType; + const rightClassSubtype = rightSubtype as ClassType; + + return ClassType.cloneWithLiteral( + leftClassSubtype, + ((leftClassSubtype.literalValue as string) + + rightClassSubtype.literalValue) as string + ); + }); + }); } + } else if (leftLiteralClassName === 'int') { + if ( + operator === OperatorType.Add || + operator === OperatorType.Subtract || + operator === OperatorType.Multiply || + operator === OperatorType.FloorDivide || + operator === OperatorType.Mod + ) { + let isValidResult = true; + + type = mapSubtypes(leftType, (leftSubtype) => { + return mapSubtypes(rightType, (rightSubtype) => { + try { + const leftClassSubtype = leftSubtype as ClassType; + const rightClassSubtype = rightSubtype as ClassType; + const leftLiteralValue = BigInt( + leftClassSubtype.literalValue as number | bigint + ); + const rightLiteralValue = BigInt( + rightClassSubtype.literalValue as number | bigint + ); - if (!resultType && rightSubtypeUnexpanded !== rightSubtypeExpanded) { - // Try the expanded left and right type. 
- resultType = getTypeFromMagicMethodReturn( - leftSubtypeExpanded, - [rightSubtypeExpanded], - magicMethodName, - errorNode, - expectedType - ); + let newValue: number | bigint | undefined; + if (operator === OperatorType.Add) { + newValue = leftLiteralValue + rightLiteralValue; + } else if (operator === OperatorType.Subtract) { + newValue = leftLiteralValue - rightLiteralValue; + } else if (operator === OperatorType.Multiply) { + newValue = leftLiteralValue * rightLiteralValue; + } else if (operator === OperatorType.FloorDivide) { + if (rightLiteralValue !== BigInt(0)) { + newValue = leftLiteralValue / rightLiteralValue; + } + } else if (operator === OperatorType.Mod) { + if (rightLiteralValue !== BigInt(0)) { + newValue = leftLiteralValue % rightLiteralValue; + } + } + + if (newValue === undefined) { + isValidResult = false; + return undefined; + } else if (typeof newValue === 'number' && isNaN(newValue)) { + isValidResult = false; + return undefined; + } else { + // Convert back to a simple number if it fits. Leave as a bigint + // if it doesn't. + if (newValue === BigInt(Number(newValue))) { + newValue = Number(newValue); + } + + return ClassType.cloneWithLiteral(leftClassSubtype, newValue); + } + } catch { + isValidResult = false; + return undefined; + } + }); + }); + + if (!isValidResult) { + type = undefined; + } } + } + } + } + } + + if (!type) { + type = mapSubtypesExpandTypeVars( + leftType, + /* conditionFilter */ undefined, + (leftSubtypeExpanded, leftSubtypeUnexpanded) => { + return mapSubtypesExpandTypeVars( + rightType, + getTypeCondition(leftSubtypeExpanded), + (rightSubtypeExpanded, rightSubtypeUnexpanded) => { + if (isAnyOrUnknown(leftSubtypeUnexpanded) || isAnyOrUnknown(rightSubtypeUnexpanded)) { + // If either type is "Unknown" (versus Any), propagate the Unknown. 
+ if (isUnknown(leftSubtypeUnexpanded) || isUnknown(rightSubtypeUnexpanded)) { + return UnknownType.create(); + } else { + return AnyType.create(); + } + } + + // Special-case __add__ for tuples when the types for both tuples are known. + if ( + operator === OperatorType.Add && + isClassInstance(leftSubtypeExpanded) && + isTupleClass(leftSubtypeExpanded) && + leftSubtypeExpanded.tupleTypeArguments && + !isUnboundedTupleClass(leftSubtypeExpanded) && + isClassInstance(rightSubtypeExpanded) && + isTupleClass(rightSubtypeExpanded) && + rightSubtypeExpanded.tupleTypeArguments && + !isUnboundedTupleClass(rightSubtypeExpanded) && + tupleClassType && + isInstantiableClass(tupleClassType) + ) { + return ClassType.cloneAsInstance( + specializeTupleClass(tupleClassType, [ + ...leftSubtypeExpanded.tupleTypeArguments, + ...rightSubtypeExpanded.tupleTypeArguments, + ]) + ); + } - if (!resultType) { - // Try the alternate form (swapping right and left). - const altMagicMethodName = binaryOperatorMap[operator][1]; - resultType = getTypeFromMagicMethodReturn( - rightSubtypeUnexpanded, - [leftSubtypeUnexpanded], - altMagicMethodName, + const magicMethodName = binaryOperatorMap[operator][0]; + let resultType = getTypeFromMagicMethodReturn( + convertFunctionToObject(leftSubtypeUnexpanded), + [rightSubtypeUnexpanded], + magicMethodName, errorNode, expectedType ); + if (!resultType && leftSubtypeUnexpanded !== leftSubtypeExpanded) { + // Try the expanded left type. + resultType = getTypeFromMagicMethodReturn( + convertFunctionToObject(leftSubtypeExpanded), + [rightSubtypeUnexpanded], + magicMethodName, + errorNode, + expectedType + ); + } + if (!resultType && rightSubtypeUnexpanded !== rightSubtypeExpanded) { - // Try the expanded right type. + // Try the expanded left and right type. 
resultType = getTypeFromMagicMethodReturn( - rightSubtypeExpanded, - [leftSubtypeUnexpanded], - altMagicMethodName, + convertFunctionToObject(leftSubtypeExpanded), + [rightSubtypeExpanded], + magicMethodName, errorNode, expectedType ); } - if (!resultType && leftSubtypeUnexpanded !== leftSubtypeExpanded) { - // Try the expanded right and left type. + if (!resultType) { + // Try the alternate form (swapping right and left). + const altMagicMethodName = binaryOperatorMap[operator][1]; resultType = getTypeFromMagicMethodReturn( - rightSubtypeExpanded, - [leftSubtypeExpanded], + convertFunctionToObject(rightSubtypeUnexpanded), + [leftSubtypeUnexpanded], altMagicMethodName, errorNode, expectedType ); + + if (!resultType && rightSubtypeUnexpanded !== rightSubtypeExpanded) { + // Try the expanded right type. + resultType = getTypeFromMagicMethodReturn( + convertFunctionToObject(rightSubtypeExpanded), + [leftSubtypeUnexpanded], + altMagicMethodName, + errorNode, + expectedType + ); + } + + if (!resultType && leftSubtypeUnexpanded !== leftSubtypeExpanded) { + // Try the expanded right and left type. 
+ resultType = getTypeFromMagicMethodReturn( + convertFunctionToObject(rightSubtypeExpanded), + [leftSubtypeExpanded], + altMagicMethodName, + errorNode, + expectedType + ); + } } - } - if (!resultType) { - diag.addMessage( - Localizer.Diagnostic.typeNotSupportBinaryOperator().format({ - operator: ParseTreeUtils.printOperator(operator), - leftType: printType(leftSubtypeExpanded), - rightType: printType(rightSubtypeExpanded), - }) - ); + if (!resultType) { + if (expectedType) { + diag.addMessage( + Localizer.Diagnostic.typeNotSupportBinaryOperatorBidirectional().format({ + operator: ParseTreeUtils.printOperator(operator), + leftType: printType(leftSubtypeExpanded), + rightType: printType(rightSubtypeExpanded), + expectedType: printType(expectedType), + }) + ); + } else { + diag.addMessage( + Localizer.Diagnostic.typeNotSupportBinaryOperator().format({ + operator: ParseTreeUtils.printOperator(operator), + leftType: printType(leftSubtypeExpanded), + rightType: printType(rightSubtypeExpanded), + }) + ); + } + } + return resultType; } - return resultType; - } - ); - } - ); + ); + } + ); + } } - return type; + return type && isNever(type) ? undefined : type; } function getTypeFromMagicMethodReturn( @@ -9601,7 +11493,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isClassInstance(subtype) || isInstantiableClass(subtype) || isTypeVar(subtype)) { return handleSubtype(subtype); - } else if (isNone(subtype)) { + } else if (isNoneInstance(subtype)) { // NoneType derives from 'object', so do the lookup on 'object' // in this case. const obj = getBuiltInObject(errorNode, 'object'); @@ -9621,6 +11513,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return returnType; } + // All functions in Python derive from object, so they inherit all + // of the capabilities of an object. This function converts a function + // to an object instance. 
+ function convertFunctionToObject(type: Type) { + if (isFunction(type) || isOverloadedFunction(type)) { + if (objectType) { + return objectType; + } + } + + return type; + } + function getTypeFromDictionary(node: DictionaryNode, expectedType: Type | undefined): TypeResult { // If the expected type is a union, analyze for each of the subtypes // to find one that matches. @@ -9632,10 +11537,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions doForEachSubtype(expectedType, (subtype) => { if (!matchingSubtype) { const subtypeResult = useSpeculativeMode(node, () => { - return getTypeFromDictionaryExpected(node, subtype, new DiagnosticAddendum()); + return getTypeFromDictionaryExpected(node, subtype); }); - if (subtypeResult) { + if (subtypeResult && canAssignType(subtype, subtypeResult.type)) { matchingSubtype = subtype; } } @@ -9653,7 +11558,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - const result = getTypeFromDictionaryInferred(node, expectedType)!; + const result = getTypeFromDictionaryInferred(node, /* hasExpectedType */ !!expectedType); return { ...result, expectedTypeDiagAddendum }; } @@ -9663,7 +11568,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function getTypeFromDictionaryExpected( node: DictionaryNode, expectedType: Type, - expectedDiagAddendum: DiagnosticAddendum + expectedDiagAddendum?: DiagnosticAddendum ): TypeResult | undefined { expectedType = transformPossibleRecursiveTypeAlias(expectedType); @@ -9685,7 +11590,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node, keyTypes, valueTypes, - !!expectedType, + /* forceStrictInference */ true, /* expectedKeyType */ undefined, /* expectedValueType */ undefined, expectedTypedDictEntries, @@ -9695,15 +11600,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isIncomplete = true; } - if ( - 
ClassType.isTypedDictClass(expectedType) && - canAssignToTypedDict(evaluatorInterface, expectedType, keyTypes, valueTypes, expectedDiagAddendum) - ) { - return { - type: expectedType, - node, - isIncomplete, - }; + if (ClassType.isTypedDictClass(expectedType)) { + const resultTypedDict = assignToTypedDict( + evaluatorInterface, + expectedType, + keyTypes, + valueTypes, + expectedDiagAddendum + ); + if (resultTypedDict) { + return { + type: resultTypedDict, + node, + isIncomplete, + }; + } } return undefined; @@ -9717,7 +11628,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const dictTypeVarMap = new TypeVarMap(getTypeVarScopeId(builtInDict)); if ( !populateTypeVarMapBasedOnExpectedType( - ClassType.cloneAsInstantiable(builtInDict), + builtInDict, expectedType, dictTypeVarMap, getTypeVarScopesForNode(node) @@ -9743,7 +11654,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node, keyTypes, valueTypes, - !!expectedType, + /* forceStrictInference */ true, expectedKeyType, expectedValueType, undefined, @@ -9774,12 +11685,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return { type, node, isIncomplete }; } - // Attempts to infer the type of a dictionary statement. If an expectedType - // is provided, the resulting type must be compatible with the expected type. - // If this isn't possible, undefined is returned. - function getTypeFromDictionaryInferred(node: DictionaryNode, expectedType: Type | undefined): TypeResult { - let keyType: Type = expectedType ? AnyType.create() : UnknownType.create(); - let valueType: Type = expectedType ? AnyType.create() : UnknownType.create(); + // Attempts to infer the type of a dictionary statement. If hasExpectedType + // is true, strict inference is used for the subexpressions. + function getTypeFromDictionaryInferred(node: DictionaryNode, hasExpectedType: boolean): TypeResult { + const fallbackType = hasExpectedType ? 
AnyType.create() : UnknownType.create(); + let keyType: Type = fallbackType; + let valueType: Type = fallbackType; let keyTypes: Type[] = []; let valueTypes: Type[] = []; @@ -9788,16 +11699,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let isIncomplete = false; // Infer the key and value types if possible. - if ( - getKeyAndValueTypesFromDictionary( - node, - keyTypes, - valueTypes, - !expectedType, - expectedType ? AnyType.create() : undefined, - expectedType ? AnyType.create() : undefined - ) - ) { + if (getKeyAndValueTypesFromDictionary(node, keyTypes, valueTypes, /* forceStrictInference */ hasExpectedType)) { isIncomplete = true; } @@ -9805,7 +11707,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions keyTypes = keyTypes.map((t) => stripLiteralValue(t)); valueTypes = valueTypes.map((t) => stripLiteralValue(t)); - keyType = keyTypes.length > 0 ? combineTypes(keyTypes) : expectedType ? AnyType.create() : UnknownType.create(); + keyType = keyTypes.length > 0 ? combineTypes(keyTypes) : fallbackType; // If the value type differs and we're not using "strict inference mode", // we need to back off because we can't properly represent the mappings @@ -9813,17 +11715,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // are the same type, we'll assume that all values in this dictionary should // be the same. if (valueTypes.length > 0) { - if (AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.strictDictionaryInference || !!expectedType) { + if (AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.strictDictionaryInference || hasExpectedType) { valueType = combineTypes(valueTypes); } else { - valueType = areTypesSame(valueTypes, /* ignorePseudoGeneric */ true) - ? valueTypes[0] - : expectedType - ? AnyType.create() - : UnknownType.create(); + valueType = areTypesSame(valueTypes, /* ignorePseudoGeneric */ true) ? 
valueTypes[0] : fallbackType; } } else { - valueType = expectedType ? AnyType.create() : UnknownType.create(); + valueType = fallbackType; isEmptyContainer = true; } @@ -9848,7 +11746,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node: DictionaryNode, keyTypes: Type[], valueTypes: Type[], - limitEntryCount: boolean, + forceStrictInference: boolean, expectedKeyType?: Type, expectedValueType?: Type, expectedTypedDictEntries?: Map, @@ -9861,7 +11759,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let addUnknown = true; if (entryNode.nodeType === ParseNodeType.DictionaryKeyEntry) { - const keyTypeResult = getTypeOfExpression(entryNode.keyExpression, expectedKeyType); + const keyTypeResult = getTypeOfExpression( + entryNode.keyExpression, + expectedKeyType ?? (forceStrictInference ? NeverType.createNever() : undefined) + ); if (keyTypeResult.isIncomplete) { isIncomplete = true; } @@ -9870,7 +11771,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (expectedKeyType) { const adjExpectedKeyType = makeTopLevelTypeVarsConcrete(expectedKeyType); if (!isAnyOrUnknown(adjExpectedKeyType)) { - if (canAssignType(adjExpectedKeyType, keyType, new DiagnosticAddendum(), undefined)) { + if (canAssignType(adjExpectedKeyType, keyType)) { keyType = adjExpectedKeyType; } } @@ -9890,7 +11791,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions expectedTypedDictEntries.get(keyType.literalValue as string)!.valueType ); } else { - valueTypeResult = getTypeOfExpression(entryNode.valueExpression, expectedValueType); + valueTypeResult = getTypeOfExpression( + entryNode.valueExpression, + expectedValueType ?? (forceStrictInference ? 
NeverType.createNever() : undefined) + ); } if (expectedDiagAddendum && valueTypeResult.expectedTypeDiagAddendum) { @@ -9902,7 +11806,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isIncomplete = true; } - if (!limitEntryCount || index < maxEntriesToUseForInference) { + if (forceStrictInference || index < maxEntriesToUseForInference) { keyTypes.push(keyType); valueTypes.push(valueType); } @@ -9924,14 +11828,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( ClassType.cloneAsInstance(mappingType), unexpandedType, - new DiagnosticAddendum(), + /* diag */ undefined, mappingTypeVarMap ) ) { const specializedMapping = applySolvedTypeVars(mappingType, mappingTypeVarMap) as ClassType; const typeArgs = specializedMapping.typeArguments; if (typeArgs && typeArgs.length >= 2) { - if (!limitEntryCount || index < maxEntriesToUseForInference) { + if (forceStrictInference || index < maxEntriesToUseForInference) { keyTypes.push(typeArgs[0]); valueTypes.push(typeArgs[1]); } @@ -9961,9 +11865,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // The result should be a tuple. 
if (isClassInstance(dictEntryType) && isTupleClass(dictEntryType)) { - const typeArgs = dictEntryType.tupleTypeArguments; + const typeArgs = dictEntryType.tupleTypeArguments?.map((t) => t.type); if (typeArgs && typeArgs.length === 2) { - if (!limitEntryCount || index < maxEntriesToUseForInference) { + if (forceStrictInference || index < maxEntriesToUseForInference) { keyTypes.push(typeArgs[0]); valueTypes.push(typeArgs[1]); } @@ -9973,7 +11877,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (addUnknown) { - if (!limitEntryCount || index < maxEntriesToUseForInference) { + if (forceStrictInference || index < maxEntriesToUseForInference) { keyTypes.push(UnknownType.create()); valueTypes.push(UnknownType.create()); } @@ -9997,7 +11901,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return getTypeFromListOrSetExpected(node, subtype); }); - if (subtypeResult) { + if (subtypeResult && canAssignType(subtype, subtypeResult.type)) { matchingSubtype = subtype; } } @@ -10013,7 +11917,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - return getTypeFromListOrSetInferred(node, expectedType); + return getTypeFromListOrSetInferred(node, /* hasExpectedType */ expectedType !== undefined); } // Attempts to determine the type of a list or set statement based on an expected type. @@ -10035,7 +11939,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const typeVarMap = new TypeVarMap(getTypeVarScopeId(builtInListOrSet)); if ( !populateTypeVarMapBasedOnExpectedType( - ClassType.cloneAsInstantiable(builtInListOrSet), + builtInListOrSet, expectedType, typeVarMap, getTypeVarScopesForNode(node) @@ -10084,30 +11988,22 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Attempts to infer the type of a list or set statement with no "expected type". 
- function getTypeFromListOrSetInferred(node: ListNode | SetNode, expectedType: Type | undefined): TypeResult { + function getTypeFromListOrSetInferred(node: ListNode | SetNode, hasExpectedType: boolean): TypeResult { const builtInClassName = node.nodeType === ParseNodeType.List ? 'list' : 'set'; let isEmptyContainer = false; let isIncomplete = false; - // If we received an expected entry type that of "object", - // allow Any rather than generating an "Unknown". - let expectedEntryType: Type | undefined; - if (expectedType) { - if (isAny(expectedType)) { - expectedEntryType = expectedType; - } else if (isClassInstance(expectedType) && ClassType.isBuiltIn(expectedType, 'object')) { - expectedEntryType = AnyType.create(); - } - } - let entryTypes: Type[] = []; node.entries.forEach((entry, index) => { let entryTypeResult: TypeResult; if (entry.nodeType === ParseNodeType.ListComprehension) { - entryTypeResult = getElementTypeFromListComprehension(entry, expectedEntryType); + entryTypeResult = getElementTypeFromListComprehension(entry); } else { - entryTypeResult = getTypeOfExpression(entry, expectedEntryType); + entryTypeResult = getTypeOfExpression( + entry, + /* expectedType */ hasExpectedType ? NeverType.createNever() : undefined + ); } if (entryTypeResult.isIncomplete) { @@ -10121,7 +12017,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions entryTypes = entryTypes.map((t) => stripLiteralValue(t)); - let inferredEntryType: Type = expectedType ? AnyType.create() : UnknownType.create(); + let inferredEntryType: Type = hasExpectedType ? 
AnyType.create() : UnknownType.create(); if (entryTypes.length > 0) { const fileInfo = AnalyzerNodeInfo.getFileInfo(node); // If there was an expected type or we're using strict list inference, @@ -10129,7 +12025,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if ( (builtInClassName === 'list' && fileInfo.diagnosticRuleSet.strictListInference) || (builtInClassName === 'set' && fileInfo.diagnosticRuleSet.strictSetInference) || - !!expectedType + hasExpectedType ) { inferredEntryType = combineTypes(entryTypes, maxSubtypesForInferredType); } else { @@ -10164,8 +12060,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions entryTypes: Type[], isNarrowable: boolean ): Type | undefined { - const diagDummy = new DiagnosticAddendum(); - // Synthesize a temporary bound type var. We will attempt to assign all list // entries to this type var, possibly narrowing the type in the process. const targetTypeVar = TypeVarType.createInstance('__typeArg'); @@ -10183,7 +12077,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // type explicitly includes literals. if ( entryTypes.some( - (entryType) => !canAssignType(targetTypeVar, stripLiteralValue(entryType), diagDummy, typeVarMap) + (entryType) => + !canAssignType(targetTypeVar, stripLiteralValue(entryType), /* diag */ undefined, typeVarMap) ) ) { // Allocate a fresh typeVarMap before we try again with literals not stripped. 
@@ -10194,7 +12089,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions expectedType, /* retainLiteral */ true ); - if (entryTypes.some((entryType) => !canAssignType(targetTypeVar!, entryType, diagDummy, typeVarMap))) { + if ( + entryTypes.some( + (entryType) => !canAssignType(targetTypeVar!, entryType, /* diag */ undefined, typeVarMap) + ) + ) { return undefined; } } @@ -10205,29 +12104,62 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function getTypeFromTernary(node: TernaryNode, flags: EvaluatorFlags, expectedType: Type | undefined): TypeResult { getTypeOfExpression(node.testExpression); - const ifType = getTypeOfExpression(node.ifExpression, expectedType, flags); - const elseType = getTypeOfExpression(node.elseExpression, expectedType, flags); + const typesToCombine: Type[] = []; + let isIncomplete = false; + + if (isNodeReachable(node.ifExpression)) { + const ifType = getTypeOfExpression(node.ifExpression, expectedType, flags); + typesToCombine.push(ifType.type); + if (ifType.isIncomplete) { + isIncomplete = true; + } + } + + if (isNodeReachable(node.elseExpression)) { + const elseType = getTypeOfExpression(node.elseExpression, expectedType, flags); + typesToCombine.push(elseType.type); + if (elseType.isIncomplete) { + isIncomplete = true; + } + } - const type = combineTypes([ifType.type, elseType.type]); - return { type, node, isIncomplete: ifType.isIncomplete || elseType.isIncomplete }; + return { type: combineTypes(typesToCombine), node, isIncomplete }; } function getTypeFromYield(node: YieldNode): TypeResult { + let expectedYieldType: Type | undefined; let sentType: Type | undefined; + let isIncomplete = false; const enclosingFunction = ParseTreeUtils.getEnclosingFunction(node); if (enclosingFunction) { const functionTypeInfo = getTypeOfFunction(enclosingFunction); if (functionTypeInfo) { - sentType = getDeclaredGeneratorSendType(functionTypeInfo.functionType); + const returnType = 
FunctionType.getSpecializedReturnType(functionTypeInfo.functionType); + if (returnType) { + const generatorTypeArgs = getGeneratorTypeArgs(returnType); + + if (generatorTypeArgs) { + if (generatorTypeArgs.length >= 1) { + expectedYieldType = generatorTypeArgs[0]; + } + + if (generatorTypeArgs.length >= 2) { + sentType = generatorTypeArgs[1]; + } + } + } } } if (node.expression) { - getTypeOfExpression(node.expression).type; + const exprResult = getTypeOfExpression(node.expression, expectedYieldType); + if (exprResult.isIncomplete) { + isIncomplete = true; + } } - return { type: sentType || UnknownType.create(), node }; + return { type: sentType || UnknownType.create(), node, isIncomplete }; } function getTypeFromYieldFrom(node: YieldFromNode): TypeResult { @@ -10239,6 +12171,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Is the expression a Generator type? if (generatorTypeArgs) { returnedType = generatorTypeArgs.length >= 2 ? generatorTypeArgs[2] : UnknownType.create(); + } else if (isClassInstance(yieldFromType) && ClassType.isBuiltIn(yieldFromType, 'Coroutine')) { + // Handle old-style (pre-await) Coroutines as a special case. + returnedType = UnknownType.create(); } else { const iterableType = getTypeFromIterable(yieldFromType, /* isAsync */ false, node) || UnknownType.create(); @@ -10246,8 +12181,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions generatorTypeArgs = getGeneratorTypeArgs(iterableType); if (generatorTypeArgs) { returnedType = generatorTypeArgs.length >= 2 ? 
generatorTypeArgs[2] : UnknownType.create(); - } else { - returnedType = UnknownType.create(); } } @@ -10256,9 +12189,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function getTypeFromLambda(node: LambdaNode, expectedType: Type | undefined): TypeResult { const functionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.None); + functionType.details.typeVarScopeId = getScopeIdForNode(node); // Pre-cache the newly-created function type. - writeTypeCache(node, functionType, /* isIncomplete */ false); + writeTypeCache(node, functionType, EvaluatorFlags.None, /* isIncomplete */ false); let expectedFunctionTypes: FunctionType[] = []; if (expectedType) { @@ -10286,18 +12220,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Determine the minimum number of parameters that are required to // satisfy the lambda. - const lambdaParamCount = node.parameters.filter( + const minLambdaParamCount = node.parameters.filter( (param) => param.category === ParameterCategory.Simple && param.defaultValue === undefined ).length; + const maxLambdaParamCount = node.parameters.filter( + (param) => param.category === ParameterCategory.Simple + ).length; // Remove any expected subtypes that don't satisfy the minimum // parameter count requirement. 
expectedFunctionTypes = expectedFunctionTypes.filter((functionType) => { - const functionParamCount = functionType.details.parameters.filter((param) => !!param.name).length; + const functionParamCount = functionType.details.parameters.filter( + (param) => !!param.name && !param.hasDefault + ).length; const hasVarArgs = functionType.details.parameters.some( (param) => !!param.name && param.category !== ParameterCategory.Simple ); - return hasVarArgs || functionParamCount === lambdaParamCount; + return ( + hasVarArgs || + (functionParamCount >= minLambdaParamCount && functionParamCount <= maxLambdaParamCount) + ); }); } @@ -10314,6 +12256,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions writeTypeCache( param.name, transformVariadicParamType(node, param.category, paramType), + EvaluatorFlags.None, /* isIncomplete */ false ); } @@ -10365,11 +12308,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } const elementType = elementTypeResult.type; - let isAsync = node.comprehensions.some((comp) => { - return comp.nodeType === ParseNodeType.ListComprehensionFor && comp.isAsync; + let isAsync = node.forIfNodes.some((comp) => { + return ( + (comp.nodeType === ParseNodeType.ListComprehensionFor && comp.isAsync) || + (comp.nodeType === ParseNodeType.ListComprehensionIf && + comp.testExpression.nodeType === ParseNodeType.Await) + ); }); let type: Type = UnknownType.create(); + if (node.expression.nodeType === ParseNodeType.Await) { + isAsync = true; + } + // Handle the special case where a generator function (e.g. `(await x for x in y)`) // is expected to be an AsyncGenerator. 
if ( @@ -10441,6 +12392,34 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + function evaluateListComprehensionForIf(node: ListComprehensionForIfNode) { + let isIncomplete = false; + + if (node.nodeType === ParseNodeType.ListComprehensionFor) { + const iterableTypeResult = getTypeOfExpression(node.iterableExpression); + if (iterableTypeResult.isIncomplete) { + isIncomplete = true; + } + const iterableType = stripLiteralValue(iterableTypeResult.type); + const itemType = + getTypeFromIterator(iterableType, !!node.isAsync, node.iterableExpression) || UnknownType.create(); + + const targetExpr = node.targetExpression; + assignTypeToExpression(targetExpr, itemType, !!iterableTypeResult.isIncomplete, node.iterableExpression); + } else { + assert(node.nodeType === ParseNodeType.ListComprehensionIf); + + // Evaluate the test expression to validate it and mark symbols + // as referenced. Don't bother doing this if we're in speculative + // mode because it doesn't affect the element type. + if (!speculativeTypeTracker.isSpeculative(node.testExpression)) { + getTypeOfExpression(node.testExpression); + } + } + + return isIncomplete; + } + // Returns the type of one entry returned by the list comprehension, // as opposed to the entire list. function getElementTypeFromListComprehension( @@ -10451,33 +12430,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let isIncomplete = false; // "Execute" the list comprehensions from start to finish. 
- for (const comprehension of node.comprehensions) { - if (comprehension.nodeType === ParseNodeType.ListComprehensionFor) { - const iterableTypeResult = getTypeOfExpression(comprehension.iterableExpression); - if (iterableTypeResult.isIncomplete) { - isIncomplete = true; - } - const iterableType = stripLiteralValue(iterableTypeResult.type); - const itemType = - getTypeFromIterator(iterableType, !!comprehension.isAsync, comprehension.iterableExpression) || - UnknownType.create(); - - const targetExpr = comprehension.targetExpression; - assignTypeToExpression( - targetExpr, - itemType, - !!iterableTypeResult.isIncomplete, - comprehension.iterableExpression - ); - } else { - assert(comprehension.nodeType === ParseNodeType.ListComprehensionIf); - - // Evaluate the test expression to validate it and mark symbols - // as referenced. Don't bother doing this if we're in speculative - // mode because it doesn't affect the element type. - if (!speculativeTypeTracker.isSpeculative(comprehension.testExpression)) { - getTypeOfExpression(comprehension.testExpression); - } + for (const forIfNode of node.forIfNodes) { + if (evaluateListComprehensionForIf(forIfNode)) { + isIncomplete = true; } } @@ -10535,36 +12490,39 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Verifies that a type argument's type is not disallowed. 
- function validateTypeArg( - argResult: TypeResult, - allowEmptyTuple = false, - allowVariadicTypeVar = false, - allowParamSpec = false - ): boolean { + function validateTypeArg(argResult: TypeResult, options?: ValidateTypeArgsOptions): boolean { if (argResult.typeList) { - addError(Localizer.Diagnostic.typeArgListNotAllowed(), argResult.node); - return false; + if (!options?.allowTypeArgList) { + addError(Localizer.Diagnostic.typeArgListNotAllowed(), argResult.node); + return false; + } else { + argResult.typeList!.forEach((typeArg) => { + validateTypeArg(typeArg); + }); + } } if (isEllipsisType(argResult.type)) { - addError(Localizer.Diagnostic.ellipsisContext(), argResult.node); - return false; + if (!options?.allowTypeArgList) { + addError(Localizer.Diagnostic.ellipsisContext(), argResult.node); + return false; + } } if (isModule(argResult.type)) { - addError(Localizer.Diagnostic.moduleContext(), argResult.node); + addError(Localizer.Diagnostic.moduleAsType(), argResult.node); return false; } if (isParamSpec(argResult.type)) { - if (!allowParamSpec) { + if (!options?.allowParamSpec) { addError(Localizer.Diagnostic.paramSpecContext(), argResult.node); return false; } } if (isVariadicTypeVar(argResult.type) && !argResult.type.isVariadicInUnion) { - if (!allowVariadicTypeVar) { + if (!options?.allowVariadicTypeVar) { addError(Localizer.Diagnostic.typeVarTupleContext(), argResult.node); return false; } else { @@ -10572,11 +12530,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - if (!allowEmptyTuple && argResult.isEmptyTupleShorthand) { + if (!options?.allowEmptyTuple && argResult.isEmptyTupleShorthand) { addError(Localizer.Diagnostic.zeroLengthTupleNotAllowed(), argResult.node); return false; } + if (isUnpackedClass(argResult.type)) { + if (!options?.allowUnpackedTuples) { + addError(Localizer.Diagnostic.unpackedArgInTypeArgument(), argResult.node); + return false; + } + } + return true; } @@ -10588,7 +12553,7 @@ export 
function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Create a new function that is marked as "static" so there is later // no attempt to bind it as though it's an instance or class method. const functionType = FunctionType.createInstantiable('', '', '', FunctionTypeFlags.None); - TypeBase.setNonCallable(functionType); + TypeBase.setSpecialForm(functionType); functionType.details.declaredReturnType = UnknownType.create(); const enclosingScope = ParseTreeUtils.getEnclosingClassOrFunction(errorNode); @@ -10603,26 +12568,34 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (typeArgs && typeArgs.length > 0) { if (typeArgs[0].typeList) { const typeList = typeArgs[0].typeList; - let sawVariadic = false; - let reportedVariadicError = false; + let sawUnpacked = false; + let reportedUnpackedError = false; + const noteSawUnpacked = (entry: TypeResult) => { + // Make sure we have at most one unpacked variadic type variable. + if (sawUnpacked) { + if (!reportedUnpackedError) { + addError(Localizer.Diagnostic.variadicTypeArgsTooMany(), entry.node); + reportedUnpackedError = true; + } + } + sawUnpacked = true; + }; typeList.forEach((entry, index) => { let entryType = entry.type; let paramCategory: ParameterCategory = ParameterCategory.Simple; - const paramName = `_p${index.toString()}`; + const paramName = `__p${index.toString()}`; if (isVariadicTypeVar(entryType)) { - // Make sure we have at most one unpacked variadic type variable. 
- if (sawVariadic) { - if (!reportedVariadicError) { - addError(Localizer.Diagnostic.variadicTypeArgsTooMany(), entry.node); - reportedVariadicError = true; - } - } - sawVariadic = true; validateVariadicTypeVarIsUnpacked(entryType, entry.node); - paramCategory = ParameterCategory.Simple; - } else if (!validateTypeArg(entry)) { + paramCategory = ParameterCategory.VarArgList; + noteSawUnpacked(entry); + } else if (validateTypeArg(entry, { allowUnpackedTuples: true })) { + if (isUnpackedClass(entryType)) { + paramCategory = ParameterCategory.VarArgList; + noteSawUnpacked(entry); + } + } else { entryType = UnknownType.create(); } @@ -10634,9 +12607,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions hasDeclaredType: true, }); }); + + FunctionType.addParameter(functionType, { + category: ParameterCategory.Simple, + isNameSynthesized: false, + type: UnknownType.create(), + }); } else if (isEllipsisType(typeArgs[0].type)) { FunctionType.addDefaultParameters(functionType); - functionType.details.flags |= FunctionTypeFlags.SkipParamCompatibilityCheck; + functionType.details.flags |= FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck; } else if (isParamSpec(typeArgs[0].type)) { functionType.details.paramSpec = typeArgs[0].type; } else { @@ -10645,6 +12624,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (concatTypeArgs && concatTypeArgs.length > 0) { concatTypeArgs.forEach((typeArg, index) => { if (index === concatTypeArgs.length - 1) { + // Add a position-only separator + FunctionType.addParameter(functionType, { + category: ParameterCategory.Simple, + isNameSynthesized: false, + type: UnknownType.create(), + }); + if (isParamSpec(typeArg)) { functionType.details.paramSpec = typeArg; } @@ -10687,15 +12673,32 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } else { FunctionType.addDefaultParameters(functionType, /* useUnknown */ true); - 
functionType.details.flags |= FunctionTypeFlags.SkipParamCompatibilityCheck; + functionType.details.flags |= FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck; } return functionType; } // Creates an Optional[X] type. - function createOptionalType(errorNode: ParseNode, typeArgs?: TypeResult[]): Type { - if (!typeArgs || typeArgs.length !== 1) { + function createOptionalType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags + ): Type { + if (!typeArgs) { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if ((flags & EvaluatorFlags.ExpectingTypeAnnotation) !== 0) { + addError(Localizer.Diagnostic.optionalExtraArgs(), errorNode); + return UnknownType.create(); + } + + return classType; + } + + if (typeArgs.length > 1) { addError(Localizer.Diagnostic.optionalExtraArgs(), errorNode); return UnknownType.create(); } @@ -10705,12 +12708,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeArg0Type = UnknownType.create(); } else if (!TypeBase.isInstantiable(typeArg0Type)) { addExpectedClassDiagnostic(typeArg0Type, typeArgs[0].node); + typeArg0Type = UnknownType.create(); } const optionalType = combineTypes([typeArg0Type, NoneType.createType()]); if (isUnion(optionalType)) { - TypeBase.setNonCallable(optionalType); + TypeBase.setSpecialForm(optionalType); } return optionalType; @@ -10813,14 +12817,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (!type) { - if ((flags & EvaluatorFlags.ExpectingType) !== 0) { - addError(Localizer.Diagnostic.literalUnsupportedType(), item); - type = UnknownType.create(); - } else { - // This is a Literal[x] used in a context where we were not - // expecting a type. Treat it as an "Any" type. 
- type = AnyType.create(); - } + addError(Localizer.Diagnostic.literalUnsupportedType(), item); + type = UnknownType.create(); } literalTypes.push(type); @@ -10830,13 +12828,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Creates a ClassVar type. - function createClassVarType(errorNode: ParseNode, typeArgs: TypeResult[] | undefined, flags: EvaluatorFlags): Type { + function createClassVarType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags + ): Type { if (flags & EvaluatorFlags.ClassVarDisallowed) { addError(Localizer.Diagnostic.classVarNotAllowed(), errorNode); return AnyType.create(); } - if (!typeArgs || typeArgs.length === 0) { + if (!typeArgs) { + return classType; + } else if (typeArgs.length === 0) { addError(Localizer.Diagnostic.classVarFirstArgMissing(), errorNode); return UnknownType.create(); } else if (typeArgs.length > 1) { @@ -10844,12 +12849,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return UnknownType.create(); } - let type = typeArgs[0].type; + const type = typeArgs[0].type; - // A ClassVar should not allow generic types, but the typeshed - // stubs use this in a few cases. For now, just specialize - // it in a general way. - type = makeTopLevelTypeVarsConcrete(type); + // A ClassVar should not allow TypeVars or generic types parameterized + // by TypeVars. + if (requiresSpecialization(type, /* ignorePseudoGeneric */ true, /* ignoreSelf */ true)) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.classVarWithTypeVar(), + typeArgs[0].node ?? errorNode + ); + } return type; } @@ -10858,30 +12871,116 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // isn't a generic type and therefore doesn't have a typeParameter. 
// We'll abuse our internal types a bit by specializing it with // a type argument anyway. - function createTypeGuardType(errorNode: ParseNode, classType: ClassType, typeArgs: TypeResult[] | undefined): Type { - if (!typeArgs || typeArgs.length !== 1) { + function createTypeGuardType( + errorNode: ParseNode, + classType: ClassType, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags + ): Type { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if (!typeArgs) { + if ((flags & EvaluatorFlags.ExpectingTypeAnnotation) !== 0) { + addError(Localizer.Diagnostic.typeGuardArgCount(), errorNode); + } + + return classType; + } else if (typeArgs.length !== 1) { addError(Localizer.Diagnostic.typeGuardArgCount(), errorNode); + return UnknownType.create(); } - let typeArg: Type; - if (typeArgs && typeArgs.length > 0) { - typeArg = typeArgs[0].type; - if (!validateTypeArg(typeArgs[0])) { - typeArg = UnknownType.create(); - } - } else { - typeArg = UnknownType.create(); - } + const convertedTypeArgs = typeArgs.map((typeArg) => { + return convertToInstance(validateTypeArg(typeArg) ? typeArg.type : UnknownType.create()); + }); - return ClassType.cloneForSpecialization(classType, [convertToInstance(typeArg)], !!typeArgs); + return ClassType.cloneForSpecialization(classType, convertedTypeArgs, /* isTypeArgumentExplicit */ true); } - function createRequiredType( - classType: ClassType, - errorNode: ParseNode, - isRequired: boolean, - typeArgs: TypeResult[] | undefined + function createSelfType(classType: ClassType, errorNode: ParseNode, typeArgs: TypeResult[] | undefined) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + + // Self doesn't support any type arguments. 
+ if (typeArgs) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.typeArgsExpectingNone().format({ + name: classType.details.name, + }), + typeArgs[0].node ?? errorNode + ); + } + + const enclosingClass = ParseTreeUtils.getEnclosingClass(errorNode); + const enclosingClassTypeResult = enclosingClass ? getTypeOfClass(enclosingClass) : undefined; + if (!enclosingClassTypeResult) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.selfTypeContext(), + errorNode + ); + + return UnknownType.create(); + } + + const enclosingFunction = ParseTreeUtils.getEnclosingFunction(errorNode); + if (enclosingFunction) { + const functionFlags = getFunctionFlagsFromDecorators(enclosingFunction, /* isInClass */ true); + + // Check for static methods. + if (functionFlags & FunctionTypeFlags.StaticMethod) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.selfTypeContext(), + errorNode + ); + + return UnknownType.create(); + } + + if (enclosingFunction.parameters.length > 0) { + const firstParamTypeAnnotation = getTypeAnnotationForParameter(enclosingFunction, 0); + if ( + firstParamTypeAnnotation && + !ParseTreeUtils.isNodeContainedWithin(errorNode, firstParamTypeAnnotation) + ) { + const annotationType = getTypeOfAnnotation(firstParamTypeAnnotation, { + associateTypeVarsWithScope: true, + disallowRecursiveTypeAlias: true, + }); + if (!isTypeVar(annotationType) || !annotationType.details.isSynthesizedSelf) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.selfTypeWithTypedSelfOrCls(), + errorNode + ); + } + } + } + } + + return synthesizeTypeVarForSelfCls(enclosingClassTypeResult.classType, /* isClsParam */ true); + } + + function 
createRequiredType( + classType: ClassType, + errorNode: ParseNode, + isRequired: boolean, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags ): Type { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if (!typeArgs && (flags & EvaluatorFlags.ExpectingTypeAnnotation) === 0) { + return classType; + } + if (!typeArgs || typeArgs.length !== 1) { addError( isRequired ? Localizer.Diagnostic.requiredArgCount() : Localizer.Diagnostic.notRequiredArgCount(), @@ -10912,6 +13011,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + if ((flags & EvaluatorFlags.RequiredAllowed) !== 0) { + isUsageLegal = true; + } + if (!isUsageLegal) { addError( isRequired @@ -10925,7 +13028,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return typeArgType; } - function createUnpackType(errorNode: ParseNode, typeArgs: TypeResult[] | undefined): Type { + function createUnpackType(errorNode: ParseNode, typeArgs: TypeResult[] | undefined, flags: EvaluatorFlags): Type { if (!typeArgs || typeArgs.length !== 1) { addError(Localizer.Diagnostic.unpackArgCount(), errorNode); return UnknownType.create(); @@ -10936,12 +13039,47 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeArgType = typeArgType.subtypes[0]; } - if (!isVariadicTypeVar(typeArgType) || typeArgType.isVariadicUnpacked) { - addError(Localizer.Diagnostic.unpackExpectedTypeVarTuple(), errorNode); + const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); + + if ((flags & EvaluatorFlags.AllowUnpackedTupleOrTypeVarTuple) !== 0) { + if (isInstantiableClass(typeArgType) && !typeArgType.includeSubclasses && isTupleClass(typeArgType)) { + return ClassType.cloneForUnpacked(typeArgType); + } + + if (isVariadicTypeVar(typeArgType) && !typeArgType.isVariadicUnpacked) { + return TypeVarType.cloneForUnpacked(typeArgType); + 
} + + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.unpackExpectedTypeVarTuple(), + errorNode + ); + return UnknownType.create(); + } + + if ((flags & EvaluatorFlags.AllowUnpackedTypedDict) !== 0) { + if (isInstantiableClass(typeArgType) && ClassType.isTypedDictClass(typeArgType)) { + return ClassType.cloneForUnpacked(typeArgType); + } + + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.unpackExpectedTypedDict(), + errorNode + ); return UnknownType.create(); } - return TypeVarType.cloneForUnpacked(typeArgType); + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.unpackNotAllowed(), + errorNode + ); + return UnknownType.create(); } // Creates a "Final" type. @@ -11000,12 +13138,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return AnyType.create(); } - let typeArg0Type = typeArgs[0].type; - if (!validateTypeArg(typeArgs[0])) { - typeArg0Type = UnknownType.create(); - } - - return TypeBase.cloneForAnnotated(typeArg0Type); + return TypeBase.cloneForAnnotated(typeArgs[0].type); } // Creates one of several "special" types that are defined in typing.pyi @@ -11024,8 +13157,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isTupleTypeParam && typeArgs.length === 1 && typeArgs[0].isEmptyTupleShorthand) { typeArgs = []; } else { - let sawVariadic = false; - let reportedVariadicError = false; + let sawUnpacked = false; + const noteSawUnpacked = (typeArg: TypeResult) => { + if (sawUnpacked) { + if (!reportedUnpackedError) { + addError(Localizer.Diagnostic.variadicTypeArgsTooMany(), typeArg.node); + reportedUnpackedError = true; + } + } + sawUnpacked = true; + }; + let reportedUnpackedError = false; // Verify that we didn't receive any 
inappropriate types. typeArgs.forEach((typeArg, index) => { @@ -11046,15 +13188,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } else if (isParamSpec(typeArg.type) && allowParamSpec) { // Nothing to do - this is allowed. } else if (isVariadicTypeVar(typeArg.type) && paramLimit === undefined) { - // Make sure we have at most one unpacked variadic type variable. - if (sawVariadic) { - if (!reportedVariadicError) { - addError(Localizer.Diagnostic.variadicTypeArgsTooMany(), typeArg.node); - reportedVariadicError = true; - } - } + noteSawUnpacked(typeArg); validateVariadicTypeVarIsUnpacked(typeArg.type, typeArg.node); - sawVariadic = true; + } else if (paramLimit === undefined && isUnpacked(typeArg.type)) { + noteSawUnpacked(typeArg); + validateTypeArg(typeArg, { allowUnpackedTuples: true }); } else { validateTypeArg(typeArg); } @@ -11087,15 +13225,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Handle tuple type params as a special case. let returnType: Type; if (isTupleTypeParam) { + const tupleTypeArgTypes: TupleTypeArgument[] = []; + // If no type args are provided and it's a tuple, default to [Unknown, ...]. 
if (!typeArgs) { - typeArgTypes.push(UnknownType.create()); - typeArgTypes.push(AnyType.create(/* isEllipsis */ true)); + tupleTypeArgTypes.push({ type: UnknownType.create(), isUnbounded: true }); + } else { + typeArgs.forEach((typeArg, index) => { + if (index === 1 && isEllipsisType(typeArgTypes[index])) { + if (tupleTypeArgTypes.length === 1 && !tupleTypeArgTypes[0].isUnbounded) { + tupleTypeArgTypes[0] = { type: tupleTypeArgTypes[0].type, isUnbounded: true }; + } else { + addError(Localizer.Diagnostic.ellipsisSecondArg(), typeArg.node); + } + } else if (isUnpackedClass(typeArg.type) && typeArg.type.tupleTypeArguments) { + tupleTypeArgTypes.push(...typeArg.type.tupleTypeArguments); + } else { + tupleTypeArgTypes.push({ type: typeArgTypes[index], isUnbounded: false }); + } + }); } returnType = specializeTupleClass( classType, - typeArgTypes, + tupleTypeArgTypes, typeArgs !== undefined, /* stripLiterals */ false ); @@ -11104,26 +13257,49 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (!isCallable) { - TypeBase.setNonCallable(returnType); + TypeBase.setSpecialForm(returnType); } return returnType; } // Unpacks the index expression for a "Union[X, Y, Z]" type annotation. - function createUnionType(typeArgs?: TypeResult[]): Type { + function createUnionType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags + ): Type { const types: Type[] = []; - if (typeArgs) { - for (const typeArg of typeArgs) { - let typeArgType = typeArg.type; + if (!typeArgs) { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. 
+ if ((flags & EvaluatorFlags.ExpectingTypeAnnotation) !== 0) { + addError(Localizer.Diagnostic.unionTypeArgCount(), errorNode); + return NeverType.createNever(); + } - if (!validateTypeArg(typeArg, /* allowEmptyTuple */ false, /* allowVariadicTypeVar */ true)) { - typeArgType = UnknownType.create(); - } else if (!TypeBase.isInstantiable(typeArgType)) { - addExpectedClassDiagnostic(typeArgType, typeArg.node); - } + return classType; + } + + for (const typeArg of typeArgs) { + let typeArgType = typeArg.type; + + if (!validateTypeArg(typeArg, { allowVariadicTypeVar: true, allowUnpackedTuples: true })) { + typeArgType = UnknownType.create(); + } else if (!TypeBase.isInstantiable(typeArgType)) { + addExpectedClassDiagnostic(typeArgType, typeArg.node); + typeArgType = UnknownType.create(); + } + // If this is an unpacked tuple, explode out the individual items. + if (isUnpackedClass(typeArg.type) && typeArg.type.tupleTypeArguments) { + typeArg.type.tupleTypeArguments.forEach((tupleTypeArg) => { + types.push(convertToInstantiable(tupleTypeArg.type)); + }); + } else { // If this is an unpacked TypeVar, note that it is in a union so we can differentiate // between Unpack[Vs] and Union[Unpack[Vs]]. if (isTypeVar(typeArgType) && isVariadicTypeVar(typeArgType) && typeArgType.isVariadicUnpacked) { @@ -11134,37 +13310,56 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - if (types.length > 0) { - const unionType = combineTypes(types); - if (isUnion(unionType)) { - TypeBase.setNonCallable(unionType); + // Validate that we received at least two type arguments. One type argument + // is allowed if it's an unpacked variadic type var or tuple. None is also allowed + // since it is used to define NoReturn in typeshed stubs). 
+ if (types.length === 1) { + if (!isVariadicTypeVar(types[0]) && !isUnpacked(types[0]) && !isNoneInstance(types[0])) { + addError(Localizer.Diagnostic.unionTypeArgCount(), errorNode); } - return unionType; } - return NeverType.create(); + const unionType = combineTypes(types); + if (isUnion(unionType)) { + TypeBase.setSpecialForm(unionType); + } + + return unionType; } // Creates a type that represents "Generic[T1, T2, ...]", used in the // definition of a generic class. - function createGenericType(errorNode: ParseNode, classType: ClassType, typeArgs?: TypeResult[]): Type { - // Make sure there's at least one type arg. - if (!typeArgs || typeArgs.length === 0) { - addError(Localizer.Diagnostic.genericTypeArgMissing(), errorNode); + function createGenericType( + classType: ClassType, + errorNode: ParseNode, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags + ): Type { + if (!typeArgs) { + // If no type arguments are provided, the resulting type + // depends on whether we're evaluating a type annotation or + // we're in some other context. + if ((flags & (EvaluatorFlags.ExpectingTypeAnnotation | EvaluatorFlags.DisallowNakedGeneric)) !== 0) { + addError(Localizer.Diagnostic.genericTypeArgMissing(), errorNode); + } + + return classType; } - // Make sure that all of the type args are typeVars and are unique. const uniqueTypeVars: TypeVarType[] = []; if (typeArgs) { + // Make sure there's at least one type arg. + if (typeArgs.length === 0) { + addError(Localizer.Diagnostic.genericTypeArgMissing(), errorNode); + } + + // Make sure that all of the type args are typeVars and are unique. 
typeArgs.forEach((typeArg) => { if (!isTypeVar(typeArg.type)) { addError(Localizer.Diagnostic.genericTypeArgTypeVar(), typeArg.node); } else { - for (const typeVar of uniqueTypeVars) { - if (typeVar === typeArg.type) { - addError(Localizer.Diagnostic.genericTypeArgUnique(), typeArg.node); - break; - } + if (uniqueTypeVars.some((t) => isTypeSame(t, typeArg.type))) { + addError(Localizer.Diagnostic.genericTypeArgUnique(), typeArg.node); } uniqueTypeVars.push(typeArg.type); @@ -11276,12 +13471,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } const fileInfo = AnalyzerNodeInfo.getFileInfo(name); + const typeAliasScopeId = getScopeIdForNode(name); + + const boundTypeVars = typeParameters.filter( + (typeVar) => typeVar.scopeId !== typeAliasScopeId && typeVar.scopeType === TypeVarScopeType.Class + ); + if (boundTypeVars.length > 0) { + addError( + Localizer.Diagnostic.genericTypeAliasBoundTypeVar().format({ + names: boundTypeVars.map((t) => `${t.details.name}`).join(', '), + }), + errorNode + ); + } return TypeBase.cloneForTypeAlias( type, name.value, `${fileInfo.moduleName}.${name.value}`, - getScopeIdForNode(name), + typeAliasScopeId, typeParameters.length > 0 ? typeParameters : undefined ); } @@ -11310,15 +13518,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions baseClass = getBuiltInType(node, baseClassName); } else if (aliasMapEntry.module === 'collections') { // The typing.pyi file imports collections. 
- if (fileInfo.collectionsModulePath) { - const lookupResult = importLookup(fileInfo.collectionsModulePath); - if (lookupResult) { - const symbol = lookupResult.symbolTable.get(baseClassName); - if (symbol) { - baseClass = getEffectiveTypeOfSymbol(symbol); - } - } - } + baseClass = getTypeFromModule(node, baseClassName, ['collections']); } else if (aliasMapEntry.module === 'self') { const symbolWithScope = lookUpSymbolRecursive(node, baseClassName, /* honorCodeFlow */ false); if (symbolWithScope) { @@ -11379,20 +13579,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ['TypeAlias', { alias: '', module: 'builtins' }], ['Concatenate', { alias: '', module: 'builtins' }], ['TypeGuard', { alias: '', module: 'builtins' }], + ['StrictTypeGuard', { alias: '', module: 'builtins' }], ['Unpack', { alias: '', module: 'builtins' }], ['Required', { alias: '', module: 'builtins' }], ['NotRequired', { alias: '', module: 'builtins' }], + ['Self', { alias: '', module: 'builtins' }], + ['NoReturn', { alias: '', module: 'builtins' }], + ['Never', { alias: '', module: 'builtins' }], + ['LiteralString', { alias: '', module: 'builtins' }], ]); const aliasMapEntry = specialTypes.get(assignedName); if (aliasMapEntry) { - const cachedType = readTypeCache(node); + const cachedType = readTypeCache(node, EvaluatorFlags.None); if (cachedType) { assert(isInstantiableClass(cachedType)); return cachedType as ClassType; } const specialType = createSpecialBuiltInClass(node, assignedName, aliasMapEntry); - writeTypeCache(node, specialType, /* isIncomplete */ false); + writeTypeCache(node, specialType, EvaluatorFlags.None, /* isIncomplete */ false); return specialType; } @@ -11419,6 +13624,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ['_promote', { alias: '', module: 'builtins' }], ['no_type_check', { alias: '', module: 'builtins' }], ['NoReturn', { alias: '', module: 'builtins' }], + ['Never', { alias: '', module: 'builtins' 
}], ['Counter', { alias: 'Counter', module: 'collections' }], ['List', { alias: 'list', module: 'builtins' }], ['Dict', { alias: 'dict', module: 'builtins' }], @@ -11445,12 +13651,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If the entire statement has already been evaluated, don't // re-evaluate it. - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } // Is this type already cached? - let rightHandType = readTypeCache(node.rightExpression); + let rightHandType = readTypeCache(node.rightExpression, EvaluatorFlags.None); let isIncomplete = false; let expectedTypeDiagAddendum: DiagnosticAddendum | undefined; @@ -11460,7 +13666,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (fileInfo.isTypingStubFile || fileInfo.isTypingExtensionsStubFile) { rightHandType = handleTypingStubAssignment(node); if (rightHandType) { - writeTypeCache(node.rightExpression, rightHandType, /* isIncomplete */ false); + writeTypeCache(node.rightExpression, rightHandType, EvaluatorFlags.None, /* isIncomplete */ false); } } @@ -11468,12 +13674,22 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Determine whether there is a declared type. const declaredType = getDeclaredTypeForExpression(node.leftExpression, { method: 'set' }); - let flags: EvaluatorFlags = EvaluatorFlags.DoNotSpecialize; + let flags: EvaluatorFlags = EvaluatorFlags.None; if (fileInfo.isStubFile) { // An assignment of ellipsis means "Any" within a type stub file. flags |= EvaluatorFlags.ConvertEllipsisToUnknown; } + if ( + node.rightExpression.nodeType === ParseNodeType.Name || + node.rightExpression.nodeType === ParseNodeType.MemberAccess + ) { + // Don't specialize a generic class on assignment (e.g. "x = list" + // or "x = collections.OrderedDict") because we may want to later + // specialize it (e.g. "x[int]"). 
+ flags |= EvaluatorFlags.DoNotSpecialize; + } + let typeAliasNameNode: NameNode | undefined; let isSpeculativeTypeAlias = false; @@ -11483,6 +13699,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions EvaluatorFlags.EvaluateStringLiteralAsType | EvaluatorFlags.ParamSpecDisallowed | EvaluatorFlags.TypeVarTupleDisallowed; + flags &= ~EvaluatorFlags.DoNotSpecialize; typeAliasNameNode = (node.leftExpression as TypeAnnotationNode).valueExpression as NameNode; } else if (node.leftExpression.nodeType === ParseNodeType.Name) { @@ -11512,10 +13729,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeAliasTypeVar.scopeId = scopeId; // Write the type back to the type cache. It will be replaced below. - writeTypeCache(node, typeAliasTypeVar, /* isIncomplete */ false); - writeTypeCache(node.leftExpression, typeAliasTypeVar, /* isIncomplete */ false); + writeTypeCache(node, typeAliasTypeVar, /* flags */ undefined, /* isIncomplete */ false); + writeTypeCache( + node.leftExpression, + typeAliasTypeVar, + /* flags */ undefined, + /* isIncomplete */ false + ); if (node.leftExpression.nodeType === ParseNodeType.TypeAnnotation) { - writeTypeCache(node.leftExpression.valueExpression, typeAliasTypeVar, /* isIncomplete */ false); + writeTypeCache( + node.leftExpression.valueExpression, + typeAliasTypeVar, + /* flags */ undefined, + /* isIncomplete */ false + ); } } @@ -11541,9 +13768,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If there was a declared type, make sure the RHS value is compatible. if (declaredType) { - const diagAddendum = new DiagnosticAddendum(); - - if (canAssignType(declaredType, srcType, diagAddendum)) { + if (canAssignType(declaredType, srcType)) { // Narrow the resulting type if possible. 
if (!isAnyOrUnknown(srcType)) { srcType = narrowTypeBasedOnAssignment(declaredType, srcType); @@ -11619,14 +13844,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isIncomplete, node.rightExpression, /* ignoreEmptyContainers */ true, + /* allowAssignmentToFinalVar */ true, expectedTypeDiagAddendum ); - writeTypeCache(node, rightHandType, isIncomplete); + writeTypeCache(node, rightHandType, EvaluatorFlags.None, isIncomplete); } function evaluateTypesForAugmentedAssignment(node: AugmentedAssignmentNode): void { - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -11638,12 +13864,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.rightExpression ); - writeTypeCache(node, destTypeResult.type, !!destTypeResult.isIncomplete); + writeTypeCache(node, destTypeResult.type, EvaluatorFlags.None, !!destTypeResult.isIncomplete); } function getTypeOfClass(node: ClassNode): ClassTypeResult | undefined { // Is this type already cached? - const cachedClassType = readTypeCache(node.name); + const cachedClassType = readTypeCache(node.name, EvaluatorFlags.None); if (cachedClassType) { if (!isInstantiableClass(cachedClassType)) { @@ -11651,7 +13877,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // is located in an unreachable code block. return undefined; } - return { classType: cachedClassType, decoratedType: readTypeCache(node) || UnknownType.create() }; + return { + classType: cachedClassType, + decoratedType: readTypeCache(node, EvaluatorFlags.None) || UnknownType.create(), + }; } // The type wasn't cached, so we need to create a new one. 
@@ -11711,8 +13940,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions setSymbolResolutionPartialType(classSymbol, classDecl, classType); } classType.details.flags |= ClassTypeFlags.PartiallyConstructed; - writeTypeCache(node, classType, /* isIncomplete */ false); - writeTypeCache(node.name, classType, /* isIncomplete */ false); + writeTypeCache(node, classType, /* flags */ undefined, /* isIncomplete */ false); + writeTypeCache(node.name, classType, /* flags */ undefined, /* isIncomplete */ false); // Keep a list of unique type parameters that are used in the // base class arguments. @@ -11726,7 +13955,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let metaclassNode: ExpressionNode | undefined; let exprFlags = EvaluatorFlags.ExpectingType | - EvaluatorFlags.GenericClassTypeAllowed | + EvaluatorFlags.AllowGenericClassType | + EvaluatorFlags.DisallowNakedGeneric | EvaluatorFlags.DisallowTypeVarsWithScopeId | EvaluatorFlags.AssociateTypeVarsWithCurrentScope; if (fileInfo.isStubFile) { @@ -11770,10 +14000,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // If the class directly derives from NamedTuple (in Python 3.6 or - // newer), it's considered a dataclass. + // newer), it's considered a (read-only) dataclass. if (fileInfo.executionEnvironment.pythonVersion >= PythonVersion.V3_6) { if (ClassType.isBuiltIn(argType, 'NamedTuple')) { - classType.details.flags |= ClassTypeFlags.DataClass; + classType.details.flags |= + ClassTypeFlags.DataClass | ClassTypeFlags.ReadOnlyInstanceVariables; } } @@ -11906,7 +14137,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Make sure we don't have 'object' derive from itself. Infinite // recursion will result. 
- if (!ClassType.isBuiltIn(classType, 'object')) { + if ( + !ClassType.isBuiltIn(classType, 'object') && + classType.details.baseClasses.filter((baseClass) => isClass(baseClass)).length === 0 + ) { + // If there are no other (known) base classes, the class implicitly derives from object. classType.details.baseClasses.push(getBuiltInType(node, 'object')); } @@ -11937,37 +14172,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions classType.details.fields = innerScope?.symbolTable || new Map(); // Determine whether the class's instance variables are constrained - // to those defined by __slots__. If so, build a complete list of - // all slots names defined by the class hierarchy. + // to those defined by __slots__. We need to do this prior to dataclass + // processing because dataclasses can implicitly add to the slots + // list. const slotsNames = innerScope?.getSlotsNames(); if (slotsNames) { classType.details.localSlotsNames = slotsNames; - - let isLimitedToSlots = true; - const extendedSlotsNames = [...slotsNames]; - - classType.details.baseClasses.forEach((baseClass) => { - if (isInstantiableClass(baseClass)) { - if ( - !ClassType.isBuiltIn(baseClass, 'object') && - !ClassType.isBuiltIn(baseClass, 'type') && - !ClassType.isBuiltIn(baseClass, 'Generic') - ) { - if (baseClass.details.inheritedSlotsNames === undefined) { - isLimitedToSlots = false; - } else { - extendedSlotsNames.push(...baseClass.details.inheritedSlotsNames); - } - } - } else { - isLimitedToSlots = false; - } - }); - - if (isLimitedToSlots) { - classType.details.inheritedSlotsNames = extendedSlotsNames; - } } + if (ClassType.isTypedDictClass(classType)) { synthesizeTypedDictClassMethods(evaluatorInterface, node, classType); } @@ -12002,7 +14214,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions typeVar.details.isSynthesized = true; typeVar.scopeId = getScopeIdForNode(initDeclNode); typeVar.details.boundType = 
UnknownType.create(); - return TypeVarType.cloneForScopeId(typeVar, getScopeIdForNode(node), node.name.value); + return TypeVarType.cloneForScopeId( + typeVar, + getScopeIdForNode(node), + node.name.value, + TypeVarScopeType.Class + ); }); } } @@ -12013,7 +14230,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Determine if the class has a custom __class_getitem__ method. This applies // only to classes that have no type parameters, since those with type parameters // are assumed to follow normal subscripting semantics for generic classes. - if (classType.details.typeParameters.length === 0) { + if (classType.details.typeParameters.length === 0 && !ClassType.isBuiltIn(classType, 'type')) { if ( classType.details.baseClasses.some( (baseClass) => isInstantiableClass(baseClass) && ClassType.hasCustomClassGetItem(baseClass) @@ -12028,6 +14245,15 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (metaclassNode) { const metaclassType = getTypeOfExpression(metaclassNode, undefined, exprFlags).type; if (isInstantiableClass(metaclassType) || isUnknown(metaclassType)) { + if (requiresSpecialization(metaclassType)) { + addDiagnostic( + fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.metaclassIsGeneric(), + metaclassNode + ); + } + classType.details.declaredMetaclass = metaclassType; if (isInstantiableClass(metaclassType)) { if (ClassType.isBuiltIn(metaclassType, 'EnumMeta')) { @@ -12117,48 +14343,113 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - if (isInstantiableClass(effectiveMetaclass)) { - // Mark the class as a dataclass if the metaclass provides dataclass-like behaviors. 
- if (effectiveMetaclass.details.metaclassDataClassTransform) { - applyDataClassDefaultBehaviors(classType, effectiveMetaclass.details.metaclassDataClassTransform); - applyDataClassMetaclassBehaviorOverrides(evaluatorInterface, classType, initSubclassArgs); + // Determine whether this class derives from (or has a metaclass) that imbues + // it with dataclass-like behaviors. If so, we'll apply those here. + let dataClassBehaviors: DataClassBehaviors | undefined; + if (isInstantiableClass(effectiveMetaclass) && effectiveMetaclass.details.classDataClassTransform) { + dataClassBehaviors = effectiveMetaclass.details.classDataClassTransform; + } else { + const baseClassDataTransform = classType.details.mro.find((mroClass) => { + return isClass(mroClass) && mroClass.details.classDataClassTransform !== undefined; + }); + + if (baseClassDataTransform) { + dataClassBehaviors = (baseClassDataTransform as ClassType).details.classDataClassTransform!; } } + if (dataClassBehaviors) { + applyDataClassDefaultBehaviors(classType, dataClassBehaviors); + applyDataClassClassBehaviorOverrides(evaluatorInterface, classType, initSubclassArgs); + } + // Clear the "partially constructed" flag. classType.details.flags &= ~ClassTypeFlags.PartiallyConstructed; // Synthesize dataclass methods. if (ClassType.isDataClass(classType)) { - let skipSynthesizedInit = ClassType.isSkipSynthesizedDataClassInit(classType); + const skipSynthesizedInit = ClassType.isSkipSynthesizedDataClassInit(classType); + let hasExistingInitMethod = skipSynthesizedInit; + + // See if there's already a non-synthesized __init__ method. + // We shouldn't override it. if (!skipSynthesizedInit) { - // See if there's already a non-synthesized __init__ method. - // We shouldn't override it. 
const initSymbol = lookUpClassMember(classType, '__init__', ClassMemberLookupFlags.SkipBaseClasses); if (initSymbol) { const initSymbolType = getTypeOfMember(initSymbol); if (isFunction(initSymbolType)) { if (!FunctionType.isSynthesizedMethod(initSymbolType)) { - skipSynthesizedInit = true; + hasExistingInitMethod = true; } } else { - skipSynthesizedInit = true; + hasExistingInitMethod = true; } } } - synthesizeDataClassMethods(evaluatorInterface, node, classType, skipSynthesizedInit); + let skipSynthesizeHash = false; + const hashSymbol = lookUpClassMember(classType, '__hash__', ClassMemberLookupFlags.SkipBaseClasses); + if (hashSymbol) { + const hashSymbolType = getTypeOfMember(hashSymbol); + if (isFunction(hashSymbolType) && !FunctionType.isSynthesizedMethod(hashSymbolType)) { + skipSynthesizeHash = true; + } + } + + synthesizeDataClassMethods( + evaluatorInterface, + node, + classType, + skipSynthesizedInit, + hasExistingInitMethod, + skipSynthesizeHash + ); + } + + // Build a complete list of all slots names defined by the class hierarchy. + // This needs to be done after dataclass processing. + if (classType.details.localSlotsNames) { + let isLimitedToSlots = true; + const extendedSlotsNames = [...classType.details.localSlotsNames]; + + classType.details.baseClasses.forEach((baseClass) => { + if (isInstantiableClass(baseClass)) { + if ( + !ClassType.isBuiltIn(baseClass, 'object') && + !ClassType.isBuiltIn(baseClass, 'type') && + !ClassType.isBuiltIn(baseClass, 'Generic') + ) { + if (baseClass.details.inheritedSlotsNames === undefined) { + isLimitedToSlots = false; + } else { + extendedSlotsNames.push(...baseClass.details.inheritedSlotsNames); + } + } + } else { + isLimitedToSlots = false; + } + }); + + if (isLimitedToSlots) { + classType.details.inheritedSlotsNames = extendedSlotsNames; + } } // Update the undecorated class type. 
- writeTypeCache(node.name, classType, /* isIncomplete */ false); + writeTypeCache(node.name, classType, EvaluatorFlags.None, /* isIncomplete */ false); // Update the decorated class type. - writeTypeCache(node, decoratedType, /* isIncomplete */ false); + writeTypeCache(node, decoratedType, EvaluatorFlags.None, /* isIncomplete */ false); - // Validate __init_subclass__ call. + // Validate __init_subclass__ call or metaclass keyword arguments. validateInitSubclassArgs(node, classType, initSubclassArgs); + // Stash away a reference to the UnionType class if we encounter it. + // There's no easy way to otherwise reference it. + if (ClassType.isBuiltIn(classType, 'UnionType')) { + unionType = ClassType.cloneAsInstance(classType); + } + return { classType, decoratedType }; } @@ -12196,13 +14487,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions originalClassType: ClassType, decoratorNode: DecoratorNode ): Type { - const decoratorType = getTypeOfExpression(decoratorNode.expression).type; + const fileInfo = AnalyzerNodeInfo.getFileInfo(decoratorNode); + let flags = fileInfo.isStubFile ? 
EvaluatorFlags.AllowForwardReferences : EvaluatorFlags.None; + if (decoratorNode.expression.nodeType !== ParseNodeType.Call) { + flags |= EvaluatorFlags.DoNotSpecialize; + } + const decoratorType = getTypeOfExpression(decoratorNode.expression, /* expectedType */ undefined, flags).type; if (decoratorNode.expression.nodeType === ParseNodeType.Call) { - const decoratorCallType = getTypeOfExpression(decoratorNode.expression.leftExpression).type; + const decoratorCallType = getTypeOfExpression( + decoratorNode.expression.leftExpression, + /* expectedType */ undefined, + flags | EvaluatorFlags.DoNotSpecialize + ).type; + if (isFunction(decoratorCallType)) { - if (decoratorCallType.details.name === '__dataclass_transform__') { - originalClassType.details.metaclassDataClassTransform = validateDataClassTransformDecorator( + if ( + decoratorCallType.details.name === '__dataclass_transform__' || + decoratorCallType.details.builtInName === 'dataclass_transform' + ) { + originalClassType.details.classDataClassTransform = validateDataClassTransformDecorator( evaluatorInterface, decoratorNode.expression ); @@ -12219,12 +14523,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions dataclassBehaviors, /* callNode */ undefined ); + return inputClassType; } } else if (isFunction(decoratorType)) { if (decoratorType.details.builtInName === 'final') { originalClassType.details.flags |= ClassTypeFlags.Final; + + // Don't call getTypeFromDecorator for final. We'll hard-code its + // behavior because its function definition results in a cyclical + // dependency between builtins, typing and _typeshed stubs. + return inputClassType; } else if (decoratorType.details.builtInName === 'runtime_checkable') { originalClassType.details.flags |= ClassTypeFlags.RuntimeCheckable; + + // Don't call getTypeFromDecorator for runtime_checkable. It appears + // frequently in stubs, and it's a waste of time to validate its + // parameters. 
+ return inputClassType; } // Is this a dataclass decorator? @@ -12233,10 +14548,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (decoratorNode.expression.nodeType === ParseNodeType.Call) { callNode = decoratorNode.expression; - const decoratorCallType = getTypeOfExpression(callNode.leftExpression).type; + const decoratorCallType = getTypeOfExpression( + callNode.leftExpression, + /* expectedType */ undefined, + flags | EvaluatorFlags.DoNotSpecialize + ).type; dataclassBehaviors = getDataclassDecoratorBehaviors(decoratorCallType); } else { - const decoratorType = getTypeOfExpression(decoratorNode.expression).type; + const decoratorType = getTypeOfExpression( + decoratorNode.expression, + /* expectedType */ undefined, + flags + ).type; dataclassBehaviors = getDataclassDecoratorBehaviors(decoratorType); } @@ -12256,7 +14579,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions classType, '__init_subclass__', { method: 'get' }, - new DiagnosticAddendum(), + /* diag */ undefined, MemberAccessFlags.AccessClassMembersOnly | MemberAccessFlags.SkipObjectBaseClass | MemberAccessFlags.SkipOriginalClass, @@ -12276,13 +14599,98 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions NoneType.createInstance() ); } - } + } else if (classType.details.effectiveMetaclass && isClass(classType.details.effectiveMetaclass)) { + // See if the metaclass has a `__new__` method that accepts keyword parameters. + const newMethodMember = lookUpClassMember( + classType.details.effectiveMetaclass, + '__new__', + ClassMemberLookupFlags.SkipTypeBaseClass + ); - // Evaluate all of the expressions so they are checked and marked referenced. 
- argList.forEach((arg) => { - if (arg.valueExpression) { - getTypeOfExpression(arg.valueExpression); - } + if (newMethodMember) { + const newMethodType = getTypeOfMember(newMethodMember); + if (isFunction(newMethodType)) { + const paramListDetails = getParameterListDetails(newMethodType); + + if (paramListDetails.firstKeywordOnlyIndex !== undefined) { + // Build a map of the keyword-only parameters. + const paramMap = new Map(); + for (let i = paramListDetails.firstKeywordOnlyIndex; i < paramListDetails.params.length; i++) { + const paramInfo = paramListDetails.params[i]; + if (paramInfo.param.category === ParameterCategory.Simple && paramInfo.param.name) { + paramMap.set(paramInfo.param.name, i); + } + } + + argList.forEach((arg) => { + if (arg.argumentCategory === ArgumentCategory.Simple && arg.name) { + const paramIndex = paramMap.get(arg.name.value) ?? paramListDetails.kwargsIndex; + + if (paramIndex !== undefined) { + const paramInfo = paramListDetails.params[paramIndex]; + const argParam: ValidateArgTypeParams = { + paramCategory: paramInfo.param.category, + paramType: FunctionType.getEffectiveParameterType( + newMethodType, + paramInfo.index + ), + requiresTypeVarMatching: false, + argument: arg, + errorNode: arg.valueExpression ?? errorNode, + }; + + validateArgType( + argParam, + new TypeVarMap(), + newMethodType, + /* skipUnknownCheck */ true, + /* skipOverloadArg */ true, + /* useNarrowBoundOnly */ false, + /* conditionFilter */ undefined + ); + paramMap.delete(arg.name.value); + } else { + addDiagnostic( + AnalyzerNodeInfo.getFileInfo(node).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + Localizer.Diagnostic.paramNameMissing().format({ name: arg.name.value }), + arg.name ?? errorNode + ); + } + } + }); + + // See if we have any remaining unmatched parameters without + // default values. 
+ const unassignedParams: string[] = []; + paramMap.forEach((index, paramName) => { + const paramInfo = paramListDetails.params[index]; + if (!paramInfo.param.hasDefault) { + unassignedParams.push(paramName); + } + }); + + if (unassignedParams.length > 0) { + const missingParamNames = unassignedParams.map((p) => `"${p}"`).join(', '); + addDiagnostic( + AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, + DiagnosticRule.reportGeneralTypeIssues, + unassignedParams.length === 1 + ? Localizer.Diagnostic.argMissingForParam().format({ name: missingParamNames }) + : Localizer.Diagnostic.argMissingForParams().format({ names: missingParamNames }), + errorNode + ); + } + } + } + } + } + + // Evaluate all of the expressions so they are checked and marked referenced. + argList.forEach((arg) => { + if (arg.valueExpression) { + getTypeOfExpression(arg.valueExpression); + } }); } @@ -12290,7 +14698,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const fileInfo = AnalyzerNodeInfo.getFileInfo(node); // Is this type already cached? - const cachedFunctionType = readTypeCache(node.name) as FunctionType; + const cachedFunctionType = readTypeCache(node.name, EvaluatorFlags.None) as FunctionType; if (cachedFunctionType) { if (!isFunction(cachedFunctionType)) { @@ -12298,7 +14706,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // function declaration falls within an unreachable code block. 
return undefined; } - return { functionType: cachedFunctionType, decoratedType: readTypeCache(node) || UnknownType.create() }; + return { + functionType: cachedFunctionType, + decoratedType: readTypeCache(node, EvaluatorFlags.None) || UnknownType.create(), + }; } let functionDecl: FunctionDeclaration | undefined; @@ -12331,7 +14742,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (fileInfo.isStubFile) { functionFlags |= FunctionTypeFlags.StubDefinition; - } else if (fileInfo.isInPyTypedPackage && evaluatorOptions.disableInferenceForPyTypedSources) { + } else if (fileInfo.isInPyTypedPackage) { functionFlags |= FunctionTypeFlags.PyTypedDefinition; } @@ -12365,8 +14776,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (functionDecl && functionSymbol) { setSymbolResolutionPartialType(functionSymbol.symbol, functionDecl, functionType); } - writeTypeCache(node, functionType, /* isIncomplete */ false); - writeTypeCache(node.name, functionType, /* isIncomplete */ false); + writeTypeCache(node, functionType, /* flags */ undefined, /* isIncomplete */ false); + writeTypeCache(node.name, functionType, /* flags */ undefined, /* isIncomplete */ false); // Is this an "__init__" method within a pseudo-generic class? If so, // we'll add generic types to the constructor's parameters. 
@@ -12415,6 +14826,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } }; + let paramsArePositionOnly = true; + node.parameters.forEach((param, index) => { let paramType: Type | undefined; let annotatedType: Type | undefined; @@ -12452,11 +14865,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (paramTypeNode) { - annotatedType = getTypeOfAnnotation(paramTypeNode, { - associateTypeVarsWithScope: true, - allowTypeVarTuple: param.category === ParameterCategory.VarArgList, - disallowRecursiveTypeAlias: true, - }); + annotatedType = getTypeOfParameterAnnotation(paramTypeNode, param.category); if (isVariadicTypeVar(annotatedType) && !annotatedType.isVariadicUnpacked) { addError( @@ -12478,17 +14887,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (annotatedType) { - // PEP 484 indicates that if a parameter has a default value of 'None' - // the type checker should assume that the type is optional (i.e. a union - // of the specified type and 'None'). 
- if (param.defaultValue && param.defaultValue.nodeType === ParseNodeType.Constant) { - if (param.defaultValue.constType === KeywordType.None) { - isNoneWithoutOptional = true; - - if (!fileInfo.diagnosticRuleSet.strictParameterNoneValue) { - annotatedType = combineTypes([annotatedType, NoneType.createInstance()]); - } - } + const adjustedAnnotatedType = adjustParameterAnnotatedType(param, annotatedType); + if (adjustedAnnotatedType !== annotatedType) { + annotatedType = adjustedAnnotatedType; + isNoneWithoutOptional = true; } } @@ -12539,15 +14941,39 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions paramType = annotatedType; } + const isPositionOnlyParam = + param.category === ParameterCategory.Simple && param.name && isPrivateName(param.name.value); + const isPositionOnlySeparator = param.category === ParameterCategory.Simple && !param.name; + + if (index > 0 && paramsArePositionOnly && !isPositionOnlyParam && !isPositionOnlySeparator) { + // Insert an implicit "position-only parameter" separator. + FunctionType.addParameter(functionType, { + category: ParameterCategory.Simple, + type: UnknownType.create(), + }); + } + + if (!isPositionOnlyParam || isPositionOnlySeparator) { + paramsArePositionOnly = false; + } + + // If there was no annotation for the parameter, infer its type if possible. + let isTypeInferred = false; + if (!paramType) { + isTypeInferred = true; + paramType = inferParameterType(node, functionType.details.flags, index, containingClassType); + } + const functionParam: FunctionParameter = { category: param.category, name: param.name ? param.name.value : undefined, hasDefault: !!param.defaultValue, defaultValueExpression: param.defaultValue, defaultType: defaultValueType, - type: paramType || UnknownType.create(), + type: paramType ?? 
UnknownType.create(), typeAnnotation: paramTypeNode, hasDeclaredType: !!paramTypeNode, + isTypeInferred, }; FunctionType.addParameter(functionType, functionParam); @@ -12560,23 +14986,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } }); - if (containingClassType) { - // If the first parameter doesn't have an explicit type annotation, - // provide a type if it's an instance, class or constructor method. - if (functionType.details.parameters.length > 0) { - const typeAnnotation = getTypeAnnotationForParameter(node, 0); - if (!typeAnnotation) { - const inferredParamType = inferFirstParamType(functionType.details.flags, containingClassType); - if (inferredParamType) { - functionType.details.parameters[0].type = inferredParamType; - if (!isAnyOrUnknown(inferredParamType)) { - functionType.details.parameters[0].isTypeInferred = true; - } - - paramTypes[0] = inferredParamType; - } - } - } + if (paramsArePositionOnly && functionType.details.parameters.length > 0) { + // Insert an implicit "position-only parameter" separator. + FunctionType.addParameter(functionType, { + category: ParameterCategory.Simple, + type: UnknownType.create(), + }); } // Update the types for the nodes associated with the parameters. @@ -12586,10 +15001,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isUnknown(paramType)) { functionType.details.flags |= FunctionTypeFlags.UnannotatedParams; } - writeTypeCache(paramNameNode, paramType, /* isIncomplete */ false); + writeTypeCache(paramNameNode, paramType, EvaluatorFlags.None, /* isIncomplete */ false); } }); + // If the function ends in P.args and P.kwargs parameters, make it exempt from + // args/kwargs compatibility checks. This is important for protocol comparisons. 
+ if (paramTypes.length >= 2) { + const paramType1 = paramTypes[paramTypes.length - 2]; + const paramType2 = paramTypes[paramTypes.length - 1]; + if ( + isParamSpec(paramType1) && + paramType1.paramSpecAccess === 'args' && + isParamSpec(paramType2) && + paramType2.paramSpecAccess === 'kwargs' + ) { + functionType.details.flags |= FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck; + } + } + // If there was a defined return type, analyze that first so when we // walk the contents of the function, return statements can be // validated against this type. @@ -12626,6 +15056,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + // If the return type is explicitly annotated as a generator, mark the + // function as a generator even though it may not contain a "yield" statement. + // This is important for generator functions declared in stub files, abstract + // methods or protocol definitions. + if (fileInfo.isStubFile || ParseTreeUtils.isSuiteEmpty(node.suite)) { + if ( + functionType.details.declaredReturnType && + isClassInstance(functionType.details.declaredReturnType) && + ClassType.isBuiltIn(functionType.details.declaredReturnType, [ + 'Generator', + 'AsyncGenerator', + 'AwaitableGenerator', + ]) + ) { + functionType.details.flags |= FunctionTypeFlags.Generator; + } + } + // If it's an async function, wrap the return type in an Awaitable or Generator. const preDecoratedType = node.isAsync ? 
createAsyncFunction(node, functionType) : functionType; @@ -12666,20 +15114,138 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions decoratedType = addOverloadsToFunctionType(node, decoratedType); } - writeTypeCache(node.name, functionType, /* isIncomplete */ false); - writeTypeCache(node, decoratedType, /* isIncomplete */ false); + writeTypeCache(node.name, functionType, EvaluatorFlags.None, /* isIncomplete */ false); + writeTypeCache(node, decoratedType, EvaluatorFlags.None, /* isIncomplete */ false); return { functionType, decoratedType }; } - // Synthesizes the "self" or "cls" parameter type if they are not explicitly annotated. - function inferFirstParamType(flags: FunctionTypeFlags, containingClassType: ClassType): Type | undefined { - if ((flags & FunctionTypeFlags.StaticMethod) === 0) { - if (containingClassType) { - const hasClsParam = - (flags & (FunctionTypeFlags.ClassMethod | FunctionTypeFlags.ConstructorMethod)) !== 0; - return synthesizeTypeVarForSelfCls(containingClassType, hasClsParam); + function adjustParameterAnnotatedType(param: ParameterNode, type: Type): Type { + // PEP 484 indicates that if a parameter has a default value of 'None' + // the type checker should assume that the type is optional (i.e. a union + // of the specified type and 'None'). Skip this step if the type is already + // optional to avoid losing alias names when combining the types. 
+ if ( + param.defaultValue?.nodeType === ParseNodeType.Constant && + param.defaultValue.constType === KeywordType.None && + !isOptionalType(type) && + !AnalyzerNodeInfo.getFileInfo(param).diagnosticRuleSet.strictParameterNoneValue + ) { + type = combineTypes([type, NoneType.createInstance()]); + } else if (isTypeVar(type) && param.defaultValue && type.scopeType === TypeVarScopeType.Function) { + // Handle the case where a default argument type is provided when the + // parameter is annotated with a "raw" function-scoped type variable, as in: + // "def foo(value: T = 3)" + // In this case, we need to include the default value type in a union. + const defaultArgType = getTypeOfExpression( + param.defaultValue, + type, + EvaluatorFlags.ConvertEllipsisToAny + ).type; + + if (!isAny(defaultArgType)) { + type = combineTypes([type, defaultArgType]); + } + } + + return type; + } + + // Attempts to infer an unannotated parameter type from available context. + function inferParameterType( + functionNode: FunctionNode, + functionFlags: FunctionTypeFlags, + paramIndex: number, + containingClassType: ClassType | undefined + ) { + // Is the function a method within a class? If so, see if a base class + // defines the same method and provides annotations. 
+ if (containingClassType) { + if (paramIndex === 0) { + if ((functionFlags & FunctionTypeFlags.StaticMethod) === 0) { + const hasClsParam = + (functionFlags & (FunctionTypeFlags.ClassMethod | FunctionTypeFlags.ConstructorMethod)) !== 0; + return synthesizeTypeVarForSelfCls(containingClassType, hasClsParam); + } + } + + const methodName = functionNode.name.value; + const baseClassMemberInfo = lookUpClassMember( + containingClassType, + methodName, + ClassMemberLookupFlags.SkipOriginalClass + ); + + if (baseClassMemberInfo) { + const memberDecls = baseClassMemberInfo.symbol.getDeclarations(); + if (memberDecls.length === 1 && memberDecls[0].type === DeclarationType.Function) { + const baseClassMethodNode = memberDecls[0].node; + + // Does the signature match exactly with the exception of annotations? + if ( + baseClassMethodNode.parameters.length === functionNode.parameters.length && + baseClassMethodNode.parameters.every((param, index) => { + const overrideParam = functionNode.parameters[index]; + return ( + overrideParam.name?.value === param.name?.value && + overrideParam.category === param.category + ); + }) + ) { + const baseClassParam = baseClassMethodNode.parameters[paramIndex]; + const baseClassParamAnnotation = + baseClassParam.typeAnnotation ?? baseClassParam.typeAnnotationComment; + if (baseClassParamAnnotation) { + let inferredParamType = getTypeOfParameterAnnotation( + baseClassParamAnnotation, + functionNode.parameters[paramIndex].category + ); + + const fileInfo = AnalyzerNodeInfo.getFileInfo(functionNode); + if (fileInfo.isInPyTypedPackage && !fileInfo.isStubFile) { + inferredParamType = TypeBase.cloneForAmbiguousType(inferredParamType); + } + + return inferredParamType; + } + } + } + } + } + + // If the parameter has a default argument value, we may be able to infer its + // type from this information. 
+ const paramValueExpr = functionNode.parameters[paramIndex].defaultValue; + if (paramValueExpr) { + const defaultValueType = getTypeOfExpression( + paramValueExpr, + /* expectedType */ undefined, + EvaluatorFlags.ConvertEllipsisToAny + ).type; + + let inferredParamType: Type | undefined; + if (isNoneInstance(defaultValueType)) { + // Infer Optional[Unknown] in this case. + inferredParamType = combineTypes([NoneType.createInstance(), UnknownType.create()]); + } else { + // Do not infer certain types like tuple because it's likely to be + // more restrictive (narrower) than intended. + if ( + !isClassInstance(defaultValueType) || + !ClassType.isBuiltIn(defaultValueType, ['tuple', 'list', 'set', 'dict']) + ) { + inferredParamType = stripLiteralValue(defaultValueType); + } } + + if (inferredParamType) { + const fileInfo = AnalyzerNodeInfo.getFileInfo(functionNode); + if (fileInfo.isInPyTypedPackage && !fileInfo.isStubFile) { + inferredParamType = TypeBase.cloneForAmbiguousType(inferredParamType); + } + } + + return inferredParamType; } return undefined; @@ -12699,26 +15265,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return type; } - if (tupleClassType && isInstantiableClass(tupleClassType)) { - let tupleTypeArgs: Type[]; - let isForVariadic = false; - - if (isVariadicTypeVar(type) && type.isVariadicUnpacked) { - // Create a tuple[*X] type. - tupleTypeArgs = [type]; - isForVariadic = true; - } else { - // Create a tuple[X, ...] type. 
- tupleTypeArgs = [type, AnyType.create(/* isEllipsis */ true)]; - } + if (isUnpackedClass(type)) { + return ClassType.cloneForUnpacked(type, /* isUnpackedTuple */ false); + } + if (tupleClassType && isInstantiableClass(tupleClassType)) { return ClassType.cloneAsInstance( specializeTupleClass( tupleClassType, - tupleTypeArgs, + [{ type, isUnbounded: !isVariadicTypeVar(type) }], /* isTypeArgumentExplicit */ true, - /* stripLiterals */ true, - isForVariadic + /* stripLiterals */ true ) ); } @@ -12727,10 +15284,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } case ParameterCategory.VarArgDictionary: { + // Leave a ParamSpec alone. if (isTypeVar(type) && type.paramSpecAccess) { return type; } + // Is this an unpacked TypedDict? If so, return it unmodified. + if (isClassInstance(type) && ClassType.isTypedDictClass(type) && type.isUnpacked) { + return type; + } + + // Wrap the type in a dict with str keys. const dictType = getBuiltInType(node, 'dict'); const strType = getBuiltInObject(node, 'str'); @@ -12764,18 +15328,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } for (const decoratorNode of node.decorators) { - let evaluatorFlags = EvaluatorFlags.DoNotSpecialize; - if (fileInfo.isStubFile) { - // Some stub files (e.g. builtins.pyi) rely on forward - // declarations of decorators. - evaluatorFlags |= EvaluatorFlags.AllowForwardReferences; + // Some stub files (e.g. builtins.pyi) rely on forward declarations of decorators. + let evaluatorFlags = fileInfo.isStubFile ? 
EvaluatorFlags.AllowForwardReferences : EvaluatorFlags.None; + if (decoratorNode.expression.nodeType !== ParseNodeType.Call) { + evaluatorFlags |= EvaluatorFlags.DoNotSpecialize; } - const decoratorType = getTypeOfExpression( + const decoratorTypeResult = getTypeOfExpression( decoratorNode.expression, /* expectedType */ undefined, evaluatorFlags - ).type; + ); + const decoratorType = decoratorTypeResult.type; + if (isFunction(decoratorType)) { if (decoratorType.details.builtInName === 'abstractmethod') { if (isInClass) { @@ -12810,14 +15375,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ): Type { const fileInfo = AnalyzerNodeInfo.getFileInfo(decoratorNode); - let evaluatorFlags = EvaluatorFlags.DoNotSpecialize; - if (fileInfo.isStubFile) { - // Some stub files (e.g. builtins.pyi) rely on forward - // declarations of decorators. - evaluatorFlags |= EvaluatorFlags.AllowForwardReferences; + // Some stub files (e.g. builtins.pyi) rely on forward declarations of decorators. + let evaluatorFlags = fileInfo.isStubFile ? EvaluatorFlags.AllowForwardReferences : EvaluatorFlags.None; + if (decoratorNode.expression.nodeType !== ParseNodeType.Call) { + evaluatorFlags |= EvaluatorFlags.DoNotSpecialize; } - const decoratorType = getTypeOfExpression(decoratorNode.expression, undefined, evaluatorFlags).type; + const decoratorTypeResult = getTypeOfExpression( + decoratorNode.expression, + /* expectedType */ undefined, + evaluatorFlags + ); + const decoratorType = decoratorTypeResult.type; // Special-case the "overload" because it has no definition. Older versions of typeshed // defined "overload" as an object, but newer versions define it as a function. 
@@ -12833,10 +15402,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (decoratorNode.expression.nodeType === ParseNodeType.Call) { - const decoratorCallType = getTypeOfExpression(decoratorNode.expression.leftExpression).type; + const decoratorCallType = getTypeOfExpression( + decoratorNode.expression.leftExpression, + /* expectedType */ undefined, + evaluatorFlags | EvaluatorFlags.DoNotSpecialize + ).type; if (isFunction(decoratorCallType)) { - if (decoratorCallType.details.name === '__dataclass_transform__') { + if ( + decoratorCallType.details.name === '__dataclass_transform__' || + decoratorCallType.details.builtInName === 'dataclass_transform' + ) { undecoratedType.details.decoratorDataClassBehaviors = validateDataClassTransformDecorator( evaluatorInterface, decoratorNode.expression @@ -12846,13 +15422,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - // Special-case the "no_type_check" because it has no definition. - // Pyright chooses not to implement the semantics of "no_type_check" - // because it's an ill-conceived construct. - if (isInstantiableClass(decoratorType) && ClassType.isSpecialBuiltIn(decoratorType, 'no_type_check')) { - return inputFunctionType; - } - let returnType = getTypeFromDecorator(decoratorNode, inputFunctionType); // Check for some built-in decorator types with known semantics. @@ -12863,20 +15432,35 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Handle property setters and deleters. 
if (decoratorNode.expression.nodeType === ParseNodeType.MemberAccess) { - const baseType = getTypeOfExpression(decoratorNode.expression.leftExpression).type; + const baseType = getTypeOfExpression( + decoratorNode.expression.leftExpression, + /* expectedType */ undefined, + evaluatorFlags | EvaluatorFlags.DoNotSpecialize + ).type; + if (isProperty(baseType)) { const memberName = decoratorNode.expression.memberName.value; if (memberName === 'setter') { if (isFunction(inputFunctionType)) { - validatePropertyMethod(inputFunctionType, decoratorNode); - return clonePropertyWithSetter(baseType, inputFunctionType, functionNode); + validatePropertyMethod(evaluatorInterface, inputFunctionType, decoratorNode); + return clonePropertyWithSetter( + evaluatorInterface, + baseType, + inputFunctionType, + functionNode + ); } else { return inputFunctionType; } } else if (memberName === 'deleter') { if (isFunction(inputFunctionType)) { - validatePropertyMethod(inputFunctionType, decoratorNode); - return clonePropertyWithDeleter(baseType, inputFunctionType, functionNode); + validatePropertyMethod(evaluatorInterface, inputFunctionType, decoratorNode); + return clonePropertyWithDeleter( + evaluatorInterface, + baseType, + inputFunctionType, + functionNode + ); } else { return inputFunctionType; } @@ -12888,6 +15472,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions switch (decoratorType.details.name) { case 'classmethod': case 'staticmethod': { + const requiredFlag = + decoratorType.details.name === 'classmethod' + ? FunctionTypeFlags.ClassMethod + : FunctionTypeFlags.StaticMethod; + + // If the function isn't currently a class method or static method + // (which can happen if the function was wrapped in a decorator), + // add the appropriate flag. 
+ if (isFunction(inputFunctionType) && (inputFunctionType.details.flags & requiredFlag) === 0) { + const newFunction = FunctionType.clone(inputFunctionType); + newFunction.details.flags &= ~( + FunctionTypeFlags.ConstructorMethod | + FunctionTypeFlags.StaticMethod | + FunctionTypeFlags.ClassMethod + ); + newFunction.details.flags |= requiredFlag; + return newFunction; + } + return inputFunctionType; } } @@ -12896,13 +15499,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Handle properties and subclasses of properties specially. if (ClassType.isPropertyClass(decoratorType)) { if (isFunction(inputFunctionType)) { - validatePropertyMethod(inputFunctionType, decoratorNode); - return createProperty( - decoratorNode, - decoratorType.details.name, - inputFunctionType, - ParseTreeUtils.getTypeSourceId(decoratorNode) - ); + validatePropertyMethod(evaluatorInterface, inputFunctionType, decoratorNode); + return createProperty(evaluatorInterface, decoratorNode, decoratorType, inputFunctionType); } else if (isClassInstance(inputFunctionType)) { const callMember = lookUpObjectMember(inputFunctionType, '__call__'); if (callMember) { @@ -12910,12 +15508,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isFunction(memberType) || isOverloadedFunction(memberType)) { const boundMethod = bindFunctionToClassOrObject(inputFunctionType, memberType); if (boundMethod && isFunction(boundMethod)) { - return createProperty( - decoratorNode, - decoratorType.details.name, - boundMethod, - ParseTreeUtils.getTypeSourceId(decoratorNode) - ); + return createProperty(evaluatorInterface, decoratorNode, decoratorType, boundMethod); } } } @@ -12943,310 +15536,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return returnType; } - function validatePropertyMethod(method: FunctionType, errorNode: ParseNode) { - if (FunctionType.isStaticMethod(method)) { - addDiagnostic( - 
AnalyzerNodeInfo.getFileInfo(errorNode).diagnosticRuleSet.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.propertyStaticMethod(), - errorNode - ); - } - } - - function createProperty( - decoratorNode: DecoratorNode, - className: string, - fget: FunctionType, - typeSourceId: TypeSourceId - ): ClassType { - const fileInfo = AnalyzerNodeInfo.getFileInfo(decoratorNode); - const typeMetaclass = getBuiltInType(decoratorNode, 'type'); - const propertyClass = ClassType.createInstantiable( - className, - ParseTreeUtils.getClassFullName(decoratorNode, fileInfo.moduleName, `__property_${fget.details.name}`), - fileInfo.moduleName, - fileInfo.filePath, - ClassTypeFlags.PropertyClass, - typeSourceId, - /* declaredMetaclass */ undefined, - isInstantiableClass(typeMetaclass) ? typeMetaclass : UnknownType.create() - ); - computeMroLinearization(propertyClass); - - const propertyObject = ClassType.cloneAsInstance(propertyClass); - - // Fill in the fget method. - const fields = propertyClass.details.fields; - const fgetSymbol = Symbol.createWithType(SymbolFlags.ClassMember, fget); - fields.set('fget', fgetSymbol); - - if (FunctionType.isClassMethod(fget)) { - propertyClass.details.flags |= ClassTypeFlags.ClassProperty; - } - - // Fill in the __get__ method with an overload. 
- const getFunction1 = FunctionType.createInstance( - '__get__', - '', - '', - FunctionTypeFlags.SynthesizedMethod | FunctionTypeFlags.Overloaded - ); - getFunction1.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'self', - type: propertyObject, - hasDeclaredType: true, - }); - getFunction1.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'obj', - type: NoneType.createInstance(), - hasDeclaredType: true, - }); - getFunction1.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'objtype', - type: AnyType.create(), - hasDeclaredType: true, - hasDefault: true, - defaultType: AnyType.create(), - }); - getFunction1.details.declaredReturnType = FunctionType.isClassMethod(fget) - ? FunctionType.getSpecializedReturnType(fget) - : propertyObject; - getFunction1.details.declaration = fget.details.declaration; - - const getFunction2 = FunctionType.createInstance( - '__get__', - '', - '', - FunctionTypeFlags.SynthesizedMethod | FunctionTypeFlags.Overloaded - ); - getFunction2.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'self', - type: propertyObject, - hasDeclaredType: true, - }); - - // Use the type of the "self" parameter for the object type. If it - // was a synthesized "self" TypeVar with a bound type, use the bound - // type instead. Note that this might also be a "cls" parameter if - // the property is a classmethod. - let objType = fget.details.parameters.length > 0 ? 
fget.details.parameters[0].type : AnyType.create(); - if (isTypeVar(objType) && objType.details.isSynthesized && objType.details.boundType) { - objType = makeTopLevelTypeVarsConcrete(objType); - } - getFunction2.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'obj', - type: objType, - hasDeclaredType: true, - }); - getFunction2.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'objtype', - type: AnyType.create(), - hasDeclaredType: true, - hasDefault: true, - defaultType: AnyType.create(), - }); - getFunction2.details.declaredReturnType = FunctionType.getSpecializedReturnType(fget); - getFunction2.details.declaration = fget.details.declaration; - - // Override the scope ID since we're using parameter types from the - // decorated function. - getFunction2.details.typeVarScopeId = getTypeVarScopeId(fget); - - const getFunctionOverload = OverloadedFunctionType.create([getFunction1, getFunction2]); - const getSymbol = Symbol.createWithType(SymbolFlags.ClassMember, getFunctionOverload); - fields.set('__get__', getSymbol); - - // Fill in the getter, setter and deleter methods. 
- ['getter', 'setter', 'deleter'].forEach((accessorName) => { - const accessorFunction = FunctionType.createInstance( - accessorName, - '', - '', - FunctionTypeFlags.SynthesizedMethod - ); - accessorFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'self', - type: AnyType.create(), - hasDeclaredType: true, - }); - accessorFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'accessor', - type: AnyType.create(), - hasDeclaredType: true, - }); - accessorFunction.details.declaredReturnType = propertyObject; - const accessorSymbol = Symbol.createWithType(SymbolFlags.ClassMember, accessorFunction); - fields.set(accessorName, accessorSymbol); - }); - - return propertyObject; - } - - function clonePropertyWithSetter(prop: Type, fset: FunctionType, errorNode: FunctionNode): Type { - if (!isProperty(prop)) { - return prop; - } - - const classType = prop as ClassType; - const propertyClass = ClassType.createInstantiable( - classType.details.name, - classType.details.fullName, - classType.details.moduleName, - AnalyzerNodeInfo.getFileInfo(errorNode).filePath, - classType.details.flags, - classType.details.typeSourceId, - classType.details.declaredMetaclass, - classType.details.effectiveMetaclass - ); - computeMroLinearization(propertyClass); - - const propertyObject = ClassType.cloneAsInstance(propertyClass); - - // Clone the symbol table of the old class type. - const fields = propertyClass.details.fields; - classType.details.fields.forEach((symbol, name) => { - if (!symbol.isIgnoredForProtocolMatch()) { - fields.set(name, symbol); - } - }); - - // Verify parameters for fset. - // We'll skip this test if the diagnostic rule is disabled because it - // can be somewhat expensive, especially in code that is not annotated. 
- const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); - if (fileInfo.diagnosticRuleSet.reportPropertyTypeMismatch !== 'none') { - if (errorNode.parameters.length >= 2) { - const typeAnnotation = getTypeAnnotationForParameter(errorNode, 1); - if (typeAnnotation) { - // Verify consistency of the type. - const fgetType = getGetterTypeFromProperty(classType, /* inferTypeIfNeeded */ false); - if (fgetType && !isAnyOrUnknown(fgetType)) { - const fsetType = getTypeOfAnnotation(typeAnnotation); - - // The setter type should be assignable to the getter type. - const diag = new DiagnosticAddendum(); - if (!canAssignType(fgetType, fsetType, diag)) { - addDiagnostic( - fileInfo.diagnosticRuleSet.reportPropertyTypeMismatch, - DiagnosticRule.reportPropertyTypeMismatch, - Localizer.Diagnostic.setterGetterTypeMismatch() + diag.getString(), - typeAnnotation - ); - } - } - } - } - } - - // Fill in the fset method. - const fsetSymbol = Symbol.createWithType(SymbolFlags.ClassMember, fset); - fields.set('fset', fsetSymbol); - - // Fill in the __set__ method. - const setFunction = FunctionType.createInstance('__set__', '', '', FunctionTypeFlags.SynthesizedMethod); - setFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'self', - type: prop, - hasDeclaredType: true, - }); - let objType = fset.details.parameters.length > 0 ? 
fset.details.parameters[0].type : AnyType.create(); - if (isTypeVar(objType) && objType.details.isSynthesized && objType.details.boundType) { - objType = makeTopLevelTypeVarsConcrete(objType); - } - setFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'obj', - type: combineTypes([objType, NoneType.createInstance()]), - hasDeclaredType: true, - }); - setFunction.details.declaredReturnType = NoneType.createInstance(); - let setParamType: Type = UnknownType.create(); - if ( - fset.details.parameters.length >= 2 && - fset.details.parameters[1].category === ParameterCategory.Simple && - fset.details.parameters[1].name - ) { - setParamType = fset.details.parameters[1].type; - } - setFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'value', - type: setParamType, - hasDeclaredType: true, - }); - const setSymbol = Symbol.createWithType(SymbolFlags.ClassMember, setFunction); - fields.set('__set__', setSymbol); - - return propertyObject; - } - - function clonePropertyWithDeleter(prop: Type, fdel: FunctionType, errorNode: FunctionNode): Type { - if (!isProperty(prop)) { - return prop; - } - - const classType = prop as ClassType; - const propertyClass = ClassType.createInstantiable( - classType.details.name, - classType.details.fullName, - classType.details.moduleName, - AnalyzerNodeInfo.getFileInfo(errorNode).filePath, - classType.details.flags, - classType.details.typeSourceId, - classType.details.declaredMetaclass, - classType.details.effectiveMetaclass - ); - computeMroLinearization(propertyClass); - - const propertyObject = ClassType.cloneAsInstance(propertyClass); - - // Clone the symbol table of the old class type. - const fields = propertyClass.details.fields; - classType.details.fields.forEach((symbol, name) => { - if (!symbol.isIgnoredForProtocolMatch()) { - fields.set(name, symbol); - } - }); - - // Fill in the fdel method. 
- const fdelSymbol = Symbol.createWithType(SymbolFlags.ClassMember, fdel); - fields.set('fdel', fdelSymbol); - - // Fill in the __delete__ method. - const delFunction = FunctionType.createInstance('__delete__', '', '', FunctionTypeFlags.SynthesizedMethod); - delFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'self', - type: prop, - hasDeclaredType: true, - }); - let objType = fdel.details.parameters.length > 0 ? fdel.details.parameters[0].type : AnyType.create(); - if (isTypeVar(objType) && objType.details.isSynthesized && objType.details.boundType) { - objType = makeTopLevelTypeVarsConcrete(objType); - } - delFunction.details.parameters.push({ - category: ParameterCategory.Simple, - name: 'obj', - type: combineTypes([objType, NoneType.createInstance()]), - hasDeclaredType: true, - }); - delFunction.details.declaredReturnType = NoneType.createInstance(); - const delSymbol = Symbol.createWithType(SymbolFlags.ClassMember, delFunction); - fields.set('__delete__', delSymbol); - - return propertyObject; - } - // Given a function node and the function type associated with it, this // method searches for prior function nodes that are marked as @overload // and creates an OverloadedFunctionType that includes this function and @@ -13332,7 +15621,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions awaitableFunctionType.details.declaredReturnType = createAwaitableReturnType( node, functionType.details.declaredReturnType, - !!functionType.details.declaration?.isGenerator + FunctionType.isGenerator(functionType) ); } @@ -13411,7 +15700,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Is this type already cached? 
- let inferredReturnType = readTypeCache(node.suite); + let inferredReturnType = readTypeCache(node.suite, EvaluatorFlags.None); if (inferredReturnType) { return inferredReturnType; } @@ -13443,12 +15732,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isAbstract || methodAlwaysRaisesNotImplemented(functionDecl)) { inferredReturnType = UnknownType.create(); } else { - const noReturnClass = getTypingType(node, 'NoReturn'); - if (noReturnClass && isInstantiableClass(noReturnClass)) { - inferredReturnType = ClassType.cloneAsInstance(noReturnClass); - } else { - inferredReturnType = UnknownType.create(); - } + inferredReturnType = NeverType.createNoReturn(); } } else { const inferredReturnTypes: Type[] = []; @@ -13474,26 +15758,34 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Remove any unbound values since those would generate an exception // before being returned. inferredReturnType = removeUnbound(inferredReturnType); - - // Remove NoReturn types if they appear within a union. - inferredReturnType = removeNoReturnFromUnion(inferredReturnType); } // Is it a generator? if (functionDecl?.isGenerator) { const inferredYieldTypes: Type[] = []; + let useAwaitableGenerator = false; + if (functionDecl.yieldStatements) { functionDecl.yieldStatements.forEach((yieldNode) => { if (isNodeReachable(yieldNode)) { if (yieldNode.nodeType === ParseNodeType.YieldFrom) { const iteratorType = getTypeOfExpression(yieldNode.expression).type; - const yieldType = getTypeFromIterator( - iteratorType, - /* isAsync */ false, - yieldNode - ); - inferredYieldTypes.push(yieldType || UnknownType.create()); - } else { + if ( + isClassInstance(iteratorType) && + ClassType.isBuiltIn(iteratorType, 'Coroutine') + ) { + // Handle old-style (pre-await) Coroutines. 
+ inferredYieldTypes.push(); + useAwaitableGenerator = true; + } else { + const yieldType = getTypeFromIterator( + iteratorType, + /* isAsync */ false, + yieldNode + ); + inferredYieldTypes.push(yieldType || UnknownType.create()); + } + } else { if (yieldNode.expression) { const yieldType = getTypeOfExpression(yieldNode.expression).type; inferredYieldTypes.push(yieldType || UnknownType.create()); @@ -13510,20 +15802,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } const inferredYieldType = combineTypes(inferredYieldTypes); - // Inferred yield types need to be wrapped in a Generator to - // produce the final result. - const generatorType = getTypingType(node, 'Generator'); + // Inferred yield types need to be wrapped in a Generator or + // AwaitableGenerator to produce the final result. + const generatorType = getTypingType( + node, + useAwaitableGenerator ? 'AwaitableGenerator' : 'Generator' + ); + if (generatorType && isInstantiableClass(generatorType)) { + const typeArgs: Type[] = []; + + if (useAwaitableGenerator) { + typeArgs.push(AnyType.create()); + } + + typeArgs.push( + inferredYieldType, + NoneType.createInstance(), + isNever(inferredReturnType) ? NoneType.createInstance() : inferredReturnType + ); + inferredReturnType = ClassType.cloneAsInstance( ClassType.cloneForSpecialization( generatorType, - [ - inferredYieldType, - NoneType.createInstance(), - isNoReturnType(inferredReturnType) - ? 
NoneType.createInstance() - : inferredReturnType, - ], + typeArgs, /* isTypeArgumentExplicit */ true ) ); @@ -13533,7 +15835,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - writeTypeCache(node.suite, inferredReturnType, /* isIncomplete */ false); + writeTypeCache(node.suite, inferredReturnType, EvaluatorFlags.None, /* isIncomplete */ false); } finally { functionRecursionMap.delete(node.id); } @@ -13575,7 +15877,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } function evaluateTypesForForStatement(node: ForNode): void { - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13591,14 +15893,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.targetExpression ); - writeTypeCache(node, iteratedType, !!iteratorTypeResult.isIncomplete); + writeTypeCache(node, iteratedType, EvaluatorFlags.None, !!iteratorTypeResult.isIncomplete); } function evaluateTypesForExceptStatement(node: ExceptNode): void { // This should be called only if the except node has a target exception. 
assert(node.typeExpression !== undefined); - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13637,7 +15939,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const tupleType = getSpecializedTupleType(subType); if (tupleType && tupleType.tupleTypeArguments) { const entryTypes = tupleType.tupleTypeArguments.map((t) => { - return getExceptionType(t, node.typeExpression!); + return getExceptionType(t.type, node.typeExpression!); }); return combineTypes(entryTypes); } @@ -13649,11 +15951,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assignTypeToExpression(node.name, targetType, /* isIncomplete */ false, node.name); } - writeTypeCache(node, targetType, /* isIncomplete */ false); + writeTypeCache(node, targetType, EvaluatorFlags.None, /* isIncomplete */ false); } function evaluateTypesForWithStatement(node: WithItemNode): void { - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13772,11 +16074,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assignTypeToExpression(node.target, scopedType, !!exprTypeResult.isIncomplete, node.target); } - writeTypeCache(node, scopedType, !!exprTypeResult.isIncomplete); + writeTypeCache(node, scopedType, EvaluatorFlags.None, !!exprTypeResult.isIncomplete); } function evaluateTypesForImportAs(node: ImportAsNode): void { - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13800,7 +16102,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Is there a cached module type associated with this node? If so, use // it instead of the type we just created. 
- const cachedModuleType = readTypeCache(node) as ModuleType; + const cachedModuleType = readTypeCache(node, EvaluatorFlags.None) as ModuleType; if (cachedModuleType && isModule(cachedModuleType) && symbolType) { if (isTypeSame(symbolType, cachedModuleType)) { symbolType = cachedModuleType; @@ -13809,11 +16111,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assignTypeToNameNode(symbolNameNode, symbolType, /* isIncomplete */ false); - writeTypeCache(node, symbolType, /* isIncomplete */ false); + writeTypeCache(node, symbolType, EvaluatorFlags.None, /* isIncomplete */ false); } function evaluateTypesForImportFromAs(node: ImportFromAsNode): void { - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13880,11 +16182,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } assignTypeToNameNode(aliasNode, symbolType, /* isIncomplete */ false); - writeTypeCache(node, symbolType, /* isIncomplete */ false); + writeTypeCache(node, symbolType, EvaluatorFlags.None, /* isIncomplete */ false); } - function evaluateTypesForMatchNode(node: MatchNode) { - if (readTypeCache(node)) { + function evaluateTypesForMatchNode(node: MatchNode): void { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13903,11 +16205,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - writeTypeCache(node, subjectType, !!subjectTypeResult.isIncomplete); + writeTypeCache(node, subjectType, EvaluatorFlags.None, !!subjectTypeResult.isIncomplete); } - function evaluateTypesForCaseNode(node: CaseNode) { - if (readTypeCache(node)) { + function evaluateTypesForCaseNode(node: CaseNode): void { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13935,6 +16237,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + // Determine if the pre-narrowed subject type contains an object. 
+ let subjectIsObject = false; + doForEachSubtype(makeTopLevelTypeVarsConcrete(subjectType), (subtype) => { + if (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'object')) { + subjectIsObject = true; + } + }); + // Apply positive narrowing for the current case statement. subjectType = narrowTypeBasedOnPattern( evaluatorInterface, @@ -13942,13 +16252,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.pattern, /* isPositiveTest */ true ); - assignTypeToPatternTargets(evaluatorInterface, subjectType, !!subjectTypeResult.isIncomplete, node.pattern); - writeTypeCache(node, subjectType, !!subjectTypeResult.isIncomplete); + assignTypeToPatternTargets( + evaluatorInterface, + subjectType, + !!subjectTypeResult.isIncomplete, + subjectIsObject, + node.pattern + ); + + writeTypeCache(node, subjectType, EvaluatorFlags.None, !!subjectTypeResult.isIncomplete); } function evaluateTypesForImportFrom(node: ImportFromNode): void { - if (readTypeCache(node)) { + if (readTypeCache(node, EvaluatorFlags.None)) { return; } @@ -13960,7 +16277,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Is there a cached module type associated with this node? If so, use // it instead of the type we just created. 
- const cachedModuleType = readTypeCache(node) as ModuleType; + const cachedModuleType = readTypeCache(node, EvaluatorFlags.None) as ModuleType; if (cachedModuleType && isModule(cachedModuleType) && symbolType) { if (isTypeSame(symbolType, cachedModuleType)) { symbolType = cachedModuleType; @@ -13969,7 +16286,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assignTypeToNameNode(symbolNameNode, symbolType, /* isIncomplete */ false); - writeTypeCache(node, symbolType, /* isIncomplete */ false); + writeTypeCache(node, symbolType, EvaluatorFlags.None, /* isIncomplete */ false); } function getAliasedSymbolTypeForName( @@ -14018,6 +16335,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return undefined; } + if (!resolvedAliasInfo.declaration) { + return evaluatorOptions.evaluateUnknownImportsAsAny ? AnyType.create() : UnknownType.create(); + } + if (node.nodeType === ParseNodeType.ImportFromAs) { if (resolvedAliasInfo.isPrivate) { addDiagnostic( @@ -14051,7 +16372,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - return getInferredTypeOfDeclaration(aliasDecl); + return getInferredTypeOfDeclaration(symbolWithScope.symbol, aliasDecl); } // In some cases, an expression must be evaluated in the context of another @@ -14099,6 +16420,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions node.nodeType === ParseNodeType.MemberAccess || node.nodeType === ParseNodeType.Set || node.nodeType === ParseNodeType.String || + node.nodeType === ParseNodeType.StringList || node.nodeType === ParseNodeType.Tuple || node.nodeType === ParseNodeType.Unpack || node.nodeType === ParseNodeType.DictionaryKeyEntry || @@ -14135,11 +16457,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) { // For global and nonlocal statements, allow forward references so // we don't use code flow during symbol lookups. 
- getTypeOfExpression( - node, - /* expectedType */ undefined, - EvaluatorFlags.AllowForwardReferences | EvaluatorFlags.SkipUnboundCheck - ); + getTypeOfExpression(node, /* expectedType */ undefined, EvaluatorFlags.AllowForwardReferences); return; } } @@ -14163,7 +16481,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (lastContextualExpression === parent.typeAnnotationComment) { getTypeOfAnnotation(lastContextualExpression, { isVariableAnnotation: true, - allowTypeVarTuple: ParseTreeUtils.isFinalAllowedForAssignmentTarget(parent.leftExpression), + allowFinal: ParseTreeUtils.isFinalAllowedForAssignmentTarget(parent.leftExpression), + allowClassVar: ParseTreeUtils.isClassVarAllowedForAssignmentTarget(parent.leftExpression), }); } else { evaluateTypesForAssignmentStatement(parent); @@ -14198,9 +16517,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions allowFinal: ParseTreeUtils.isFinalAllowedForAssignmentTarget(node.valueExpression), allowClassVar: ParseTreeUtils.isClassVarAllowedForAssignmentTarget(node.valueExpression), }); - if (annotationType) { - writeTypeCache(node.valueExpression, annotationType, /* isIncomplete */ false); - } + writeTypeCache(node.valueExpression, annotationType, EvaluatorFlags.None, /* isIncomplete */ false); } }; @@ -14214,11 +16531,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return; } - if (parent.nodeType === ParseNodeType.Parameter) { + if (parent.nodeType === ParseNodeType.Parameter && lastContextualExpression !== parent.defaultValue) { evaluateTypeOfParameter(parent); return; } + if (parent.nodeType === ParseNodeType.Function) { + if ( + lastContextualExpression === parent.returnTypeAnnotation || + lastContextualExpression === parent.functionAnnotationComment + ) { + getTypeOfAnnotation(lastContextualExpression, { + associateTypeVarsWithScope: true, + disallowRecursiveTypeAlias: true, + }); + return; + } + } + if 
(parent.nodeType === ParseNodeType.ModuleName) { // A name within a module name isn't an expression, // so there's nothing we can evaluate here. @@ -14231,6 +16561,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return; } + // A class argument must be evaluated in the context of the class declaration. + if (parent.nodeType === ParseNodeType.Argument && parent.parent?.nodeType === ParseNodeType.Class) { + getTypeOfClass(parent.parent); + return; + } + if (parent.nodeType === ParseNodeType.Return && parent.returnExpression) { const enclosingFunctionNode = ParseTreeUtils.getEnclosingFunction(node); const declaredReturnType = enclosingFunctionNode @@ -14258,7 +16594,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } function evaluateTypeOfParameter(node: ParameterNode): void { - assert(node.name !== undefined); + // If this parameter has no name, we have nothing to do. + if (!node.name) { + return; + } // We need to handle lambdas differently from functions because // the former never have parameter type annotations but can @@ -14277,34 +16616,45 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const typeAnnotation = getTypeAnnotationForParameter(functionNode, paramIndex); if (typeAnnotation) { - writeTypeCache( - node.name!, - transformVariadicParamType( - node, - node.category, - getTypeOfAnnotation(typeAnnotation, { - associateTypeVarsWithScope: true, - allowTypeVarTuple: - functionNode.parameters[paramIndex].category === ParameterCategory.VarArgList, - disallowRecursiveTypeAlias: true, - }) - ), - /* isIncomplete */ false + const param = functionNode.parameters[paramIndex]; + const annotatedType = getTypeOfParameterAnnotation( + typeAnnotation, + functionNode.parameters[paramIndex].category + ); + + const adjType = transformVariadicParamType( + node, + node.category, + adjustParameterAnnotatedType(param, annotatedType) ); + + writeTypeCache(node.name!, 
adjType, EvaluatorFlags.None, /* isIncomplete */ false); return; } - // We may be able to infer the type of the first parameter. - if (paramIndex === 0) { - const containingClassNode = ParseTreeUtils.getEnclosingClass(functionNode, /* stopAtFunction */ true); - if (containingClassNode) { - const classInfo = getTypeOfClass(containingClassNode); - if (classInfo) { - const functionFlags = getFunctionFlagsFromDecorators(functionNode, /* isInClass */ true); - // If the first parameter doesn't have an explicit type annotation, - // provide a type if it's an instance, class or constructor method. - const inferredParamType = inferFirstParamType(functionFlags, classInfo.classType); - writeTypeCache(node.name!, inferredParamType || UnknownType.create(), /* isIncomplete */ false); + const containingClassNode = ParseTreeUtils.getEnclosingClass(functionNode, /* stopAtFunction */ true); + if (containingClassNode) { + const classInfo = getTypeOfClass(containingClassNode); + + if (classInfo) { + // See if the function is a method in a child class. We may be able to + // infer the type of the parameter from a method of the same name in + // a parent class if it has an annotated type. 
+ const functionFlags = getFunctionFlagsFromDecorators(functionNode, /* isInClass */ true); + const inferredParamType = inferParameterType( + functionNode, + functionFlags, + paramIndex, + classInfo.classType + ); + + if (inferredParamType) { + writeTypeCache( + node.name!, + transformVariadicParamType(node, node.category, inferredParamType), + EvaluatorFlags.None, + /* isIncomplete */ false + ); return; } } @@ -14315,6 +16665,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions writeTypeCache( node.name!, transformVariadicParamType(node, node.category, UnknownType.create()), + EvaluatorFlags.None, /* isIncomplete */ false ); } @@ -14394,7 +16745,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions case ParseNodeType.ListComprehensionFor: { const listComprehension = curNode.parent as ListComprehensionNode; assert(listComprehension.nodeType === ParseNodeType.ListComprehension); - evaluateTypesForExpressionInContext(listComprehension); + if (curNode === listComprehension.expression) { + evaluateTypesForExpressionInContext(listComprehension); + } else { + // Evaluate the individual iterations starting with the first + // up to the curNode. 
+ for (const forIfNode of listComprehension.forIfNodes) { + evaluateListComprehensionForIf(forIfNode); + if (forIfNode === curNode) { + break; + } + } + } return; } @@ -14422,99 +16784,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions curNode = curNode.parent; } - fail('Unexpected assignment target'); - return undefined; - } - - function getTypeFromWildcardImport(flowNode: FlowWildcardImport, name: string): Type { - const importInfo = AnalyzerNodeInfo.getImportInfo(flowNode.node.module); - assert(importInfo !== undefined && importInfo.isImportFound); - assert(flowNode.node.isWildcardImport); - - const symbolWithScope = lookUpSymbolRecursive(flowNode.node, name, /* honorCodeFlow */ false); - assert(symbolWithScope !== undefined); - const decls = symbolWithScope!.symbol.getDeclarations(); - const wildcardDecl = decls.find((decl) => decl.node === flowNode.node); - - if (!wildcardDecl) { - return UnknownType.create(); - } - - return getInferredTypeOfDeclaration(wildcardDecl) || UnknownType.create(); - } - - // When we're evaluating a call to determine whether it returns NoReturn, - // we don't want to do a full type evaluation, which would be expensive - // and create circular dependencies in type evaluation. Instead, we do - // a best-effort evaluation using only declared types (functions, parameters, - // etc.). - function getDeclaredCallBaseType(node: ExpressionNode): Type | undefined { - if (node.nodeType === ParseNodeType.Name) { - const symbolWithScope = lookUpSymbolRecursive(node, node.value, /* honorCodeFlow */ false); - - if (!symbolWithScope) { - return undefined; - } - - const symbol = symbolWithScope.symbol; - const type = getDeclaredTypeOfSymbol(symbol); - if (type) { - return type; - } - - // There was no declared type. Before we give up, see if the - // symbol is a function parameter whose value can be inferred - // or an imported symbol. 
- const declarations = symbol.getDeclarations(); - if (declarations.length === 0) { - return undefined; - } - - const decl = declarations[declarations.length - 1]; - if (decl.type === DeclarationType.Parameter) { - return evaluateTypeForSubnode(decl.node.name!, () => { - evaluateTypeOfParameter(decl.node); - })?.type; - } - - // If it is a symbol from an outer execution scope or an alias, it - // is safe to infer its type. - if (decl.type === DeclarationType.Alias || symbolWithScope.isBeyondExecutionScope) { - return getInferredTypeOfDeclaration(decl); - } - - return undefined; - } - - if (node.nodeType === ParseNodeType.MemberAccess) { - const memberName = node.memberName.value; - let baseType = getDeclaredCallBaseType(node.leftExpression); - if (!baseType) { - return undefined; - } - - baseType = makeTopLevelTypeVarsConcrete(baseType); - - const declaredTypeOfSymbol = mapSubtypes(baseType, (subtype) => { - let symbol: Symbol | undefined; - if (isModule(subtype)) { - symbol = ModuleType.getField(subtype, memberName); - } else if (isInstantiableClass(subtype)) { - const classMemberInfo = lookUpClassMember(subtype, memberName); - symbol = classMemberInfo ? classMemberInfo.symbol : undefined; - } else if (isClassInstance(subtype)) { - const classMemberInfo = lookUpClassMember(subtype, memberName); - symbol = classMemberInfo ? classMemberInfo.symbol : undefined; - } - - return symbol ? getDeclaredTypeOfSymbol(symbol) : undefined; - }); - - if (!isNever(declaredTypeOfSymbol)) { - return declaredTypeOfSymbol; - } - } - + fail('Unexpected statement'); return undefined; } @@ -14525,7 +16795,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function evaluateTypeForSubnode(subnode: ParseNode, callback: () => void): TypeResult | undefined { // If the type cache is already populated, don't bother // doing additional work. 
- let subnodeType = readTypeCache(subnode); + let subnodeType = readTypeCache(subnode, undefined); if (subnodeType) { return { node: subnode, type: subnodeType }; } @@ -14534,7 +16804,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions try { incompleteTypeCache = new Map(); callback(); - subnodeType = readTypeCache(subnode); + subnodeType = readTypeCache(subnode, undefined); if (subnodeType) { return { node: subnode, type: subnodeType }; } @@ -14543,174 +16813,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (subnodeType) { return { node: subnode, type: subnodeType, isIncomplete: true }; } - } finally { + + incompleteTypeCache = oldIncompleteCache; + } catch (e) { + // We don't use a finally clause here because the debugger doesn't + // handle it well when stepping through code. incompleteTypeCache = oldIncompleteCache; + throw e; } return undefined; } - // Performs a cursory analysis to determine whether the expression - // corresponds to a context manager object that supports the swallowing - // of exceptions. By convention, these objects have an "__exit__" method - // that returns a bool response (as opposed to a None). This function is - // called during code flow, so it can't rely on full type evaluation. It - // makes some simplifying assumptions that work in most cases. - function isExceptionContextManager(node: ExpressionNode, isAsync: boolean) { - // See if this information is cached already. - if (isExceptionContextManagerCache.has(node.id)) { - return isExceptionContextManagerCache.get(node.id); - } - - // Initially set to false to avoid infinite recursion. - isExceptionContextManagerCache.set(node.id, false); - - let cmSwallowsExceptions = false; - - // We assume that the context manager is instantiated through a call. 
- if (node.nodeType === ParseNodeType.Call) { - const callType = getDeclaredCallBaseType(node.leftExpression); - if (callType && isInstantiableClass(callType)) { - const exitMethodName = isAsync ? '__aexit__' : '__exit__'; - const exitType = getTypeFromObjectMember( - node.leftExpression, - ClassType.cloneAsInstance(callType), - exitMethodName - )?.type; - - if (exitType && isFunction(exitType) && exitType.details.declaredReturnType) { - const returnType = exitType.details.declaredReturnType; - cmSwallowsExceptions = isClassInstance(returnType) && ClassType.isBuiltIn(returnType, 'bool'); - } - } - } - - // Cache the value for next time. - isExceptionContextManagerCache.set(node.id, cmSwallowsExceptions); - - return cmSwallowsExceptions; - } - - // Performs a cursory analysis to determine whether a call never returns - // without fully evaluating its type. This is done during code flow, - // so it can't rely on full type analysis. It makes some simplifying - // assumptions that work fine in practice. - function isCallNoReturn(node: CallNode) { - // See if this information is cached already. - if (callIsNoReturnCache.has(node.id)) { - return callIsNoReturnCache.get(node.id); - } - - // Initially set to false to avoid infinite recursion. - callIsNoReturnCache.set(node.id, false); - - let noReturnTypeCount = 0; - let subtypeCount = 0; - - // Evaluate the call base type. - const callType = getDeclaredCallBaseType(node.leftExpression); - if (callType) { - doForEachSubtype(callType, (callSubtype) => { - // Track the number of subtypes we've examined. - subtypeCount++; - - // We assume here that no constructors or __call__ methods - // will be inferred "no return" types, so we can restrict - // our check to functions. - let functionType: FunctionType | undefined; - if (isFunction(callSubtype)) { - functionType = callSubtype; - } else if (isOverloadedFunction(callSubtype)) { - // Use the last overload, which should be the most general. 
- const overloadedFunction = callSubtype; - functionType = overloadedFunction.overloads[overloadedFunction.overloads.length - 1]; - } - - if (functionType && !FunctionType.isAsync(functionType)) { - if (functionType.details.declaredReturnType) { - if (isNoReturnType(functionType.details.declaredReturnType)) { - noReturnTypeCount++; - } - } else if (functionType.details.declaration) { - // If the function has yield expressions, it's a generator, and - // we'll assume the yield statements are reachable. Also, don't - // infer a "no return" type for abstract methods. - if ( - !functionType.details.declaration.yieldStatements && - !FunctionType.isAbstractMethod(functionType) && - !FunctionType.isStubDefinition(functionType) && - !FunctionType.isPyTypedDefinition(functionType) - ) { - // Check specifically for a common idiom where the only statement - // (other than a possible docstring) is a "raise NotImplementedError". - const functionStatements = functionType.details.declaration.node.suite.statements; - - let foundRaiseNotImplemented = false; - for (const statement of functionStatements) { - if ( - statement.nodeType !== ParseNodeType.StatementList || - statement.statements.length !== 1 - ) { - break; - } - - const simpleStatement = statement.statements[0]; - if (simpleStatement.nodeType === ParseNodeType.StringList) { - continue; - } - - if ( - simpleStatement.nodeType === ParseNodeType.Raise && - simpleStatement.typeExpression - ) { - // Check for "raise NotImplementedError" or "raise NotImplementedError()" - const isNotImplementedName = (node: ParseNode) => { - return ( - node?.nodeType === ParseNodeType.Name && - node.value === 'NotImplementedError' - ); - }; - - if (isNotImplementedName(simpleStatement.typeExpression)) { - foundRaiseNotImplemented = true; - } else if ( - simpleStatement.typeExpression.nodeType === ParseNodeType.Call && - isNotImplementedName(simpleStatement.typeExpression.leftExpression) - ) { - foundRaiseNotImplemented = true; - } - } - - 
break; - } - - if ( - !foundRaiseNotImplemented && - !isAfterNodeReachable(functionType.details.declaration.node) - ) { - noReturnTypeCount++; - } - } - } - } - }); - } - - // The call is considered NoReturn if all subtypes evaluate to NoReturn. - const callIsNoReturn = subtypeCount > 0 && noReturnTypeCount === subtypeCount; - - // Cache the value for next time. - callIsNoReturnCache.set(node.id, callIsNoReturn); - - return callIsNoReturn; - } - function getCodeFlowAnalyzerForNode(nodeId: number) { let analyzer = codeFlowAnalyzerCache.get(nodeId); if (!analyzer) { // Allocate a new code flow analyzer. - analyzer = createCodeFlowAnalyzer(); + analyzer = codeFlowEngine.createCodeFlowAnalyzer(); codeFlowAnalyzerCache.set(nodeId, analyzer); } @@ -14719,24 +16839,27 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Attempts to determine the type of the reference expression at the // point in the code. If the code flow analysis has nothing to say - // about that expression, it return undefined. + // about that expression, it return undefined. Normally flow analysis + // starts from the reference node, but startNode can be specified to + // override this in a few special cases (functions and lambdas) to + // support analysis of captured variables. function getFlowTypeOfReference( reference: CodeFlowReferenceExpressionNode, targetSymbolId: number, initialType: Type | undefined, - isInitialTypeIncomplete: boolean + isInitialTypeIncomplete: boolean, + startNode?: FunctionNode | LambdaNode ): FlowNodeTypeResult { // See if this execution scope requires code flow for this reference expression. const referenceKey = createKeyForReference(reference); - const executionScope = ParseTreeUtils.getExecutionScopeNode(reference); - const codeFlowExpressions = AnalyzerNodeInfo.getCodeFlowExpressions(executionScope); + const executionNode = ParseTreeUtils.getExecutionScopeNode(startNode?.parent ?? 
reference); + const codeFlowExpressions = AnalyzerNodeInfo.getCodeFlowExpressions(executionNode); if (!codeFlowExpressions || !codeFlowExpressions.has(referenceKey)) { - return { type: undefined, usedOuterScopeAlias: false, isIncomplete: false }; + return { type: undefined, isIncomplete: false }; } // Is there an code flow analyzer cached for this execution scope? - const executionNode = ParseTreeUtils.getExecutionScopeNode(reference); let analyzer: CodeFlowAnalyzer | undefined; if (isNodeInReturnTypeInferenceContext(executionNode)) { @@ -14749,9 +16872,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions analyzer = getCodeFlowAnalyzerForNode(executionNode.id); } - const flowNode = AnalyzerNodeInfo.getFlowNode(reference); + const flowNode = AnalyzerNodeInfo.getFlowNode(startNode ?? reference); if (flowNode === undefined) { - return { type: undefined, usedOuterScopeAlias: false, isIncomplete: false }; + return { type: undefined, isIncomplete: false }; } return getTypeFromCodeFlow( @@ -14783,8 +16906,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions initialType, isInitialTypeIncomplete ); - } finally { + incompleteTypeTracker.exitTrackingScope(); + } catch (e) { + // We don't use a finally clause here because the debugger doesn't + // handle it well when stepping through the code. + incompleteTypeTracker.exitTrackingScope(); + throw e; } if (codeFlowResult.isIncomplete) { @@ -14794,979 +16922,128 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return codeFlowResult; } - // Creates a new code flow analyzer that can be used to narrow the types - // of the expressions within an execution context. Each code flow analyzer - // instance maintains a cache of types it has already determined. 
- function createCodeFlowAnalyzer(): CodeFlowAnalyzer { - const flowNodeTypeCacheSet = new Map(); - - function getTypeFromCodeFlow( - flowNode: FlowNode, - reference: CodeFlowReferenceExpressionNode | undefined, - targetSymbolId: number | undefined, - initialType: Type | undefined, - isInitialTypeIncomplete: boolean - ): FlowNodeTypeResult { - const referenceKey = reference !== undefined ? createKeyForReference(reference) : undefined; - let subexpressionReferenceKeys: string[] | undefined; - const referenceKeyWithSymbolId = - referenceKey !== undefined && targetSymbolId !== undefined - ? referenceKey + `.${targetSymbolId.toString()}` - : '.'; - let flowNodeTypeCache = flowNodeTypeCacheSet.get(referenceKeyWithSymbolId); - if (!flowNodeTypeCache) { - flowNodeTypeCache = new Map(); - flowNodeTypeCacheSet.set(referenceKeyWithSymbolId, flowNodeTypeCache); - } - - // Caches the type of the flow node in our local cache, keyed by the flow node ID. - function setCacheEntry( - flowNode: FlowNode, - type: Type | undefined, - usedOuterScopeAlias: boolean, - isIncomplete: boolean - ): FlowNodeTypeResult { - if (!isIncomplete) { - flowIncompleteGeneration++; - } else { - const prevEntry = flowNodeTypeCache!.get(flowNode.id); - if (prevEntry === undefined) { - flowIncompleteGeneration++; - } else if (type && (prevEntry as IncompleteType).isIncompleteType) { - const prevIncompleteType = prevEntry as IncompleteType; - if (prevIncompleteType.type && !isTypeSame(prevIncompleteType.type, type)) { - flowIncompleteGeneration++; - } - } + // Specializes the specified (potentially generic) class type using + // the specified type arguments, reporting errors as appropriate. + // Returns the specialized type and a boolean indicating whether + // the type indicates a class type (true) or an object type (false). 
+ function createSpecializedClassType( + classType: ClassType, + typeArgs: TypeResult[] | undefined, + flags: EvaluatorFlags, + errorNode: ParseNode + ): Type { + // Handle the special-case classes that are not defined + // in the type stubs. + if (ClassType.isSpecialBuiltIn(classType)) { + const aliasedName = classType.aliasName || classType.details.name; + switch (aliasedName) { + case 'Callable': { + return createCallableType(typeArgs, errorNode); } - // For speculative or incomplete types, we'll create a separate - // object. For non-speculative and complete types, we'll store - // the type directly. - const entry: CachedType | undefined = isIncomplete - ? { - isIncompleteType: true, - type, - incompleteSubtypes: [], - generationCount: flowIncompleteGeneration, - } - : type; - - flowNodeTypeCache!.set(flowNode.id, entry); - speculativeTypeTracker.trackEntry(flowNodeTypeCache!, flowNode.id); + case 'Never': { + if (typeArgs && typeArgs.length > 0) { + addError( + Localizer.Diagnostic.typeArgsExpectingNone().format({ name: 'Never' }), + typeArgs[0].node + ); + } + return NeverType.createNever(); + } - return { - type, - usedOuterScopeAlias, - isIncomplete, - generationCount: flowIncompleteGeneration, - incompleteSubtypes: isIncomplete ? 
[] : undefined, - }; - } + case 'NoReturn': { + if (typeArgs && typeArgs.length > 0) { + addError( + Localizer.Diagnostic.typeArgsExpectingNone().format({ name: 'NoReturn' }), + typeArgs[0].node + ); + } + return NeverType.createNoReturn(); + } - function setIncompleteSubtype( - flowNode: FlowNode, - index: number, - type: Type | undefined, - isIncomplete: boolean, - isPending: boolean, - usedOuterScopeAlias: boolean - ) { - const cachedEntry = flowNodeTypeCache!.get(flowNode.id); - if (cachedEntry === undefined || !isIncompleteType(cachedEntry)) { - fail('setIncompleteSubtype can be called only on a valid incomplete cache entry'); + case 'Optional': { + return createOptionalType(classType, errorNode, typeArgs, flags); } - const incompleteEntries = cachedEntry.incompleteSubtypes; - if (index < incompleteEntries.length) { - const oldEntry = incompleteEntries[index]; + case 'Type': { + // PEP 484 says that Type[Any] should be considered + // equivalent to type. if ( - oldEntry.isIncomplete !== isIncomplete || - oldEntry.type === undefined || - type === undefined || - !isTypeSame(oldEntry.type, type) + typeArgs?.length === 1 && + isAnyOrUnknown(typeArgs[0].type) && + typeClassType && + isInstantiableClass(typeClassType) ) { - incompleteEntries[index] = { type, isIncomplete, isPending }; - flowIncompleteGeneration++; - } else if (oldEntry.isPending !== isPending) { - incompleteEntries[index] = { type, isIncomplete, isPending }; + return typeClassType; } - } else { - assert(incompleteEntries.length === index); - incompleteEntries.push({ type, isIncomplete, isPending }); - flowIncompleteGeneration++; + + let typeType = createSpecialType(classType, typeArgs, 1); + if (isInstantiableClass(typeType)) { + typeType = explodeGenericClass(typeType); + } + return typeType; } - return getCacheEntry(flowNode, usedOuterScopeAlias); - } + case 'ClassVar': { + return createClassVarType(classType, errorNode, typeArgs, flags); + } - function incrementFlowNodeVisitCount(flowNode: 
FlowNode) { - const cachedEntry = flowNodeTypeCache!.get(flowNode.id); - if (cachedEntry === undefined || !isIncompleteType(cachedEntry)) { - fail('incrementFlowNodeVisitCount can be called only on a valid incomplete cache entry'); + case 'Protocol': { + return createSpecialType( + classType, + typeArgs, + /* paramLimit */ undefined, + /* allowParamSpec */ true + ); } - cachedEntry.recursiveVisitCount = (cachedEntry.recursiveVisitCount ?? 0) + 1; + case 'Tuple': { + return createSpecialType(classType, typeArgs, /* paramLimit */ undefined); + } - return cachedEntry.recursiveVisitCount; - } + case 'Union': { + return createUnionType(classType, errorNode, typeArgs, flags); + } - function getCacheEntry(flowNode: FlowNode, usedOuterScopeAlias: boolean): FlowNodeTypeResult | undefined { - if (!flowNodeTypeCache!.has(flowNode.id)) { - return undefined; + case 'Generic': { + return createGenericType(classType, errorNode, typeArgs, flags); } - const cachedEntry = flowNodeTypeCache!.get(flowNode.id); - if (cachedEntry === undefined) { - return { - type: cachedEntry, - usedOuterScopeAlias, - isIncomplete: false, - }; + case 'Final': { + return createFinalType(classType, errorNode, typeArgs, flags); } - if (!isIncompleteType(cachedEntry)) { - return { - type: cachedEntry, - usedOuterScopeAlias, - isIncomplete: false, - }; + case 'Annotated': { + return createAnnotatedType(errorNode, typeArgs); } - let type = cachedEntry.type; + case 'Concatenate': { + return createConcatenateType(errorNode, classType, typeArgs); + } - if (cachedEntry.incompleteSubtypes.length > 0) { - // Recompute the effective type based on all of the incomplete - // types we've accumulated so far. - const typesToCombine: Type[] = []; - cachedEntry.incompleteSubtypes.forEach((t) => { - if (t.type) { - typesToCombine.push(t.type); - } - }); - type = typesToCombine.length > 0 ? 
combineTypes(typesToCombine) : undefined; + case 'TypeGuard': + case 'StrictTypeGuard': { + return createTypeGuardType(errorNode, classType, typeArgs, flags); } - return { - type, - usedOuterScopeAlias, - isIncomplete: true, - incompleteSubtypes: cachedEntry.incompleteSubtypes, - generationCount: cachedEntry.generationCount, - }; - } + case 'Unpack': { + return createUnpackType(errorNode, typeArgs, flags); + } - function evaluateAssignmentFlowNode(flowNode: FlowAssignment): TypeResult | undefined { - // For function and class nodes, the reference node is the name - // node, but we need to use the parent node (the FunctionNode or ClassNode) - // to access the decorated type in the type cache. - let nodeForCacheLookup: ParseNode = flowNode.node; - const parentNode = flowNode.node.parent; - if (parentNode) { - if (parentNode.nodeType === ParseNodeType.Function || parentNode.nodeType === ParseNodeType.Class) { - nodeForCacheLookup = parentNode; - } + case 'Required': + case 'NotRequired': { + return createRequiredType(classType, errorNode, aliasedName === 'Required', typeArgs, flags); } - return evaluateTypeForSubnode(nodeForCacheLookup, () => { - evaluateTypesForStatement(flowNode.node); - }); + case 'Self': { + return createSelfType(classType, errorNode, typeArgs); + } + + case 'LiteralString': { + return createSpecialType(classType, typeArgs, 0); + } } - - // If this flow has no knowledge of the target expression, it returns undefined. - // If the start flow node for this scope is reachable, the typeAtStart value is - // returned. - function getTypeFromFlowNode( - flowNode: FlowNode, - reference: CodeFlowReferenceExpressionNode | undefined, - targetSymbolId: number | undefined, - initialType: Type | undefined, - isInitialTypeIncomplete: boolean - ): FlowNodeTypeResult { - let curFlowNode = flowNode; - let usedOuterScopeAlias = false; - - // Record how many times this function has been called. 
- const codeFlowInvocationsAtStart = codeFlowInvocations; - codeFlowInvocations++; - - // This is a frequently-called routine, so it's a good place to call - // the cancellation check. If the operation is canceled, an exception - // will be thrown at this point. - checkForCancellation(); - - while (true) { - // Have we already been here? If so, use the cached value. - const cachedEntry = getCacheEntry(curFlowNode, usedOuterScopeAlias); - if (cachedEntry) { - // If the cached entry is incomplete, we can use it only if nothing - // has changed that may cause the previously-reported incomplete type to change. - if (!cachedEntry.isIncomplete || cachedEntry.generationCount === flowIncompleteGeneration) { - return cachedEntry; - } - } - - if (curFlowNode.flags & FlowFlags.Unreachable) { - // We can get here if there are nodes in a compound logical expression - // (e.g. "False and x") that are never executed but are evaluated. - // The type doesn't matter in this case. - return setCacheEntry(curFlowNode, undefined, usedOuterScopeAlias, /* isIncomplete */ false); - } - - if (curFlowNode.flags & FlowFlags.VariableAnnotation) { - const varAnnotationNode = curFlowNode as FlowVariableAnnotation; - curFlowNode = varAnnotationNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.Call) { - const callFlowNode = curFlowNode as FlowCall; - - // If this function returns a "NoReturn" type, that means - // it always raises an exception or otherwise doesn't return, - // so we can assume that the code before this is unreachable. - if (isCallNoReturn(callFlowNode.node)) { - return setCacheEntry(curFlowNode, undefined, usedOuterScopeAlias, /* isIncomplete */ false); - } - - curFlowNode = callFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.Assignment) { - const assignmentFlowNode = curFlowNode as FlowAssignment; - // Are we targeting the same symbol? 
We need to do this extra check because the same - // symbol name might refer to different symbols in different scopes (e.g. a list - // comprehension introduces a new scope). - if (reference) { - if ( - targetSymbolId === assignmentFlowNode.targetSymbolId && - ParseTreeUtils.isMatchingExpression(reference, assignmentFlowNode.node) - ) { - // Is this a special "unbind" assignment? If so, - // we can handle it immediately without any further evaluation. - if (curFlowNode.flags & FlowFlags.Unbind) { - return setCacheEntry( - curFlowNode, - UnboundType.create(), - usedOuterScopeAlias, - /* isIncomplete */ false - ); - } - - // If there was a cache entry already, that means we hit a recursive - // case (something like "int: int = 4"). Avoid infinite recursion - // by returning an undefined type. - if (cachedEntry && cachedEntry.type === undefined) { - return { type: undefined, usedOuterScopeAlias, isIncomplete: true }; - } - - // Set the cache entry to undefined before evaluating the - // expression in case it depends on itself. - setCacheEntry(curFlowNode, undefined, usedOuterScopeAlias, /* isIncomplete */ true); - let flowTypeResult = evaluateAssignmentFlowNode(assignmentFlowNode); - if (flowTypeResult && isTypeAliasPlaceholder(flowTypeResult.type)) { - flowTypeResult = undefined; - } - return setCacheEntry( - curFlowNode, - flowTypeResult?.type, - usedOuterScopeAlias, - !!flowTypeResult?.isIncomplete - ); - } else if (ParseTreeUtils.isPartialMatchingExpression(reference, assignmentFlowNode.node)) { - // If the node partially matches the reference, we need to "kill" any narrowed - // types further above this point. For example, if we see the sequence - // a.b = 3 - // a = Foo() - // x = a.b - // The type of "a.b" can no longer be assumed to be Literal[3]. 
- return { - type: initialType, - usedOuterScopeAlias, - isIncomplete: isInitialTypeIncomplete, - }; - } - } - - curFlowNode = assignmentFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.AssignmentAlias) { - const aliasFlowNode = curFlowNode as FlowAssignmentAlias; - - // If the target symbol ID matches, replace with its alias - // and continue to traverse the code flow graph. - if (targetSymbolId === aliasFlowNode.targetSymbolId) { - targetSymbolId = aliasFlowNode.aliasSymbolId; - usedOuterScopeAlias = true; - } - curFlowNode = aliasFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.BranchLabel) { - const branchFlowNode = curFlowNode as FlowBranchLabel; - if (curFlowNode.flags & FlowFlags.PostContextManager) { - // Determine whether any of the context managers support exception - // suppression. If not, none of its antecedents are reachable. - const contextMgrNode = curFlowNode as FlowPostContextManagerLabel; - if ( - !contextMgrNode.expressions.some((expr) => - isExceptionContextManager(expr, contextMgrNode.isAsync) - ) - ) { - return setCacheEntry( - curFlowNode, - undefined, - usedOuterScopeAlias, - /* isIncomplete */ false - ); - } - } - - // Is the current symbol modified in any way within the scope of the branch? - // If not, we can skip all processing within the branch scope. 
- if (reference && branchFlowNode.preBranchAntecedent && branchFlowNode.affectedExpressions) { - if (!subexpressionReferenceKeys) { - subexpressionReferenceKeys = createKeysForReferenceSubexpressions(reference); - } - - if ( - !subexpressionReferenceKeys.some((key) => - branchFlowNode.affectedExpressions!.has(key) - ) && - isFlowNodeReachable(curFlowNode, branchFlowNode.preBranchAntecedent) - ) { - curFlowNode = branchFlowNode.preBranchAntecedent; - continue; - } - } - - const labelNode = curFlowNode as FlowLabel; - const typesToCombine: Type[] = []; - let branchUsedOuterScopeAlias = usedOuterScopeAlias; - - let sawIncomplete = false; - - labelNode.antecedents.forEach((antecedent) => { - const flowTypeResult = getTypeFromFlowNode( - antecedent, - reference, - targetSymbolId, - initialType, - isInitialTypeIncomplete - ); - - if (flowTypeResult.isIncomplete) { - sawIncomplete = true; - } - - if (flowTypeResult.usedOuterScopeAlias) { - branchUsedOuterScopeAlias = true; - } - - if (flowTypeResult.type) { - typesToCombine.push(flowTypeResult.type); - } - }); - - const effectiveType = - !!reference || typesToCombine.length > 0 ? combineTypes(typesToCombine) : undefined; - - // Limit the number of recursive calls before we give up and call the type - // complete. This can theoretically result in incorrect type information in - // very complex code flows, but it's preferable to extremely long analysis times. - if (codeFlowInvocations - codeFlowInvocationsAtStart > maxCodeFlowInvocationsPerLoop) { - sawIncomplete = false; - } - - return setCacheEntry(curFlowNode, effectiveType, branchUsedOuterScopeAlias, sawIncomplete); - } - - if (curFlowNode.flags & FlowFlags.LoopLabel) { - const loopNode = curFlowNode as FlowLabel; - - // Is the current symbol modified in any way within the loop? If not, we can skip all - // processing within the loop and assume that the type comes from the first antecedent, - // which feeds the loop. 
- if (reference) { - if (!subexpressionReferenceKeys) { - subexpressionReferenceKeys = createKeysForReferenceSubexpressions(reference); - } - - if (!subexpressionReferenceKeys.some((key) => loopNode.affectedExpressions!.has(key))) { - curFlowNode = loopNode.antecedents[0]; - continue; - } - } - - let sawIncomplete = false; - let loopUsedOuterScopeAlias = usedOuterScopeAlias; - - // See if we've been here before. If so, there will be an incomplete cache entry. - let cacheEntry = getCacheEntry(curFlowNode, usedOuterScopeAlias); - if (cacheEntry === undefined) { - // We haven't been here before, so create a new incomplete cache entry. - cacheEntry = setCacheEntry( - curFlowNode, - undefined, - usedOuterScopeAlias, - /* isIncomplete */ true - ); - } - - const isRecursive = - cacheEntry.incompleteSubtypes !== undefined && - cacheEntry.incompleteSubtypes.some((subtype) => subtype.isPending); - const visitCount = incrementFlowNodeVisitCount(curFlowNode); - - loopNode.antecedents.forEach((antecedent, index) => { - cacheEntry = getCacheEntry(curFlowNode, usedOuterScopeAlias)!; - - // Have we already been here (i.e. does the entry exist and is - // not marked "pending")? If so, we can use the type that was already - // computed if it is complete. - const subtypeEntry = - cacheEntry.incompleteSubtypes !== undefined && - index < cacheEntry.incompleteSubtypes.length - ? cacheEntry.incompleteSubtypes[index] - : undefined; - if ( - subtypeEntry === undefined || - (!subtypeEntry?.isPending && subtypeEntry?.isIncomplete) - ) { - // Set this entry to "pending" to prevent infinite recursion. - // We'll mark it "not pending" below. 
- cacheEntry = setIncompleteSubtype( - curFlowNode, - index, - subtypeEntry?.type, - /* isIncomplete */ true, - /* isPending */ true, - usedOuterScopeAlias - ); - - try { - const flowTypeResult = getTypeFromFlowNode( - antecedent, - reference, - targetSymbolId, - initialType, - isInitialTypeIncomplete - ); - - if (flowTypeResult.isIncomplete) { - sawIncomplete = true; - } - - if (flowTypeResult.usedOuterScopeAlias) { - loopUsedOuterScopeAlias = true; - } - - cacheEntry = setIncompleteSubtype( - curFlowNode, - index, - flowTypeResult.type, - flowTypeResult.isIncomplete, - /* isPending */ false, - loopUsedOuterScopeAlias - ); - } catch (e) { - setIncompleteSubtype( - curFlowNode, - index, - undefined, - /* isIncomplete */ true, - /* isPending */ false, - usedOuterScopeAlias - ); - throw e; - } - } - }); - - if (isRecursive) { - // This was not the first time through the loop, so we are recursively trying - // to resolve other parts of the incomplete type. It will be marked complete - // once the stack pops back up to the first caller. - - // If we have visited the loop node maxFlowNodeLoopVisitCount times already - // and some of the subtypes are still incomplete, bail and base the - // isIncomplete flag on the first subtype, which is the one that feeds - // the top of the loop. - let isIncomplete = - visitCount >= maxFlowNodeLoopVisitCount - ? cacheEntry.incompleteSubtypes![0].isIncomplete - : reference !== undefined; - - // Limit the number of recursive calls before we give up and call the type - // complete. This can theoretically result in incorrect type information in - // very complex code flows, but it's preferable to extremely long analysis times. - if (codeFlowInvocations - codeFlowInvocationsAtStart > maxCodeFlowInvocationsPerLoop) { - isIncomplete = false; - } - - return { - type: cacheEntry.type, - usedOuterScopeAlias, - isIncomplete, - }; - } - - // The result is incomplete if one or more entries were incomplete. 
- if (sawIncomplete) { - // If there is an "Unknown" type within an unknown type, remove - // it. Otherwise we might end up resolving the cycle with a type - // that includes an undesirable unknown. - return { - type: cacheEntry?.type ? removeUnknownFromUnion(cacheEntry.type) : undefined, - usedOuterScopeAlias: loopUsedOuterScopeAlias, - isIncomplete: true, - }; - } - - // We have made it all the way through all the antecedents, and we can - // mark the type as complete. - return setCacheEntry( - curFlowNode, - cacheEntry!.type, - loopUsedOuterScopeAlias, - /* isIncomplete */ false - ); - } - - if (curFlowNode.flags & (FlowFlags.TrueCondition | FlowFlags.FalseCondition)) { - const conditionalFlowNode = curFlowNode as FlowCondition; - - if (reference) { - const typeNarrowingCallback = getTypeNarrowingCallback( - evaluatorInterface, - reference, - conditionalFlowNode - ); - if (typeNarrowingCallback) { - const flowTypeResult = getTypeFromFlowNode( - conditionalFlowNode.antecedent, - reference, - targetSymbolId, - initialType, - isInitialTypeIncomplete - ); - let flowType = flowTypeResult.type; - if (flowType) { - flowType = typeNarrowingCallback(flowType); - } - - return setCacheEntry( - curFlowNode, - flowType, - flowTypeResult.usedOuterScopeAlias, - flowTypeResult.isIncomplete - ); - } - } - - curFlowNode = conditionalFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & (FlowFlags.TrueNeverCondition | FlowFlags.FalseNeverCondition)) { - const conditionalFlowNode = curFlowNode as FlowCondition; - if (conditionalFlowNode.reference) { - // Make sure the reference type has a declared type. If not, - // don't bother trying to infer its type because that would be - // too expensive. 
- const symbolWithScope = lookUpSymbolRecursive( - conditionalFlowNode.reference, - conditionalFlowNode.reference.value, - /* honorCodeFlow */ false - ); - if (symbolWithScope && symbolWithScope.symbol.getTypedDeclarations().length > 0) { - const typeNarrowingCallback = getTypeNarrowingCallback( - evaluatorInterface, - conditionalFlowNode.reference, - conditionalFlowNode - ); - if (typeNarrowingCallback) { - const refTypeInfo = getTypeOfExpression(conditionalFlowNode.reference!); - const narrowedType = typeNarrowingCallback(refTypeInfo.type) || refTypeInfo.type; - - // If the narrowed type is "never", don't allow further exploration. - if (isNever(narrowedType)) { - return setCacheEntry( - curFlowNode, - undefined, - usedOuterScopeAlias, - !!refTypeInfo.isIncomplete - ); - } - } - } - } - curFlowNode = conditionalFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.ExhaustedMatch) { - const exhaustedMatchFlowNode = curFlowNode as FlowExhaustedMatch; - const narrowedTypeResult = evaluateTypeForSubnode(exhaustedMatchFlowNode.node, () => { - evaluateTypesForMatchNode(exhaustedMatchFlowNode.node); - }); - - // If the narrowed type is "never", don't allow further exploration. 
- if (narrowedTypeResult && isNever(narrowedTypeResult.type)) { - return setCacheEntry( - curFlowNode, - undefined, - usedOuterScopeAlias, - !!narrowedTypeResult.isIncomplete - ); - } - - curFlowNode = exhaustedMatchFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.NarrowForPattern) { - const patternFlowNode = curFlowNode as FlowNarrowForPattern; - if ( - !reference || - ParseTreeUtils.isMatchingExpression(reference, patternFlowNode.subjectExpression) - ) { - const typeResult = evaluateTypeForSubnode(patternFlowNode.statement, () => { - if (patternFlowNode.statement.nodeType === ParseNodeType.Case) { - evaluateTypesForCaseNode(patternFlowNode.statement); - } else { - evaluateTypesForMatchNode(patternFlowNode.statement); - } - }); - if (typeResult) { - if (!reference) { - if (isNever(typeResult.type)) { - return setCacheEntry( - curFlowNode, - undefined, - usedOuterScopeAlias, - !!typeResult.isIncomplete - ); - } - } else { - return setCacheEntry( - curFlowNode, - typeResult.type, - usedOuterScopeAlias, - !!typeResult.isIncomplete - ); - } - } - } - curFlowNode = patternFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.PreFinallyGate) { - const preFinallyFlowNode = curFlowNode as FlowPreFinallyGate; - if (preFinallyFlowNode.isGateClosed) { - return { type: undefined, usedOuterScopeAlias, isIncomplete: false }; - } - curFlowNode = preFinallyFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.PostFinally) { - const postFinallyFlowNode = curFlowNode as FlowPostFinally; - const wasGateClosed = postFinallyFlowNode.preFinallyGate.isGateClosed; - try { - postFinallyFlowNode.preFinallyGate.isGateClosed = true; - let flowTypeResult: FlowNodeTypeResult | undefined; - - // Use speculative mode for the remainder of the finally suite - // because the final types within this parse node block should be - // evaluated when the gate is open. 
- useSpeculativeMode(postFinallyFlowNode.finallyNode, () => { - flowTypeResult = getTypeFromFlowNode( - postFinallyFlowNode.antecedent, - reference, - targetSymbolId, - initialType, - isInitialTypeIncomplete - ); - }); - - // If the type is incomplete, don't write back to the cache. - return flowTypeResult!.isIncomplete - ? flowTypeResult! - : setCacheEntry( - curFlowNode, - flowTypeResult!.type, - flowTypeResult!.usedOuterScopeAlias, - /* isIncomplete */ false - ); - } finally { - postFinallyFlowNode.preFinallyGate.isGateClosed = wasGateClosed; - } - } - - if (curFlowNode.flags & FlowFlags.Start) { - return setCacheEntry(curFlowNode, initialType, usedOuterScopeAlias, isInitialTypeIncomplete); - } - - if (curFlowNode.flags & FlowFlags.WildcardImport) { - const wildcardImportFlowNode = curFlowNode as FlowWildcardImport; - if (reference && reference.nodeType === ParseNodeType.Name) { - const nameValue = reference.value; - if (wildcardImportFlowNode.names.some((name) => name === nameValue)) { - const type = getTypeFromWildcardImport(wildcardImportFlowNode, nameValue); - return setCacheEntry(curFlowNode, type, usedOuterScopeAlias, /* isIncomplete */ false); - } - } - - curFlowNode = wildcardImportFlowNode.antecedent; - continue; - } - - // We shouldn't get here. - fail('Unexpected flow node flags'); - return setCacheEntry(curFlowNode, undefined, usedOuterScopeAlias, /* isIncomplete */ false); - } - } - - if (!flowNode) { - // This should happen only in cases where we're evaluating - // parse nodes that are created after the initial parse - // (namely, string literals that are used for forward - // referenced types). 
- return { - type: initialType, - usedOuterScopeAlias: false, - isIncomplete: isInitialTypeIncomplete, - }; - } - - return getTypeFromFlowNode(flowNode, reference, targetSymbolId, initialType, isInitialTypeIncomplete); - } - - return { - getTypeFromCodeFlow, - }; - } - - // Determines whether the specified flowNode can be reached by any - // control flow path within the execution context. If sourceFlowNode - // is specified, it returns true only if at least one control flow - // path passes through sourceFlowNode. - function isFlowNodeReachable(flowNode: FlowNode, sourceFlowNode?: FlowNode): boolean { - const visitedFlowNodeMap = new Set(); - - function isFlowNodeReachableRecursive(flowNode: FlowNode, sourceFlowNode: FlowNode | undefined): boolean { - let curFlowNode = flowNode; - - while (true) { - // If we've already visited this node, we can assume - // it wasn't reachable. - if (visitedFlowNodeMap.has(curFlowNode.id)) { - return false; - } - - // Note that we've been here before. - visitedFlowNodeMap.add(curFlowNode.id); - - if (curFlowNode.flags & FlowFlags.Unreachable) { - return false; - } - - if (curFlowNode === sourceFlowNode) { - return true; - } - - if ( - curFlowNode.flags & - (FlowFlags.VariableAnnotation | - FlowFlags.Assignment | - FlowFlags.AssignmentAlias | - FlowFlags.TrueCondition | - FlowFlags.FalseCondition | - FlowFlags.WildcardImport | - FlowFlags.TrueNeverCondition | - FlowFlags.FalseNeverCondition | - FlowFlags.NarrowForPattern | - FlowFlags.ExhaustedMatch) - ) { - const typedFlowNode = curFlowNode as - | FlowVariableAnnotation - | FlowAssignment - | FlowAssignmentAlias - | FlowCondition - | FlowWildcardImport - | FlowCondition - | FlowExhaustedMatch; - curFlowNode = typedFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & FlowFlags.Call) { - const callFlowNode = curFlowNode as FlowCall; - - // If this function returns a "NoReturn" type, that means - // it always raises an exception or otherwise doesn't return, - // so we can 
assume that the code before this is unreachable. - if (isCallNoReturn(callFlowNode.node)) { - return false; - } - - curFlowNode = callFlowNode.antecedent; - continue; - } - - if (curFlowNode.flags & (FlowFlags.BranchLabel | FlowFlags.LoopLabel)) { - if (curFlowNode.flags & FlowFlags.PostContextManager) { - // Determine whether any of the context managers support exception - // suppression. If not, none of its antecedents are reachable. - const contextMgrNode = curFlowNode as FlowPostContextManagerLabel; - if ( - !contextMgrNode.expressions.some((expr) => - isExceptionContextManager(expr, contextMgrNode.isAsync) - ) - ) { - return false; - } - } - - const labelNode = curFlowNode as FlowLabel; - for (const antecedent of labelNode.antecedents) { - if (isFlowNodeReachableRecursive(antecedent, sourceFlowNode)) { - return true; - } - } - return false; - } - - if (curFlowNode.flags & FlowFlags.Start) { - // If we hit the start but were looking for a particular source flow - // node, return false. Otherwise, the start is what we're looking for. - return sourceFlowNode ? false : true; - } - - if (curFlowNode.flags & FlowFlags.PreFinallyGate) { - const preFinallyFlowNode = curFlowNode as FlowPreFinallyGate; - return !preFinallyFlowNode.isGateClosed; - } - - if (curFlowNode.flags & FlowFlags.PostFinally) { - const postFinallyFlowNode = curFlowNode as FlowPostFinally; - const wasGateClosed = postFinallyFlowNode.preFinallyGate.isGateClosed; - - try { - postFinallyFlowNode.preFinallyGate.isGateClosed = true; - return isFlowNodeReachableRecursive(postFinallyFlowNode.antecedent, sourceFlowNode); - } finally { - postFinallyFlowNode.preFinallyGate.isGateClosed = wasGateClosed; - } - } - - // We shouldn't get here. - fail('Unexpected flow node flags'); - return false; - } - } - - // Protect against infinite recursion. 
- if (isReachableRecursionMap.has(flowNode.id)) { - return true; - } - isReachableRecursionMap.set(flowNode.id, true); - - try { - return isFlowNodeReachableRecursive(flowNode, sourceFlowNode); - } finally { - isReachableRecursionMap.delete(flowNode.id); - } - } - - // Specializes the specified (potentially generic) class type using - // the specified type arguments, reporting errors as appropriate. - // Returns the specialized type and a boolean indicating whether - // the type indicates a class type (true) or an object type (false). - function createSpecializedClassType( - classType: ClassType, - typeArgs: TypeResult[] | undefined, - flags: EvaluatorFlags, - errorNode: ParseNode - ): Type { - // Handle the special-case classes that are not defined - // in the type stubs. - if (ClassType.isSpecialBuiltIn(classType)) { - const aliasedName = classType.aliasName || classType.details.name; - switch (aliasedName) { - case 'Callable': { - return createCallableType(typeArgs, errorNode); - } - - case 'Optional': { - return createOptionalType(errorNode, typeArgs); - } - - case 'Type': { - let typeType = createSpecialType(classType, typeArgs, 1); - if (isInstantiableClass(typeType)) { - typeType = explodeGenericClass(typeType); - } - return typeType; - } - - case 'ClassVar': { - return createClassVarType(errorNode, typeArgs, flags); - } - - case 'Protocol': { - return createSpecialType( - classType, - typeArgs, - /* paramLimit */ undefined, - /* allowParamSpec */ true - ); - } - - case 'Tuple': { - return createSpecialType(classType, typeArgs, /* paramLimit */ undefined); - } - - case 'Union': { - return createUnionType(typeArgs); - } - - case 'Generic': { - return createGenericType(errorNode, classType, typeArgs); - } - - case 'Final': { - return createFinalType(classType, errorNode, typeArgs, flags); - } - - case 'Annotated': { - return createAnnotatedType(errorNode, typeArgs); - } - - case 'Concatenate': { - return createConcatenateType(errorNode, classType, typeArgs); - 
} - - case 'TypeGuard': { - return createTypeGuardType(errorNode, classType, typeArgs); - } - - case 'Unpack': { - return createUnpackType(errorNode, typeArgs); - } - - case 'Required': - case 'NotRequired': { - return createRequiredType(classType, errorNode, aliasedName === 'Required', typeArgs); - } - } - } + } const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); if ( @@ -15778,6 +17055,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Handle "type" specially, since it needs to act like "Type" // in Python 3.9 and newer. if (ClassType.isBuiltIn(classType, 'type') && typeArgs) { + // PEP 484 says that type[Any] should be considered + // equivalent to type. + if (typeArgs.length === 1 && isAnyOrUnknown(typeArgs[0].type)) { + return classType; + } + const typeClass = getTypingType(errorNode, 'Type'); if (typeClass && isInstantiableClass(typeClass)) { let typeType = createSpecialType( @@ -15812,7 +17095,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let typeArgCount = typeArgs ? typeArgs.length : 0; // Make sure the argument list count is correct. - const typeParameters = ClassType.getTypeParameters(classType); + const typeParameters = ClassType.isPseudoGenericClass(classType) ? [] : ClassType.getTypeParameters(classType); // If there are no type parameters or args, the class is already specialized. // No need to do any more work. 
@@ -15830,10 +17113,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions addDiagnostic( fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, - Localizer.Diagnostic.typeArgsExpectingNone(), + Localizer.Diagnostic.typeArgsExpectingNone().format({ + name: classType.aliasName || classType.details.name, + }), typeArgs[typeParameters.length].node ); - } else { + } else if (typeParameters.length !== 1 || !isParamSpec(typeParameters[0])) { addDiagnostic( fileInfo.diagnosticRuleSet.reportGeneralTypeIssues, DiagnosticRule.reportGeneralTypeIssues, @@ -15876,26 +17161,124 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - validateTypeArg( - typeArg, - /* allowEmptyTuple */ false, - /* allowVariadicTypeVar */ false, - /* allowParamSpec */ true - ); + const typeParam = index < typeParameters.length ? typeParameters[index] : undefined; + const isParamSpecTarget = typeParam?.details.isParamSpec; + + validateTypeArg(typeArg, { + allowParamSpec: true, + allowTypeArgList: isParamSpecTarget, + }); }); } - // Fill in any missing type arguments with Unknown. - const typeArgTypes = typeArgs ? typeArgs.map((t) => convertToInstance(t.type)) : []; - const typeParams = ClassType.getTypeParameters(classType); - for (let i = typeArgTypes.length; i < typeParams.length; i++) { - typeArgTypes.push(UnknownType.create()); + // Handle ParamSpec arguments and fill in any missing type arguments with Unknown. + let typeArgTypes: Type[] = []; + const fullTypeParams = ClassType.getTypeParameters(classType); + + // PEP 612 says that if the class has only one type parameter consisting + // of a ParamSpec, the list of arguments does not need to be enclosed in + // a list. We'll handle that case specially here. 
+ if (fullTypeParams.length === 1 && fullTypeParams[0].details.isParamSpec && typeArgs) { + if ( + typeArgs.every( + (typeArg) => !isEllipsisType(typeArg.type) && !typeArg.typeList && !isParamSpec(typeArg.type) + ) + ) { + if ( + typeArgs.length !== 1 || + !isInstantiableClass(typeArgs[0].type) || + !ClassType.isBuiltIn(typeArgs[0].type, 'Concatenate') + ) { + // Package up the type arguments into a typeList. + typeArgs = [ + { + type: UnknownType.create(), + node: typeArgs[0].node, + typeList: typeArgs, + }, + ]; + } + } else if (typeArgs.length > 1) { + const paramSpecTypeArg = typeArgs.find((typeArg) => isParamSpec(typeArg.type)); + if (paramSpecTypeArg) { + addError(Localizer.Diagnostic.paramSpecContext(), paramSpecTypeArg.node); + } + + const listTypeArg = typeArgs.find((typeArg) => !!typeArg.typeList); + if (listTypeArg) { + addError(Localizer.Diagnostic.typeArgListNotAllowed(), listTypeArg.node); + } + } } - typeArgTypes.forEach((typeArgType, index) => { + fullTypeParams.forEach((typeParam, index) => { + if (typeArgs && index < typeArgs.length) { + if (typeParam.details.isParamSpec) { + const typeArg = typeArgs[index]; + const functionType = FunctionType.createInstantiable('', '', '', FunctionTypeFlags.ParamSpecValue); + TypeBase.setSpecialForm(functionType); + + if (isEllipsisType(typeArg.type)) { + FunctionType.addDefaultParameters(functionType); + functionType.details.flags |= FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck; + typeArgTypes.push(functionType); + return; + } + + if (typeArg.typeList) { + typeArg.typeList!.forEach((paramType, paramIndex) => { + FunctionType.addParameter(functionType, { + category: ParameterCategory.Simple, + name: `__p${paramIndex}`, + isNameSynthesized: true, + type: convertToInstance(paramType.type), + hasDeclaredType: true, + }); + }); + typeArgTypes.push(functionType); + return; + } + + if (isInstantiableClass(typeArg.type) && ClassType.isBuiltIn(typeArg.type, 'Concatenate')) { + const concatTypeArgs = 
typeArg.type.typeArguments; + if (concatTypeArgs && concatTypeArgs.length > 0) { + concatTypeArgs.forEach((typeArg, index) => { + if (index === concatTypeArgs.length - 1) { + if (isParamSpec(typeArg)) { + functionType.details.paramSpec = typeArg; + } + } else { + FunctionType.addParameter(functionType, { + category: ParameterCategory.Simple, + name: `__p${index}`, + isNameSynthesized: true, + hasDeclaredType: true, + type: typeArg, + }); + } + }); + } + + typeArgTypes.push(functionType); + return; + } + } + + typeArgTypes.push(convertToInstance(typeArgs[index].type)); + return; + } + + typeArgTypes.push(UnknownType.create()); + }); + + typeArgTypes = typeArgTypes.map((typeArgType, index) => { if (index < typeArgCount) { const diag = new DiagnosticAddendum(); - if (!canAssignToTypeVar(typeParameters[index], typeArgType, diag)) { + const adjustedTypeArgType = applyTypeArgToTypeVar(typeParameters[index], typeArgType, diag); + + if (adjustedTypeArgType) { + typeArgType = adjustedTypeArgType; + } else { // Avoid emitting this error for a partially-constructed class. if (!isClassInstance(typeArgType) || !ClassType.isPartiallyConstructed(typeArgType)) { const fileInfo = AnalyzerNodeInfo.getFileInfo(typeArgs![index].node); @@ -15911,6 +17294,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } } + + return typeArgType; }); const specializedClass = ClassType.cloneForSpecialization(classType, typeArgTypes, typeArgs !== undefined); @@ -15918,28 +17303,28 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return specializedClass; } - function getTypeForArgument(arg: FunctionArgument): Type { + function getTypeForArgument(arg: FunctionArgument): TypeArgumentResult { if (arg.type) { - return arg.type; + return { type: arg.type }; } if (!arg.valueExpression) { // We shouldn't ever get here, but just in case. 
- return UnknownType.create(); + return { type: UnknownType.create() }; } // If there was no defined type provided, there should always // be a value expression from which we can retrieve the type. - return getTypeOfExpression(arg.valueExpression).type; + return getTypeOfExpression(arg.valueExpression); } // This function is like getTypeForArgument except that it is // used in cases where the argument is expected to be a type // and therefore follows the normal rules of types (e.g. they // can be forward-declared in stubs, etc.). - function getTypeForArgumentExpectingType(arg: FunctionArgument): Type { + function getTypeForArgumentExpectingType(arg: FunctionArgument): TypeArgumentResult { if (arg.type) { - return arg.type; + return { type: arg.type }; } // If there was no defined type provided, there should always @@ -15947,7 +17332,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return getTypeForExpressionExpectingType(arg.valueExpression!); } - function getTypeForExpressionExpectingType(node: ExpressionNode, allowFinal = false) { + function getTypeForExpressionExpectingType( + node: ExpressionNode, + allowFinal = false, + allowRequired = false, + interpreterParsesStringLiteral = false + ): TypeResult { let flags = EvaluatorFlags.ExpectingType | EvaluatorFlags.EvaluateStringLiteralAsType | @@ -15958,13 +17348,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const fileInfo = AnalyzerNodeInfo.getFileInfo(node); if (fileInfo.isStubFile) { flags |= EvaluatorFlags.AllowForwardReferences; + } else { + flags |= EvaluatorFlags.InterpreterParsesStringLiteral; } if (!allowFinal) { flags |= EvaluatorFlags.FinalDisallowed; } - return getTypeOfExpression(node, undefined, flags).type; + if (allowRequired) { + flags |= EvaluatorFlags.RequiredAllowed; + } + + return getTypeOfExpression(node, undefined, flags); } function getBuiltInType(node: ParseNode, name: string): Type { @@ -15998,11 +17394,22 @@ 
export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return nameType; } - function lookUpSymbolRecursive(node: ParseNode, name: string, honorCodeFlow: boolean) { + function lookUpSymbolRecursive( + node: ParseNode, + name: string, + honorCodeFlow: boolean, + preferGlobalScope = false + ): SymbolWithScope | undefined { const scope = ScopeUtils.getScopeForNode(node); let symbolWithScope = scope?.lookUpSymbolRecursive(name); + const scopeType = scope?.type ?? ScopeType.Module; + + // Functions and list comprehensions don't allow access to implicitly + // aliased symbols in outer scopes if they haven't yet been assigned + // within the local scope. + const scopeTypeHonorsCodeFlow = scopeType !== ScopeType.Function && scopeType !== ScopeType.ListComprehension; - if (symbolWithScope && honorCodeFlow) { + if (symbolWithScope && honorCodeFlow && scopeTypeHonorsCodeFlow) { // Filter the declarations based on flow reachability. const reachableDecls = symbolWithScope.symbol.getDeclarations().filter((decl) => { if (decl.type !== DeclarationType.Alias && decl.type !== DeclarationType.Intrinsic) { @@ -16019,12 +17426,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // to the source, see if the usage node is reachable by // any path. const flowNode = AnalyzerNodeInfo.getFlowNode(node); - const isReachable = flowNode && isFlowNodeReachable(flowNode); + const isReachable = flowNode && codeFlowEngine.isFlowNodeReachable(flowNode); return !isReachable; } } } - return true; }); @@ -16043,16 +17449,50 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + // PEP 563 indicates that if a forward reference can be resolved in the module + // scope (or, by implication, in the builtins scope), it should prefer that + // resolution over local resolutions. 
+ if (symbolWithScope && preferGlobalScope) { + let curSymbolWithScope: SymbolWithScope | undefined = symbolWithScope; + while ( + curSymbolWithScope.scope.type !== ScopeType.Module && + curSymbolWithScope.scope.type !== ScopeType.Builtin && + curSymbolWithScope.scope.parent + ) { + curSymbolWithScope = curSymbolWithScope.scope.parent.lookUpSymbolRecursive( + name, + curSymbolWithScope.isOutsideCallerModule, + curSymbolWithScope.isBeyondExecutionScope || curSymbolWithScope.scope.isIndependentlyExecutable() + ); + if (!curSymbolWithScope) { + break; + } + } + + if ( + curSymbolWithScope?.scope.type === ScopeType.Module || + curSymbolWithScope?.scope.type === ScopeType.Builtin + ) { + symbolWithScope = curSymbolWithScope; + } + } + return symbolWithScope; } // Disables recording of errors and warnings. function suppressDiagnostics(node: ParseNode, callback: () => T) { suppressedNodeStack.push(node); + try { - return callback(); - } finally { + const result = callback(); suppressedNodeStack.pop(); + return result; + } catch (e) { + // We don't use finally here because the TypeScript debugger doesn't + // handle finally well when single stepping. + suppressedNodeStack.pop(); + throw e; } } @@ -16063,18 +17503,28 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions speculativeTypeTracker.enterSpeculativeContext(speculativeNode, allowCacheRetention); try { - return callback(); - } finally { + const result = callback(); + speculativeTypeTracker.leaveSpeculativeContext(); + return result; + } catch (e) { + // We don't use finally here because the TypeScript debugger doesn't + // handle finally well when single stepping. 
speculativeTypeTracker.leaveSpeculativeContext(); + throw e; } } function disableSpeculativeMode(callback: () => void) { const stack = speculativeTypeTracker.disableSpeculativeMode(); + try { callback(); - } finally { speculativeTypeTracker.enableSpeculativeMode(stack); + } catch (e) { + // We don't use finally here because the TypeScript debugger doesn't + // handle finally well when single stepping. + speculativeTypeTracker.enableSpeculativeMode(stack); + throw e; } } @@ -16127,17 +17577,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return decl.type === DeclarationType.Alias && decl.node === node.parent; }); - // Make a shallow copy and clear the "usesLocalName" field. - const nonLocalDecls = declsForThisImport.map((localDecl) => { - if (localDecl.type === DeclarationType.Alias) { - const nonLocalDecl: AliasDeclaration = { ...localDecl }; - nonLocalDecl.usesLocalName = false; - return nonLocalDecl; - } - return localDecl; - }); - - declarations.push(...nonLocalDecls); + declarations.push(...getDeclarationsWithUsesLocalNameRemoved(declsForThisImport)); } } } else if ( @@ -16212,17 +17652,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions evaluateTypesForStatement(node); // Synthesize an alias declaration for this name part. The only - // time this case is used is for the hover provider. - const aliasDeclaration: AliasDeclaration = { - type: DeclarationType.Alias, - node: undefined!, - path: importInfo.resolvedPaths[namePartIndex], - range: getEmptyRange(), - implicitImports: new Map(), - usesLocalName: false, - moduleName: '', - }; - declarations.push(aliasDeclaration); + // time this case is used is for IDE services such as + // the find all references, hover provider and etc. 
+ declarations.push(createSynthesizedAliasDeclaration(importInfo.resolvedPaths[namePartIndex])); } } } else if (node.parent && node.parent.nodeType === ParseNodeType.Argument && node === node.parent.name) { @@ -16231,7 +17663,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const argNode = node.parent; const paramName = node.value; if (argNode.parent && argNode.parent.nodeType === ParseNodeType.Call) { - const baseType = getTypeOfExpression(argNode.parent.leftExpression).type; + const baseType = getTypeOfExpression( + argNode.parent.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; if (baseType) { if (isFunction(baseType) && baseType.details.declaration) { @@ -16252,7 +17688,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ClassType.cloneAsInstance(baseType), '__init__', { method: 'get' }, - new DiagnosticAddendum(), + /* diag */ undefined, MemberAccessFlags.SkipObjectBaseClass )?.type; @@ -16272,19 +17708,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } else { const fileInfo = AnalyzerNodeInfo.getFileInfo(node); - let allowForwardReferences = fileInfo.isStubFile; // Determine if this node is within a quoted type annotation. 
- if ( - ParseTreeUtils.isWithinTypeAnnotation( - node, - !isAnnotationEvaluationPostponed(AnalyzerNodeInfo.getFileInfo(node)) - ) - ) { - allowForwardReferences = true; - } + const isWithinTypeAnnotation = ParseTreeUtils.isWithinTypeAnnotation( + node, + !isAnnotationEvaluationPostponed(AnalyzerNodeInfo.getFileInfo(node)) + ); + const allowForwardReferences = isWithinTypeAnnotation || fileInfo.isStubFile; + + const symbolWithScope = lookUpSymbolRecursive( + node, + node.value, + !allowForwardReferences, + isWithinTypeAnnotation + ); - const symbolWithScope = lookUpSymbolRecursive(node, node.value, !allowForwardReferences); if (symbolWithScope) { declarations.push(...symbolWithScope.symbol.getDeclarations()); } @@ -16385,12 +17823,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (typeAnnotationNode) { - const declaredType = getTypeOfAnnotation(typeAnnotationNode, { - associateTypeVarsWithScope: true, - allowTypeVarTuple: declaration.node.category === ParameterCategory.VarArgList, - disallowRecursiveTypeAlias: true, - }); - return transformVariadicParamType(declaration.node, declaration.node.category, declaredType); + const declaredType = getTypeOfParameterAnnotation(typeAnnotationNode, declaration.node.category); + + return transformVariadicParamType( + declaration.node, + declaration.node.category, + adjustParameterAnnotatedType(declaration.node, declaredType) + ); } return undefined; @@ -16403,12 +17842,28 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const typeAliasNode = isDeclaredTypeAlias(typeAnnotationNode) ? 
ParseTreeUtils.getTypeAnnotationNode(typeAnnotationNode) : undefined; - let declaredType = getTypeOfAnnotation(typeAnnotationNode, { - isVariableAnnotation: true, - allowClassVar: - !declaration.isFinal && - ParseTreeUtils.isClassVarAllowedForAssignmentTarget(declaration.node), - }); + let declaredType: Type; + + if (declaration.isRuntimeTypeExpression) { + declaredType = convertToInstance( + getTypeForExpressionExpectingType( + typeAnnotationNode, + /* allowFinal */ true, + /* allowRequired */ true + ).type + ); + } else { + const declNode = + declaration.isDefinedByMemberAccess && + declaration.node.parent?.nodeType === ParseNodeType.MemberAccess + ? declaration.node.parent + : declaration.node; + declaredType = getTypeOfAnnotation(typeAnnotationNode, { + isVariableAnnotation: true, + allowClassVar: ParseTreeUtils.isClassVarAllowedForAssignmentTarget(declNode), + allowFinal: ParseTreeUtils.isFinalAllowedForAssignmentTarget(declNode), + }); + } if (declaredType) { // Apply enum transform if appropriate. @@ -16438,7 +17893,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - function getInferredTypeOfDeclaration(decl: Declaration): Type | undefined { + function getInferredTypeOfDeclaration(symbol: Symbol, decl: Declaration): Type | undefined { const resolvedDecl = resolveAliasDeclaration( decl, /* resolveLocalNames */ true, @@ -16448,7 +17903,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // We couldn't resolve the alias. Substitute an unknown // type in this case. if (!resolvedDecl) { - return UnknownType.create(); + return evaluatorOptions.evaluateUnknownImportsAsAny ? 
AnyType.create() : UnknownType.create(); } function applyLoaderActionsToModuleType( @@ -16456,13 +17911,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions loaderActions: ModuleLoaderActions, importLookup: ImportLookup ): Type { - if (loaderActions.path) { + if (loaderActions.path && loaderActions.loadSymbolsFromPath) { const lookupResults = importLookup(loaderActions.path); if (lookupResults) { moduleType.fields = lookupResults.symbolTable; moduleType.docString = lookupResults.docString; } else { - return UnknownType.create(); + return evaluatorOptions.evaluateUnknownImportsAsAny ? AnyType.create() : UnknownType.create(); } } @@ -16524,29 +17979,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If this is part of a "py.typed" package, don't fall back on type inference // unless it's marked Final, is a constant, or is a declared type alias. const fileInfo = AnalyzerNodeInfo.getFileInfo(resolvedDecl.node); - let isSpeculativeTypeAliasFromPyTypedFile = false; - - if (fileInfo.isInPyTypedPackage && !fileInfo.isStubFile && evaluatorOptions.disableInferenceForPyTypedSources) { - if (resolvedDecl.type !== DeclarationType.Variable) { - return UnknownType.create(); - } + let isUnambiguousType = !fileInfo.isInPyTypedPackage || fileInfo.isStubFile; - // Special-case variables within an enum class. These are effectively - // constants, so we'll treat them as such. - const enclosingClass = ParseTreeUtils.getEnclosingClass(resolvedDecl.node, /* stopAtFunction */ true); - let isEnumValue = false; - if (enclosingClass) { - const classTypeInfo = getTypeOfClass(enclosingClass); - if (classTypeInfo && ClassType.isEnumClass(classTypeInfo.classType)) { - isEnumValue = true; + // If this is a py.typed package, determine if this is a case where an unannotated + // variable is considered "unambiguous" because all type checkers are almost + // guaranteed to infer its type the same. 
+ if (!isUnambiguousType) { + if (resolvedDecl.type === DeclarationType.Variable) { + // Special-case variables within an enum class. These are effectively + // constants, so we'll treat them as unambiguous. + const enclosingClass = ParseTreeUtils.getEnclosingClass(resolvedDecl.node, /* stopAtFunction */ true); + if (enclosingClass) { + const classTypeInfo = getTypeOfClass(enclosingClass); + if (classTypeInfo && ClassType.isEnumClass(classTypeInfo.classType)) { + isUnambiguousType = true; + } } - } - if (!resolvedDecl.isFinal && !resolvedDecl.isConstant && !isEnumValue) { - if (!resolvedDecl.typeAliasName) { - return UnknownType.create(); - } else if (!resolvedDecl.typeAliasAnnotation) { - isSpeculativeTypeAliasFromPyTypedFile = true; + if (resolvedDecl.isFinal || resolvedDecl.isConstant) { + isUnambiguousType = true; } } } @@ -16586,15 +18037,35 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (inferredType && resolvedDecl.typeAliasName) { // If this was a speculative type alias, it becomes a real type alias only - // in the event that its inferred type is instantiable. - if (TypeBase.isInstantiable(inferredType) && !isAnyOrUnknown(inferredType)) { + // in the event that its inferred type is instantiable or explicitly Any + // (but not an ellipsis). + if ( + TypeBase.isInstantiable(inferredType) && + !isUnknown(inferredType) && + !isEllipsisType(inferredType) + ) { inferredType = transformTypeForTypeAlias( inferredType, resolvedDecl.typeAliasName, resolvedDecl.node ); - } else if (isSpeculativeTypeAliasFromPyTypedFile) { - return UnknownType.create(); + + isUnambiguousType = true; + } + } + + // Determine whether we need to mark the annotation as ambiguous. + if (inferredType && fileInfo.isInPyTypedPackage && !fileInfo.isStubFile) { + if (!isUnambiguousType) { + // See if this particular inference can be considered "unambiguous". + // Any symbol that is assigned more than once is considered ambiguous. 
+ if (isUnambiguousInference(symbol, decl, inferredType)) { + isUnambiguousType = true; + } + } + + if (!isUnambiguousType) { + inferredType = TypeBase.cloneForAmbiguousType(inferredType); } } @@ -16604,6 +18075,68 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return undefined; } + // Applies some heuristics to determine whether it's likely that all Python + // type checkers will infer the same type. + function isUnambiguousInference(symbol: Symbol, decl: Declaration, inferredType: Type): boolean { + const nonSlotsDecls = symbol.getDeclarations().filter((decl) => { + return decl.type !== DeclarationType.Variable || !decl.isInferenceAllowedInPyTyped; + }); + + // Any symbol with more than one assignment is considered ambiguous. + if (nonSlotsDecls.length > 1) { + return false; + } + + if (decl.type !== DeclarationType.Variable) { + return false; + } + + // If there are no non-slots declarations, don't mark the inferred type as ambiguous. + if (nonSlotsDecls.length === 0) { + return true; + } + + // TypeVar definitions don't require a declaration. + if (isTypeVar(inferredType)) { + return true; + } + + let assignmentNode: AssignmentNode | undefined; + + const parentNode = decl.node.parent; + if (parentNode) { + // Is this a simple assignment (x = y) or an assignment of an instance variable (self.x = y)? + if (parentNode.nodeType === ParseNodeType.Assignment) { + assignmentNode = parentNode; + } else if ( + parentNode.nodeType === ParseNodeType.MemberAccess && + parentNode.parent?.nodeType === ParseNodeType.Assignment + ) { + assignmentNode = parentNode.parent; + } + } + + if (!assignmentNode) { + return false; + } + + const assignedType = getTypeOfExpression(assignmentNode.rightExpression).type; + + // Assume that literal values will always result in the same inferred type. 
+ if (isClassInstance(assignedType) && isLiteralType(assignedType)) { + return true; + } + + // If the assignment is a simple name corresponding to an unambiguous + // type, we'll assume the resulting variable will receive the same + // unambiguous type. + if (assignmentNode.rightExpression.nodeType === ParseNodeType.Name && !TypeBase.isAmbiguous(assignedType)) { + return true; + } + + return false; + } + // If the specified declaration is an alias declaration that points to a symbol, // it resolves the alias and looks up the symbol, then returns the first declaration // associated with that symbol. It does this recursively if necessary. If a symbol @@ -16652,7 +18185,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ): EffectiveTypeResult { // If there's a declared type, it takes precedence over inferred types. if (symbol.hasTypedDeclarations()) { - const declaredType = getDeclaredTypeOfSymbol(symbol); + const declaredType = getDeclaredTypeOfSymbol(symbol, usageNode); return { type: declaredType || UnknownType.create(), isIncomplete: false, @@ -16676,16 +18209,27 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Infer the type. const typesToCombine: Type[] = []; - const isPrivate = symbol.isPrivateMember(); const decls = symbol.getDeclarations(); const isFinalVar = isFinalVariable(symbol); let isIncomplete = false; let includesVariableDecl = false; let includesSpeculativeResult = false; + let declIndexToConsider: number | undefined; + + // If the caller has requested that we use only the last decl, we + // will use only the last one, but we'll ignore decls that are in + // except clauses. + if (useLastDecl) { + decls.forEach((decl, index) => { + if (!decl.isInExceptSuite) { + declIndexToConsider = index; + } + }); + } + decls.forEach((decl, index) => { - // If useLastDecl is true, consider only the last declaration. 
- let considerDecl = !useLastDecl || index === decls.length - 1; + let considerDecl = declIndexToConsider === undefined || index === declIndexToConsider; if (usageNode !== undefined) { if (decl.type !== DeclarationType.Alias) { @@ -16701,7 +18245,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (considerDecl) { - const isTypeAlias = isExplicitTypeAliasDeclaration(decl) || isPossibleTypeAliasDeclaration(decl); + const isExplicitTypeAlias = isExplicitTypeAliasDeclaration(decl); + const isTypeAlias = isExplicitTypeAlias || isPossibleTypeAliasDeclaration(decl); // If this is a type alias, evaluate it outside of the recursive symbol // resolution check so we can evaluate the full assignment statement. @@ -16714,13 +18259,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (decl.typeAliasAnnotation) { // Mark "TypeAlias" declaration as accessed. - getTypeOfExpression(decl.typeAliasAnnotation); + getTypeOfAnnotation(decl.typeAliasAnnotation, { + isVariableAnnotation: true, + allowFinal: ParseTreeUtils.isFinalAllowedForAssignmentTarget(decl.node), + allowClassVar: ParseTreeUtils.isClassVarAllowedForAssignmentTarget(decl.node), + }); } } if (pushSymbolResolution(symbol, decl)) { try { - let type = getInferredTypeOfDeclaration(decl); + let type = getInferredTypeOfDeclaration(symbol, decl); if (!popSymbolResolution(symbol)) { isIncomplete = true; @@ -16738,15 +18287,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions isConstant = true; } - // If the symbol is private or constant, we can retain the literal + // If the symbol is constant, we can retain the literal // value. Otherwise, strip literal values to widen the type. 
- if ( - TypeBase.isInstance(type) && - !isTypeAlias && - !isPrivate && - !isConstant && - !isFinalVar - ) { + if (TypeBase.isInstance(type) && !isExplicitTypeAlias && !isConstant && !isFinalVar) { type = stripLiteralValue(type); } } @@ -16802,19 +18345,43 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions }; } - function getDeclaredTypeOfSymbol(symbol: Symbol): Type | undefined { + function getDeclaredTypeOfSymbol(symbol: Symbol, usageNode?: NameNode): Type | undefined { const synthesizedType = symbol.getSynthesizedType(); if (synthesizedType) { return synthesizedType; } - const typedDecls = symbol.getTypedDeclarations(); + let typedDecls = symbol.getTypedDeclarations(); if (typedDecls.length === 0) { // There was no declaration with a defined type. return undefined; } + // If there is more than one typed decl, filter out any that are not + // reachable from the usage node (if specified). This can happen in + // cases where a property symbol is redefined to add a setter, deleter, + // etc. + if (typedDecls.length > 1 && usageNode) { + const filteredTypedDecls = typedDecls.filter((decl) => { + if (decl.type !== DeclarationType.Alias) { + // Is the declaration in the same execution scope as the "usageNode" node? + const usageScope = ParseTreeUtils.getExecutionScopeNode(usageNode); + const declScope = ParseTreeUtils.getExecutionScopeNode(decl.node); + if (usageScope === declScope) { + if (!isFlowPathBetweenNodes(decl.node, usageNode, /* allowSelf */ false)) { + return false; + } + } + } + return true; + }); + + if (filteredTypedDecls.length > 0) { + typedDecls = filteredTypedDecls; + } + } + // Start with the last decl. If that's already being resolved, // use the next-to-last decl, etc. This can happen when resolving // property methods. 
Often the setter method is defined in reference to @@ -16873,6 +18440,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return ClassType.isEnumClass(classInfo.classType); } + function inferReturnTypeIfNecessary(type: Type) { + if (isFunction(type)) { + getFunctionEffectiveReturnType(type); + } else if (isOverloadedFunction(type)) { + type.overloads.forEach((overload) => { + getFunctionEffectiveReturnType(overload); + }); + } + } + // Returns the return type of the function. If the type is explicitly provided in // a type annotation, that type is returned. If not, an attempt is made to infer // the return type. If a list of args is provided, the inference logic may take @@ -16897,8 +18474,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function getFunctionInferredReturnType(type: FunctionType, args?: ValidateArgTypeParams[]) { let returnType: Type | undefined; - // Don't attempt to infer the return type for a stub file or a py.typed module. - if (FunctionType.isStubDefinition(type) || FunctionType.isPyTypedDefinition(type)) { + // Don't attempt to infer the return type for a stub file. + if (FunctionType.isStubDefinition(type)) { return UnknownType.create(); } @@ -16907,22 +18484,40 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (type.inferredReturnType) { returnType = type.inferredReturnType; } else { - if (type.details.declaration) { + // Don't bother inferring the return type of __init__ because it's + // always None. + if (FunctionType.isInstanceMethod(type) && type.details.name === '__init__') { + returnType = NoneType.createInstance(); + } else if (type.details.declaration) { const functionNode = type.details.declaration.node; - // Temporarily disable speculative mode while we - // lazily evaluate the return type. 
- disableSpeculativeMode(() => { - returnType = inferFunctionReturnType(functionNode, FunctionType.isAbstractMethod(type)); - }); + // Skip return type inference if we are in "skip unannotated function" mode. + if (evaluatorOptions.analyzeUnannotatedFunctions) { + const codeFlowComplexity = AnalyzerNodeInfo.getCodeFlowComplexity(functionNode); + + // For very complex functions that have no annotated parameter types, + // don't attempt to infer the return type because it can be extremely + // expensive. + const parametersAreAnnotated = + type.details.parameters.length <= 1 || + type.details.parameters.some((param) => param.hasDeclaredType); + + if (parametersAreAnnotated || codeFlowComplexity < maxReturnTypeInferenceCodeFlowComplexity) { + // Temporarily disable speculative mode while we + // lazily evaluate the return type. + disableSpeculativeMode(() => { + returnType = inferFunctionReturnType(functionNode, FunctionType.isAbstractMethod(type)); + }); - // Do we need to wrap this in an awaitable? - if (returnType && FunctionType.isWrapReturnTypeInAwait(type)) { - returnType = createAwaitableReturnType( - functionNode, - returnType, - !!type.details.declaration?.isGenerator - ); + // Do we need to wrap this in an awaitable? + if (returnType && FunctionType.isWrapReturnTypeInAwait(type)) { + returnType = createAwaitableReturnType( + functionNode, + returnType, + !!type.details.declaration?.isGenerator + ); + } + } } } @@ -16938,6 +18533,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // params, try to analyze the function with the provided argument types and // attempt to do a better job at inference. 
if ( + evaluatorOptions.analyzeUnannotatedFunctions && isPartlyUnknown(returnType) && FunctionType.hasUnannotatedParams(type) && !FunctionType.isStubDefinition(type) && @@ -16946,7 +18542,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) { const contextualReturnType = getFunctionInferredReturnTypeUsingArguments(type, args); if (contextualReturnType) { - returnType = removeNoReturnFromUnion(contextualReturnType); + returnType = contextualReturnType; } } @@ -17003,7 +18599,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const prevTypeCache = returnTypeInferenceTypeCache; returnTypeInferenceContextStack.push({ functionNode, - codeFlowAnalyzer: createCodeFlowAnalyzer(), + codeFlowAnalyzer: codeFlowEngine.createCodeFlowAnalyzer(), }); try { @@ -17043,7 +18639,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions paramType = UnknownType.create(); } - writeTypeCache(param.name, paramType, /* isIncomplete */ false); + writeTypeCache(param.name, paramType, EvaluatorFlags.None, /* isIncomplete */ false); } }); @@ -17062,7 +18658,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions contextualReturnType = removeUnbound(contextualReturnType); // Do we need to wrap this in an awaitable? 
- if (FunctionType.isWrapReturnTypeInAwait(type) && !isNoReturnType(contextualReturnType)) { + if (FunctionType.isWrapReturnTypeInAwait(type) && !isNever(contextualReturnType)) { contextualReturnType = createAwaitableReturnType( functionNode, contextualReturnType, @@ -17103,45 +18699,93 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return UnknownType.create(); } - function getTypeOfMemberInternal(node: ParseNode, member: ClassMember): TypeResult | undefined { + function getTypeOfMemberInternal( + node: ParseNode, + member: ClassMember, + selfClass: ClassType | undefined + ): TypeResult | undefined { if (isInstantiableClass(member.classType)) { const typeResult = getEffectiveTypeOfSymbolForUsage(member.symbol); + if (typeResult) { + // If the type is a function or overloaded function, infer + // and cache the return type if necessary. This needs to be done + // prior to specializing. + inferReturnTypeIfNecessary(typeResult.type); + return { node, - type: partiallySpecializeType(typeResult.type, member.classType), + type: partiallySpecializeType(typeResult.type, member.classType, selfClass), isIncomplete: !!typeResult.isIncomplete, }; } } + return undefined; } + // If treatSourceAsInstantiable is true, we're comparing the class object against the + // protocol. If it's false, we're comparing the class instance against the protocol. function canAssignClassToProtocol( destType: ClassType, srcType: ClassType, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap | undefined, flags: CanAssignFlags, - allowMetaclassForProtocols: boolean, + treatSourceAsInstantiable: boolean, recursionCount: number ): boolean { if (recursionCount > maxTypeRecursionCount) { return true; } + recursionCount++; - const destClassFields = destType.details.fields; + // Use a stack of pending protocol class evaluations to detect recursion. + // This can happen when a protocol class refers to itself. 
+ if ( + protocolAssignmentStack.some((entry) => { + return isTypeSame(entry.srcType, srcType) && isTypeSame(entry.destType, destType); + }) + ) { + return true; + } - // Some protocol definitions include recursive references to themselves. - // We need to protect against infinite recursion, so we'll check for that here. - if (ClassType.isSameGenericClass(srcType, destType)) { - if (isTypeSame(srcType, destType, /* ignorePseudoGeneric */ true)) { - return true; - } + protocolAssignmentStack.push({ srcType, destType }); + let isCompatible = true; - return verifyTypeArgumentsAssignable(destType, srcType, diag, typeVarMap, flags, recursionCount + 1); + try { + isCompatible = canAssignClassToProtocolInternal( + destType, + srcType, + diag, + typeVarMap, + flags, + treatSourceAsInstantiable, + recursionCount + ); + } catch (e) { + // We'd normally use "finally" here, but the TS debugger does such + // a poor job dealing with finally, we'll use a catch instead. + protocolAssignmentStack.pop(); + throw e; } + protocolAssignmentStack.pop(); + + return isCompatible; + } + + function canAssignClassToProtocolInternal( + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + typeVarMap: TypeVarMap | undefined, + flags: CanAssignFlags, + treatSourceAsInstantiable: boolean, + recursionCount: number + ): boolean { + const destClassFields = destType.details.fields; + // Strip the type arguments off the dest protocol if they are provided. const genericDestType = ClassType.cloneForSpecialization( destType, @@ -17150,23 +18794,52 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ); const genericDestTypeVarMap = new TypeVarMap(getTypeVarScopeId(destType)); + const selfTypeVarMap = new TypeVarMap(getTypeVarScopeId(destType)); + populateTypeVarMapForSelfType(selfTypeVarMap, destType, srcType); + + // If the source is a TypedDict, use the _TypedDict placeholder class + // instead. 
We don't want to use the TypedDict members for protocol + // comparison. + if (ClassType.isTypedDictClass(srcType)) { + if (typedDictClassType && isInstantiableClass(typedDictClassType)) { + srcType = typedDictClassType; + } + } + let typesAreConsistent = true; const srcClassTypeVarMap = buildTypeVarMapFromSpecializedClass(srcType); + const canAssignFlags = containsLiteralType(srcType, /* includeTypeArgs */ true) + ? CanAssignFlags.RetainLiteralsForTypeVar + : CanAssignFlags.Default; destClassFields.forEach((symbol, name) => { if (symbol.isClassMember() && !symbol.isIgnoredForProtocolMatch()) { let isMemberFromMetaclass = false; let srcMemberInfo: ClassMember | undefined; - // Look up in the metaclass first if allowed. + // Special-case the `__class_getitem__` for normal protocol comparison. + // This is a convention agreed upon by typeshed maintainers. + if (!treatSourceAsInstantiable && name === '__class_getitem__') { + return; + } + + // Special-case the `__slots__` entry for all protocol comparisons. + // This is a convention agreed upon by typeshed maintainers. + if (name === '__slots__') { + return; + } + + // Look in the metaclass first if we're treating the source as an instantiable class. 
if ( - allowMetaclassForProtocols && + treatSourceAsInstantiable && srcType.details.effectiveMetaclass && isInstantiableClass(srcType.details.effectiveMetaclass) ) { srcMemberInfo = lookUpClassMember(srcType.details.effectiveMetaclass, name); - srcClassTypeVarMap.addSolveForScope(getTypeVarScopeId(srcType.details.effectiveMetaclass)); - isMemberFromMetaclass = true; + if (srcMemberInfo) { + srcClassTypeVarMap.addSolveForScope(getTypeVarScopeId(srcType.details.effectiveMetaclass)); + isMemberFromMetaclass = true; + } } if (!srcMemberInfo) { @@ -17174,12 +18847,20 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (!srcMemberInfo) { - diag.addMessage(Localizer.DiagnosticAddendum.protocolMemberMissing().format({ name })); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.protocolMemberMissing().format({ name })); + } typesAreConsistent = false; } else { let destMemberType = getDeclaredTypeOfSymbol(symbol); if (destMemberType) { - let srcMemberType = getTypeOfMember(srcMemberInfo); + let srcMemberType = isInstantiableClass(srcMemberInfo.classType) + ? 
partiallySpecializeType( + getEffectiveTypeOfSymbol(srcMemberInfo.symbol), + srcMemberInfo.classType, + srcType + ) + : UnknownType.create(); if (isFunction(srcMemberType) || isOverloadedFunction(srcMemberType)) { if (isMemberFromMetaclass) { @@ -17188,12 +18869,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions srcMemberType, /* memberClass */ undefined, /* errorNode */ undefined, - recursionCount + 1, + recursionCount, /* treatConstructorAsClassMember */ false, srcType ); if (boundSrcFunction) { - srcMemberType = boundSrcFunction; + srcMemberType = removeParamSpecVariadicsFromSignature(boundSrcFunction); } if (isFunction(destMemberType) || isOverloadedFunction(destMemberType)) { @@ -17202,24 +18883,27 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions destMemberType, /* memberClass */ undefined, /* errorNode */ undefined, - recursionCount + 1, + recursionCount, /* treatConstructorAsClassMember */ false, srcType ); if (boundDeclaredType) { - destMemberType = boundDeclaredType; + destMemberType = removeParamSpecVariadicsFromSignature(boundDeclaredType); } } } else if (isInstantiableClass(srcMemberInfo.classType)) { + // Replace any "Self" TypeVar within the dest with the source type. + destMemberType = applySolvedTypeVars(destMemberType, selfTypeVarMap); + const boundSrcFunction = bindFunctionToClassOrObject( - ClassType.cloneAsInstance(srcType), + treatSourceAsInstantiable ? 
srcType : ClassType.cloneAsInstance(srcType), srcMemberType, srcMemberInfo.classType, /* errorNode */ undefined, - recursionCount + 1 + recursionCount ); if (boundSrcFunction) { - srcMemberType = boundSrcFunction; + srcMemberType = removeParamSpecVariadicsFromSignature(boundSrcFunction); } if (isFunction(destMemberType) || isOverloadedFunction(destMemberType)) { @@ -17228,48 +18912,83 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions destMemberType, srcMemberInfo.classType, /* errorNode */ undefined, - recursionCount + 1 + recursionCount ); if (boundDeclaredType) { - destMemberType = boundDeclaredType; + destMemberType = removeParamSpecVariadicsFromSignature(boundDeclaredType); } } } + } else { + // Replace any "Self" TypeVar within the dest with the source type. + destMemberType = applySolvedTypeVars(destMemberType, selfTypeVarMap); } - const subDiag = diag.createAddendum(); + const subDiag = diag?.createAddendum(); // Properties require special processing. 
- if ( - isClassInstance(destMemberType) && - ClassType.isPropertyClass(destMemberType) && - isClassInstance(srcMemberType) && - ClassType.isPropertyClass(srcMemberType) - ) { + if (isClassInstance(destMemberType) && ClassType.isPropertyClass(destMemberType)) { if ( - !canAssignProperty( - ClassType.cloneAsInstantiable(destMemberType), - ClassType.cloneAsInstantiable(srcMemberType), - srcType, - subDiag.createAddendum(), - genericDestTypeVarMap, - recursionCount + 1 - ) + isClassInstance(srcMemberType) && + ClassType.isPropertyClass(srcMemberType) && + !treatSourceAsInstantiable ) { - subDiag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); - typesAreConsistent = false; + if ( + !canAssignProperty( + evaluatorInterface, + ClassType.cloneAsInstantiable(destMemberType), + ClassType.cloneAsInstantiable(srcMemberType), + srcType, + subDiag?.createAddendum(), + genericDestTypeVarMap, + recursionCount + ) + ) { + if (subDiag) { + subDiag.addMessage( + Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name }) + ); + } + typesAreConsistent = false; + } + } else { + // Extract the property type from the property class. 
+ const getterType = getGetterTypeFromProperty( + destMemberType, + /* inferTypeIfNeeded */ true + ); + if ( + !getterType || + !canAssignType( + getterType, + srcMemberType, + subDiag?.createAddendum(), + genericDestTypeVarMap, + canAssignFlags, + recursionCount + ) + ) { + if (subDiag) { + subDiag.addMessage( + Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name }) + ); + } + typesAreConsistent = false; + } } } else if ( !canAssignType( destMemberType, srcMemberType, - subDiag.createAddendum(), + subDiag?.createAddendum(), genericDestTypeVarMap, - CanAssignFlags.Default, - recursionCount + 1 + canAssignFlags, + recursionCount ) ) { - subDiag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); + if (subDiag) { + subDiag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); + } typesAreConsistent = false; } @@ -17282,20 +19001,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isDestFinal !== isSrcFinal) { if (isDestFinal) { - subDiag.addMessage( - Localizer.DiagnosticAddendum.memberIsFinalInProtocol().format({ name }) - ); + if (subDiag) { + subDiag.addMessage( + Localizer.DiagnosticAddendum.memberIsFinalInProtocol().format({ name }) + ); + } } else { - subDiag.addMessage( - Localizer.DiagnosticAddendum.memberIsNotFinalInProtocol().format({ name }) - ); + if (subDiag) { + subDiag.addMessage( + Localizer.DiagnosticAddendum.memberIsNotFinalInProtocol().format({ name }) + ); + } } typesAreConsistent = false; } } if (symbol.isClassVar() && !srcMemberInfo.symbol.isClassMember()) { - diag.addMessage(Localizer.DiagnosticAddendum.protocolMemberClassVar().format({ name })); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.protocolMemberClassVar().format({ name })); + } typesAreConsistent = false; } } @@ -17314,11 +19039,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions !canAssignClassToProtocol( specializedBaseClass, 
srcType, - diag.createAddendum(), + diag?.createAddendum(), typeVarMap, flags, - allowMetaclassForProtocols, - recursionCount + 1 + treatSourceAsInstantiable, + recursionCount ) ) { typesAreConsistent = false; @@ -17339,7 +19064,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { typesAreConsistent = false; @@ -17352,7 +19077,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function canAssignModuleToProtocol( destType: ClassType, srcType: ModuleType, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap | undefined, flags: CanAssignFlags, recursionCount: number @@ -17360,6 +19085,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (recursionCount > maxTypeRecursionCount) { return true; } + recursionCount++; let typesAreConsistent = true; const destClassFields = destType.details.fields; @@ -17377,7 +19103,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const memberSymbol = srcType.fields.get(name); if (!memberSymbol) { - diag.addMessage(Localizer.DiagnosticAddendum.protocolMemberMissing().format({ name })); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.protocolMemberMissing().format({ name })); + } typesAreConsistent = false; } else { let declaredType = getDeclaredTypeOfSymbol(symbol); @@ -17391,7 +19119,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions declaredType, destType, /* errorNode */ undefined, - recursionCount + 1 + recursionCount ); if (boundDeclaredType) { declaredType = boundDeclaredType; @@ -17399,19 +19127,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - const subDiag = diag.createAddendum(); + const subDiag = diag?.createAddendum(); if ( !canAssignType( declaredType, srcMemberType, - subDiag.createAddendum(), 
+ subDiag?.createAddendum(), genericDestTypeVarMap, CanAssignFlags.Default, - recursionCount + 1 + recursionCount ) ) { - subDiag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); + if (subDiag) { + subDiag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); + } typesAreConsistent = false; } } @@ -17431,10 +19161,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions !canAssignModuleToProtocol( specializedBaseClass, srcType, - diag.createAddendum(), + diag?.createAddendum(), typeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { typesAreConsistent = false; @@ -17453,95 +19183,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions destType, specializedSrcProtocol, diag, - typeVarMap, - flags, - recursionCount - ) - ) { - typesAreConsistent = false; - } - } - - return typesAreConsistent; - } - - function canAssignProperty( - destPropertyType: ClassType, - srcPropertyType: ClassType, - srcClass: ClassType, - diag: DiagnosticAddendum, - typeVarMap?: TypeVarMap, - recursionCount = 0 - ): boolean { - const objectToBind = ClassType.cloneAsInstance(srcClass); - let isAssignable = true; - const accessors: { name: string; missingDiagMsg: () => string; incompatibleDiagMsg: () => string }[] = [ - { - name: 'fget', - missingDiagMsg: Localizer.DiagnosticAddendum.missingGetter, - incompatibleDiagMsg: Localizer.DiagnosticAddendum.incompatibleGetter, - }, - { - name: 'fset', - missingDiagMsg: Localizer.DiagnosticAddendum.missingSetter, - incompatibleDiagMsg: Localizer.DiagnosticAddendum.incompatibleSetter, - }, - { - name: 'fdel', - missingDiagMsg: Localizer.DiagnosticAddendum.missingDeleter, - incompatibleDiagMsg: Localizer.DiagnosticAddendum.incompatibleDeleter, - }, - ]; - - accessors.forEach((accessorInfo) => { - const destAccessSymbol = destPropertyType.details.fields.get(accessorInfo.name); - const destAccessType = destAccessSymbol ? 
getDeclaredTypeOfSymbol(destAccessSymbol) : undefined; - - if (destAccessType && isFunction(destAccessType)) { - const srcAccessSymbol = srcPropertyType.details.fields.get(accessorInfo.name); - const srcAccessType = srcAccessSymbol ? getDeclaredTypeOfSymbol(srcAccessSymbol) : undefined; - - if (!srcAccessType || !isFunction(srcAccessType)) { - diag.addMessage(accessorInfo.missingDiagMsg()); - isAssignable = false; - return; - } - - const boundDestAccessType = bindFunctionToClassOrObject( - objectToBind, - destAccessType, - /* memberClass */ undefined, - /* errorNode */ undefined, - recursionCount + 1 - ); - const boundSrcAccessType = bindFunctionToClassOrObject( - objectToBind, - srcAccessType, - /* memberClass */ undefined, - /* errorNode */ undefined, - recursionCount + 1 - ); - - if ( - !boundDestAccessType || - !boundSrcAccessType || - !canAssignType( - boundDestAccessType, - boundSrcAccessType, - diag.createAddendum(), - typeVarMap, - CanAssignFlags.Default, - recursionCount + 1 - ) - ) { - diag.addMessage('getter type is incompatible'); - isAssignable = false; - return; - } + typeVarMap, + flags, + recursionCount + ) + ) { + typesAreConsistent = false; } - }); + } - return isAssignable; + return typesAreConsistent; } // This function is used to validate the variance of type variables @@ -17575,12 +19226,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) { if ( !canAssignProperty( + evaluatorInterface, ClassType.cloneAsInstantiable(destMemberType), ClassType.cloneAsInstantiable(srcMemberType), srcType, diag, typeVarMap, - recursionCount + 1 + recursionCount ) ) { isAssignable = false; @@ -17593,9 +19245,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions primaryDecl?.type === DeclarationType.Variable && !primaryDecl.isFinal ? 
CanAssignFlags.EnforceInvariance : CanAssignFlags.Default; - if ( - !canAssignType(destMemberType, srcMemberType, diag, typeVarMap, flags, recursionCount + 1) - ) { + if (!canAssignType(destMemberType, srcMemberType, diag, typeVarMap, flags, recursionCount)) { isAssignable = false; } } @@ -17614,9 +19264,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) { const specializedDestBaseClass = specializeForBaseClass(destType, baseClass); const specializedSrcBaseClass = specializeForBaseClass(srcType, baseClass); - if ( - !canAssignProtocolClassToSelf(specializedDestBaseClass, specializedSrcBaseClass, recursionCount + 1) - ) { + if (!canAssignProtocolClassToSelf(specializedDestBaseClass, specializedSrcBaseClass, recursionCount)) { isAssignable = false; } } @@ -17628,22 +19276,47 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function canAssignClass( destType: ClassType, srcType: ClassType, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap | undefined, flags: CanAssignFlags, recursionCount: number, - reportErrorsUsingObjType: boolean, - allowMetaclassForProtocols = false + reportErrorsUsingObjType: boolean ): boolean { // Handle typed dicts. They also use a form of structural typing for type // checking, as defined in PEP 589. 
if (ClassType.isTypedDictClass(destType) && ClassType.isTypedDictClass(srcType)) { - return canAssignTypedDict(evaluatorInterface, destType, srcType, diag, recursionCount); + if (!canAssignTypedDict(evaluatorInterface, destType, srcType, diag, recursionCount)) { + return false; + } + + if (ClassType.isFinal(destType) !== ClassType.isFinal(srcType)) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typedDictFinalMismatch().format({ + sourceType: printType(convertToInstance(srcType)), + destType: printType(convertToInstance(destType)), + }) + ); + } + return false; + } + + // If invariance is being enforced, the two TypedDicts must be assignable to each other. + if ((flags & CanAssignFlags.EnforceInvariance) !== 0 && !ClassType.isSameGenericClass(destType, srcType)) { + return canAssignTypedDict(evaluatorInterface, srcType, destType, /* diag */ undefined, recursionCount); + } + + return true; } // Handle special-case type promotions. const promotionList = typePromotions.get(destType.details.fullName); - if (promotionList && promotionList.some((srcName) => srcName === srcType.details.fullName)) { + if ( + promotionList && + promotionList.some((srcName) => + srcType.details.mro.some((mroClass) => isClass(mroClass) && srcName === mroClass.details.fullName) + ) + ) { if ((flags & CanAssignFlags.EnforceInvariance) === 0) { return true; } @@ -17657,24 +19330,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Use the slow path for protocols if the dest doesn't explicitly // derive from the source. We also need to use this path if we're // testing to see if the metaclass matches the protocol. 
- if (ClassType.isProtocolClass(destType) && (!isDerivedFrom || allowMetaclassForProtocols)) { + if (ClassType.isProtocolClass(destType) && !isDerivedFrom) { if ( !canAssignClassToProtocol( destType, srcType, - diag.createAddendum(), + diag?.createAddendum(), typeVarMap, flags, - allowMetaclassForProtocols, - recursionCount + 1 + /* treatSourceAsInstantiable */ false, + recursionCount ) ) { - diag.addMessage( - Localizer.DiagnosticAddendum.protocolIncompatible().format({ - sourceType: printType(convertToInstance(srcType)), - destType: printType(convertToInstance(destType)), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.protocolIncompatible().format({ + sourceType: printType(convertToInstance(srcType)), + destType: printType(convertToInstance(destType)), + }) + ); + } return false; } @@ -17692,7 +19367,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, flags, - recursionCount + 1 + recursionCount ); } } @@ -17716,22 +19391,152 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions srcErrorTypeText = srcType.details.fullName; } - diag.addMessage( - Localizer.DiagnosticAddendum.typeIncompatible().format({ - sourceType: srcErrorTypeText, - destType: destErrorTypeText, - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeIncompatible().format({ + sourceType: srcErrorTypeText, + destType: destErrorTypeText, + }) + ); + } return false; } + function canAssignTupleTypeArgs( + destType: ClassType, + srcType: ClassType, + diag: DiagnosticAddendum | undefined, + typeVarMap: TypeVarMap | undefined, + flags: CanAssignFlags, + recursionCount: number + ) { + const destTypeArgs = [...(destType.tupleTypeArguments ?? [])]; + const srcTypeArgs = [...(srcType.tupleTypeArguments ?? 
[])]; + + const destVariadicIndex = destTypeArgs.findIndex((t) => isVariadicTypeVar(t.type)); + const destUnboundedIndex = destTypeArgs.findIndex((t) => t.isUnbounded); + const srcUnboundedIndex = srcTypeArgs.findIndex((t) => t.isUnbounded); + + // If the source is unbounded, expand the unbounded argument to try + // to make the source and dest arg counts match. + if (srcUnboundedIndex >= 0) { + const requiredSrcArgCount = + destVariadicIndex >= 0 || destUnboundedIndex >= 0 ? destTypeArgs.length - 1 : destTypeArgs.length; + const typeToReplicate = srcTypeArgs.length > 0 ? srcTypeArgs[srcUnboundedIndex].type : AnyType.create(); + + while (srcTypeArgs.length < requiredSrcArgCount) { + srcTypeArgs.splice(srcUnboundedIndex, 0, { type: typeToReplicate, isUnbounded: false }); + } + } + + if (destVariadicIndex >= 0 && srcUnboundedIndex >= 0) { + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.typeVarTupleRequiresKnownLength()); + } + return false; + } + + // If the dest is unbounded or contains a variadic, determine which + // source args map to the unbounded or variadic arg. + if (destUnboundedIndex >= 0 || destVariadicIndex >= 0) { + // If there's a variadic within the destination, package up the corresponding + // source arguments into a tuple. + const srcArgsToCapture = srcTypeArgs.length - destTypeArgs.length + 1; + if (srcArgsToCapture >= 0) { + if (destVariadicIndex >= 0) { + if (tupleClassType && isInstantiableClass(tupleClassType)) { + const removedArgs = srcTypeArgs.splice(destVariadicIndex, srcArgsToCapture); + + // Package up the remaining type arguments into a tuple object. 
+ const variadicTuple = convertToInstance( + specializeTupleClass( + tupleClassType, + removedArgs.map((typeArg) => { + return { type: stripLiteralValue(typeArg.type), isUnbounded: false }; + }), + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true, + /* isUnpackedTuple */ true + ) + ); + srcTypeArgs.splice(destVariadicIndex, 0, { + type: variadicTuple, + isUnbounded: false, + }); + } + } else { + const removedArgs = srcTypeArgs.splice(destUnboundedIndex, srcArgsToCapture); + srcTypeArgs.splice(destUnboundedIndex, 0, { + type: removedArgs.length > 0 ? combineTypes(removedArgs.map((t) => t.type)) : AnyType.create(), + isUnbounded: false, + }); + } + } + } + + if (srcTypeArgs.length === destTypeArgs.length) { + for (let argIndex = 0; argIndex < srcTypeArgs.length; argIndex++) { + const entryDiag = diag?.createAddendum(); + + if ( + !canAssignType( + destTypeArgs[argIndex].type, + srcTypeArgs[argIndex].type, + entryDiag?.createAddendum(), + typeVarMap, + flags | CanAssignFlags.RetainLiteralsForTypeVar, + recursionCount + ) + ) { + if (entryDiag) { + entryDiag.addMessage( + Localizer.DiagnosticAddendum.tupleEntryTypeMismatch().format({ + entry: argIndex + 1, + }) + ); + } + return false; + } + } + } else { + if (srcUnboundedIndex >= 0) { + // PEP 646 allows an indeterminate tuple type to be assigned to + // a determinate tuple type if it's associated with a TypeVarTuple. + if (!destType.isUnpacked) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.tupleSizeMismatchIndeterminate().format({ + expected: destTypeArgs.length, + }) + ); + } + + return false; + } + } else { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.tupleSizeMismatch().format({ + expected: destTypeArgs.length, + received: srcTypeArgs.length, + }) + ); + } + + return false; + } + } + + return true; + } + // Determines whether the specified type can be assigned to the // specified inheritance chain, taking into account its type arguments. 
function canAssignClassWithTypeArgs( destType: ClassType, srcType: ClassType, inheritanceChain: InheritanceChain, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap | undefined, flags: CanAssignFlags, recursionCount: number @@ -17765,128 +19570,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions curSrcType = specializeForBaseClass(curSrcType, ancestorType); } - // Do we need to do special-case processing for various built-in classes? - if (ancestorIndex === 0) { - // Handle built-in types that support arbitrary numbers - // of type parameters like Tuple. - if (ClassType.isTupleClass(destType)) { - if (destType.tupleTypeArguments && curSrcType.tupleTypeArguments) { - const destTypeArgs = destType.tupleTypeArguments; - let destArgCount = destTypeArgs.length; - - const isDestHomogenousType = destArgCount === 2 && isEllipsisType(destTypeArgs[1]); - if (isDestHomogenousType) { - destArgCount = 1; - } - - const isDestVariadic = destArgCount > 0 && isVariadicTypeVar(destTypeArgs[destArgCount - 1]); - - const srcTypeArgs = curSrcType.tupleTypeArguments; - let srcArgCount = srcTypeArgs.length; - const isSrcHomogeneousType = srcArgCount === 2 && isEllipsisType(srcTypeArgs[1]); - if (isSrcHomogeneousType) { - srcArgCount = 1; - } - - if (isDestVariadic && isSrcHomogeneousType) { - diag.addMessage(Localizer.DiagnosticAddendum.typeVarTupleRequiresKnownLength()); - return false; - } - - if ( - (srcTypeArgs.length === destArgCount && !isSrcHomogeneousType) || - isDestHomogenousType || - isDestVariadic - ) { - const maxArgCount = Math.max(destArgCount, srcArgCount); - for (let argIndex = 0; argIndex < maxArgCount; argIndex++) { - let srcTypeArgType: Type; - let destTypeArgType: Type; - let isSourceTypeMissing = false; - - if (isSrcHomogeneousType) { - srcTypeArgType = srcTypeArgs[0]; - } else if (argIndex < srcTypeArgs.length) { - srcTypeArgType = srcTypeArgs[argIndex]; - } else { - srcTypeArgType = 
AnyType.create(); - if (destType.isTypeArgumentExplicit) { - if (isDestVariadic && argIndex < destArgCount - 1 && !isDestHomogenousType) { - isSourceTypeMissing = true; - } - } - } - - let movePastSourceArgs = false; - if (isDestVariadic && argIndex >= destArgCount - 1) { - destTypeArgType = destTypeArgs[destArgCount - 1]; - if (tupleClassType && isInstantiableClass(tupleClassType)) { - // Package up the remaining type arguments into a tuple object. - const remainingSrcTypeArgs = srcTypeArgs.slice(argIndex); - srcTypeArgType = convertToInstance( - specializeTupleClass( - tupleClassType, - remainingSrcTypeArgs.map((type) => stripLiteralValue(type)), - /* isTypeArgumentExplicit */ true, - /* stripLiterals */ true, - /* isForUnpackedVariadicTypeVar */ true - ) - ); - movePastSourceArgs = true; - } - } else if (isDestHomogenousType) { - destTypeArgType = destTypeArgs[0]; - } else { - destTypeArgType = - argIndex < destTypeArgs.length ? destTypeArgs[argIndex] : AnyType.create(); - } - - const entryDiag = diag.createAddendum(); - - if ( - isSourceTypeMissing || - !canAssignType( - destTypeArgType, - srcTypeArgType, - entryDiag.createAddendum(), - curTypeVarMap, - flags | CanAssignFlags.RetainLiteralsForTypeVar, - recursionCount + 1 - ) - ) { - entryDiag.addMessage( - Localizer.DiagnosticAddendum.tupleEntryTypeMismatch().format({ - entry: argIndex + 1, - }) - ); - return false; - } - - if (movePastSourceArgs) { - argIndex = srcArgCount; - } - } - } else { - if (isSrcHomogeneousType) { - diag.addMessage( - Localizer.DiagnosticAddendum.tupleSizeMismatchIndeterminate().format({ - expected: destArgCount, - }) - ); - } else { - diag.addMessage( - Localizer.DiagnosticAddendum.tupleSizeMismatch().format({ - expected: destArgCount, - received: srcTypeArgs.length, - }) - ); - } - return false; - } - } - - return true; - } + // Handle built-in types that support arbitrary numbers + // of type parameters like Tuple. 
+ if (ancestorIndex === 0 && destType.tupleTypeArguments && curSrcType.tupleTypeArguments) { + return canAssignTupleTypeArgs(destType, curSrcType, diag, curTypeVarMap, flags, recursionCount); } // If there are no type parameters on this class, we're done. @@ -17937,14 +19624,6 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const typeArgType = i < srcTypeArgs.length ? srcTypeArgs[i] : UnknownType.create(); typeVarMap.setTypeVarType(destType.details.typeParameters[i], undefined, typeArgType); } - - if ( - ClassType.isTupleClass(curSrcType) && - curSrcType.tupleTypeArguments && - destType.details.typeParameters.length >= 1 - ) { - typeVarMap.setVariadicTypeVar(destType.details.typeParameters[0], curSrcType.tupleTypeArguments); - } } return true; @@ -17970,7 +19649,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function verifyTypeArgumentsAssignable( destType: ClassType, srcType: ClassType, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap | undefined, flags: CanAssignFlags, recursionCount: number @@ -17988,8 +19667,8 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (ClassType.isTupleClass(destType)) { - destTypeArgs = destType.tupleTypeArguments || []; - srcTypeArgs = srcType.tupleTypeArguments; + destTypeArgs = destType.tupleTypeArguments?.map((t) => t.type) ?? 
[]; + srcTypeArgs = srcType.tupleTypeArguments?.map((t) => t.type); } else { destTypeArgs = destType.typeArguments!; srcTypeArgs = srcType.typeArguments; @@ -18015,17 +19694,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assignmentDiag, typeVarMap, flags | CanAssignFlags.RetainLiteralsForTypeVar, - recursionCount + 1 + recursionCount ) ) { if (destTypeParam) { - const childDiag = diag.createAddendum(); - childDiag.addMessage( - Localizer.DiagnosticAddendum.typeVarIsCovariant().format({ - name: TypeVarType.getReadableName(destTypeParam), - }) - ); - childDiag.addAddendum(assignmentDiag); + if (diag) { + const childDiag = diag.createAddendum(); + childDiag.addMessage( + Localizer.DiagnosticAddendum.typeVarIsCovariant().format({ + name: TypeVarType.getReadableName(destTypeParam), + }) + ); + childDiag.addAddendum(assignmentDiag); + } } return false; } @@ -18037,16 +19718,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions assignmentDiag, typeVarMap, (flags ^ CanAssignFlags.ReverseTypeVarMatching) | CanAssignFlags.RetainLiteralsForTypeVar, - recursionCount + 1 + recursionCount ) ) { - const childDiag = diag.createAddendum(); - childDiag.addMessage( - Localizer.DiagnosticAddendum.typeVarIsContravariant().format({ - name: TypeVarType.getReadableName(destTypeParam), - }) - ); - childDiag.addAddendum(assignmentDiag); + if (diag) { + const childDiag = diag.createAddendum(); + childDiag.addMessage( + Localizer.DiagnosticAddendum.typeVarIsContravariant().format({ + name: TypeVarType.getReadableName(destTypeParam), + }) + ); + childDiag.addAddendum(assignmentDiag); + } return false; } } else { @@ -18056,18 +19739,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions srcTypeArg, assignmentDiag, typeVarMap, - flags | CanAssignFlags.EnforceInvariance | CanAssignFlags.RetainLiteralsForTypeVar, - recursionCount + 1 + flags | CanAssignFlags.EnforceInvariance, + recursionCount 
) ) { - const childDiag = diag.createAddendum(); - childDiag.addMessage( - Localizer.DiagnosticAddendum.typeVarIsInvariant().format({ - name: TypeVarType.getReadableName(destTypeParam), - }) - ); - childDiag.addAddendum(assignmentDiag); - return false; + // Don't report errors with type variables in "pseudo-random" + // classes since these type variables are not real. + if (!ClassType.isPseudoGenericClass(destType)) { + if (diag) { + const childDiag = diag.createAddendum(); + childDiag.addMessage( + Localizer.DiagnosticAddendum.typeVarIsInvariant().format({ + name: TypeVarType.getReadableName(destTypeParam), + }) + ); + childDiag.addAddendum(assignmentDiag); + } + return false; + } } } } @@ -18085,7 +19774,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function canAssignTypeToTypeVar( destType: TypeVarType, srcType: Type, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap, flags = CanAssignFlags.Default, recursionCount = 0 @@ -18107,95 +19796,52 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return true; } - isTypeVarInScope = false; - if (!destType.details.isSynthesized) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); - return false; + // If we're in "reverse type var matching" mode, don't generate + // an error in this path. + if ((flags & CanAssignFlags.IgnoreTypeVarScope) !== 0) { + return true; } - } - - if (destType.details.isParamSpec) { - if (isTypeVar(srcType) && srcType.details.isParamSpec) { - const existingEntry = typeVarMap.getParamSpec(destType); - if (existingEntry) { - if (!existingEntry.concrete && existingEntry.paramSpec) { - // If there's an existing entry that matches, that's fine. 
- if (isTypeSame(existingEntry.paramSpec, srcType)) { - return true; - } - } - } else { - if (!typeVarMap.isLocked() && isTypeVarInScope) { - typeVarMap.setParamSpec(destType, { paramSpec: srcType }); - } - return true; - } - } else if (isFunction(srcType)) { - const functionSrcType = srcType; - const parameters = srcType.details.parameters.map((p, index) => { - const paramSpecEntry: ParamSpecEntry = { - category: p.category, - name: p.name, - hasDefault: !!p.hasDefault, - type: FunctionType.getEffectiveParameterType(functionSrcType, index), - }; - return paramSpecEntry; - }); - const existingEntry = typeVarMap.getParamSpec(destType); - if (existingEntry) { - // Verify that the existing entry matches the new entry. - if ( - existingEntry.concrete && - existingEntry.concrete.parameters.length === parameters.length && - !existingEntry.concrete.parameters.some((existingParam, index) => { - const newParam = parameters[index]; - return ( - existingParam.category !== newParam.category || - existingParam.name !== newParam.name || - existingParam.hasDefault !== newParam.hasDefault || - !isTypeSame(existingParam.type, newParam.type) - ); + isTypeVarInScope = false; + if (!destType.details.isSynthesized) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), }) - ) { - return true; - } - } else { - if (!typeVarMap.isLocked() && isTypeVarInScope) { - typeVarMap.setParamSpec(destType, { concrete: { parameters, flags: srcType.details.flags } }); - } - return true; + ); } + return false; } + } - diag.addMessage( - Localizer.DiagnosticAddendum.typeParamSpec().format({ - type: printType(srcType), - name: destType.details.name, - }) + if ((flags & CanAssignFlags.SkipSolveTypeVars) !== 0) { + return canAssignType( + makeTopLevelTypeVarsConcrete(destType), + makeTopLevelTypeVarsConcrete(srcType), + diag, + /* typeVarMap */ undefined, + flags, + recursionCount ); - return 
false; } - if (destType.details.isVariadic) { - const isVariadicTuple = - isClassInstance(srcType) && isTupleClass(srcType) && !!srcType.isTupleForUnpackedVariadicTypeVar; + if (destType.details.isParamSpec) { + return canAssignTypeToParamSpec(destType, srcType, diag, typeVarMap, recursionCount); + } - if (!isVariadicTypeVar(srcType) && !isVariadicTuple) { + if (destType.details.isVariadic) { + if (!isUnpacked(srcType)) { if (tupleClassType && isInstantiableClass(tupleClassType)) { // Package up the type into a tuple. srcType = convertToInstance( specializeTupleClass( tupleClassType, - [srcType], + [{ type: srcType, isUnbounded: false }], /* isTypeArgumentExplicit */ true, /* stripLiterals */ true, - /* isForUnpackedVariadicTypeVar */ true + /* isUnpackedTuple */ true ) ); } else { @@ -18204,6 +19850,16 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } + // If we're attempting to assign `type` to Type[T], transform `type` into `Type[Any]`. + if ( + TypeBase.isInstantiable(destType) && + isClassInstance(srcType) && + ClassType.isBuiltIn(srcType, 'type') && + !srcType.typeArguments + ) { + srcType = AnyType.create(); + } + const curEntry = typeVarMap.getTypeVar(destType); const curNarrowTypeBound = curEntry?.narrowBound; const curWideTypeBound = curEntry?.wideBound ?? 
destType.details.boundType; @@ -18221,13 +19877,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( destType, concreteSrcType, - new DiagnosticAddendum(), + /* diag */ undefined, new TypeVarMap(destType.scopeId), /* flags */ undefined, - recursionCount + 1 + recursionCount ) ) { constrainedType = srcType; + + // If the source and dest are both instantiables (type[T]), then + // we need to convert to an instance (T) for the + if (TypeBase.isInstantiable(srcType)) { + constrainedType = convertToInstance(srcType); + } } } else { let isCompatible = true; @@ -18254,10 +19916,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( adjustedConstraint, srcSubtype, - new DiagnosticAddendum(), + /* diag */ undefined, /* typeVarMap */ undefined, /* flags */ undefined, - recursionCount + 1 + recursionCount ) ) { if ( @@ -18265,10 +19927,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( constrainedSubtype, adjustedConstraint, - new DiagnosticAddendum(), + /* diag */ undefined, /* typeVarMap */ undefined, /* flags */ undefined, - recursionCount + 1 + recursionCount ) ) { constrainedSubtype = addConditionToType(constraint, getTypeCondition(srcSubtype)); @@ -18316,10 +19978,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return canAssignType( adjustedConstraint, concreteSrcType, - new DiagnosticAddendum(), + /* diag */ undefined, /* typeVarMap */ undefined, /* flags */ undefined, - recursionCount + 1 + recursionCount ); }); } @@ -18329,12 +19991,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // or there were multiple types that were assignable and they // are not conditional, it's an error. 
if (!constrainedType) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeConstrainedTypeVar().format({ - type: printType(srcType), - name: destType.details.name, - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeConstrainedTypeVar().format({ + type: printType(srcType), + name: destType.details.name, + }) + ); + } return false; } @@ -18343,10 +20007,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions !canAssignType( curNarrowTypeBound, constrainedType, - new DiagnosticAddendum(), + /* diag */ undefined, /* typeVarMap */ undefined, /* flags */ undefined, - recursionCount + 1 + recursionCount ) ) { // Handle the case where one of the constrained types is a wider @@ -18356,22 +20020,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( constrainedType, curNarrowTypeBound, - new DiagnosticAddendum(), + /* diag */ undefined, /* typeVarMap */ undefined, /* flags */ undefined, - recursionCount + 1 + recursionCount ) ) { if (!typeVarMap.isLocked() && isTypeVarInScope) { typeVarMap.setTypeVarType(destType, constrainedType); } } else { - diag.addMessage( - Localizer.DiagnosticAddendum.typeConstrainedTypeVar().format({ - type: printType(constrainedType), - name: printType(curNarrowTypeBound), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeConstrainedTypeVar().format({ + type: printType(constrainedType), + name: printType(curNarrowTypeBound), + }) + ); + } return false; } } @@ -18388,7 +20054,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Handle the unconstrained (but possibly bound) case. let newNarrowTypeBound = curNarrowTypeBound; let newWideTypeBound = curWideTypeBound; - const diagAddendum = new DiagnosticAddendum(); + const diagAddendum = diag ? new DiagnosticAddendum() : undefined; // Strip literals if the existing value contains no literals. 
This allows // for explicit (but no implicit) literal specialization of a generic class. @@ -18400,15 +20066,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions let adjSrcType = retainLiterals ? srcType : stripLiteralValue(srcType); if (TypeBase.isInstantiable(destType)) { - if (TypeBase.isInstantiable(adjSrcType)) { + if (isEffectivelyInstantiable(adjSrcType)) { adjSrcType = convertToInstance(adjSrcType); } else { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(adjSrcType), - destType: printType(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(adjSrcType), + destType: printType(destType), + }) + ); + } return false; } } @@ -18417,15 +20085,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Update the wide type bound. if (!curWideTypeBound) { newWideTypeBound = adjSrcType; - } else if (!isTypeSame(curWideTypeBound, adjSrcType)) { + } else if ( + !isTypeSame( + curWideTypeBound, + adjSrcType, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { if ( canAssignType( curWideTypeBound, makeTopLevelTypeVarsConcrete(adjSrcType), diagAddendum, /* typeVarMap */ undefined, - /* flags */ undefined, - recursionCount + 1 + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount ) ) { // The srcType is narrower than the current wideTypeBound, so replace it. 
@@ -18436,17 +20112,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions curWideTypeBound, diagAddendum, /* typeVarMap */ undefined, - /* flags */ undefined, - recursionCount + 1 + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount ) ) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(adjSrcType), - destType: printType(curWideTypeBound), - }) - ); - diag.addAddendum(diagAddendum); + if (diag && diagAddendum) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(adjSrcType), + destType: printType(curWideTypeBound), + }) + ); + diag.addAddendum(diagAddendum); + } return false; } } @@ -18457,19 +20135,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions !canAssignType( newWideTypeBound!, curNarrowTypeBound, - new DiagnosticAddendum(), + /* diag */ undefined, /* typeVarMap */ undefined, - /* flags */ undefined, - recursionCount + 1 + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount ) ) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(adjSrcType), - destType: printType(curNarrowTypeBound), - }) - ); - diag.addAddendum(diagAddendum); + if (diag && diagAddendum) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(adjSrcType), + destType: printType(curNarrowTypeBound), + }) + ); + diag.addAddendum(diagAddendum); + } return false; } } @@ -18477,22 +20157,29 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (!curNarrowTypeBound) { // There was previously no narrow bound. We've now established one. 
newNarrowTypeBound = adjSrcType; - } else if (!isTypeSame(curNarrowTypeBound, adjSrcType)) { - if ( - canAssignType(curNarrowTypeBound, adjSrcType, diagAddendum, typeVarMap, flags, recursionCount + 1) - ) { + } else if ( + !isTypeSame( + curNarrowTypeBound, + adjSrcType, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { + if (canAssignType(curNarrowTypeBound, adjSrcType, diagAddendum, typeVarMap, flags, recursionCount)) { // No need to widen. Stick with the existing type unless it's unknown // or partly unknown, in which case we'll replace it with a known type // as long as it doesn't violate the current narrow bound. if ( isPartlyUnknown(curNarrowTypeBound) && + !isUnknown(adjSrcType) && canAssignType( adjSrcType, curNarrowTypeBound, - new DiagnosticAddendum(), + /* diag */ undefined, typeVarMap, - /* flags */ undefined, - recursionCount + 1 + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount ) ) { newNarrowTypeBound = adjSrcType; @@ -18502,23 +20189,27 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } else { // We need to widen the type. if (typeVarMap.isLocked() || isTypeVar(adjSrcType)) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(curNarrowTypeBound), - destType: printType(adjSrcType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(curNarrowTypeBound), + destType: printType(adjSrcType), + }) + ); + } return false; } // Don't allow widening for variadic type variables. 
if (isVariadicTypeVar(destType)) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(curNarrowTypeBound), - destType: printType(adjSrcType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(curNarrowTypeBound), + destType: printType(adjSrcType), + }) + ); + } return false; } @@ -18526,10 +20217,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( adjSrcType, curNarrowTypeBound, - new DiagnosticAddendum(), + /* diag */ undefined, typeVarMap, - /* flags */ undefined, - recursionCount + 1 + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount ) ) { newNarrowTypeBound = adjSrcType; @@ -18555,25 +20246,52 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Make sure we don't exceed the wide type bound. - if (curWideTypeBound) { + if (curWideTypeBound && newNarrowTypeBound) { if ( - !isTypeSame(curWideTypeBound, newNarrowTypeBound!) && - !canAssignType( - makeTopLevelTypeVarsConcrete(curWideTypeBound), - newNarrowTypeBound!, - new DiagnosticAddendum(), - typeVarMap, - /* flags */ undefined, - recursionCount + 1 + !isTypeSame( + curWideTypeBound, + newNarrowTypeBound, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount ) ) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(curWideTypeBound), - destType: printType(adjSrcType), - }) - ); - return false; + let makeConcrete = true; + + // Handle the case where the wide type is type T and the narrow type + // is type T | . In this case, it violates the + // wide type bound. 
+ if (isTypeVar(curWideTypeBound)) { + if (isTypeSame(newNarrowTypeBound, curWideTypeBound)) { + makeConcrete = false; + } else if ( + isUnion(newNarrowTypeBound) && + newNarrowTypeBound.subtypes.some((subtype) => isTypeSame(subtype, curWideTypeBound)) + ) { + makeConcrete = false; + } + } + + if ( + !canAssignType( + makeConcrete ? makeTopLevelTypeVarsConcrete(curWideTypeBound) : curWideTypeBound, + newNarrowTypeBound, + /* diag */ undefined, + typeVarMap, + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount + ) + ) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(adjSrcType), + destType: printType(curWideTypeBound), + }) + ); + } + return false; + } } } } @@ -18593,22 +20311,24 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions !canAssignType( destType.details.boundType, makeTopLevelTypeVarsConcrete(updatedType), - diag.createAddendum(), + diag?.createAddendum(), typeVarMap, - /* flags */ undefined, - recursionCount + 1 + flags & CanAssignFlags.IgnoreTypeVarScope, + recursionCount ) ) { // Avoid adding a message that will confuse users if the TypeVar was // synthesized for internal purposes. 
if (!destType.details.isSynthesized) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeBound().format({ - sourceType: printType(updatedType), - destType: printType(destType.details.boundType), - name: TypeVarType.getReadableName(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeBound().format({ + sourceType: printType(updatedType), + destType: printType(destType.details.boundType), + name: TypeVarType.getReadableName(destType), + }) + ); + } } return false; } @@ -18621,6 +20341,106 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return true; } + function canAssignTypeToParamSpec( + destType: TypeVarType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + typeVarMap: TypeVarMap, + recursionCount = 0 + ) { + if (isTypeVar(srcType) && srcType.details.isParamSpec) { + const existingEntry = typeVarMap.getParamSpec(destType); + if (existingEntry) { + if (existingEntry.parameters.length === 0 && existingEntry.paramSpec) { + // If there's an existing entry that matches, that's fine. 
+ if ( + isTypeSame( + existingEntry.paramSpec, + srcType, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { + return true; + } + } + } else { + if (!typeVarMap.isLocked() && typeVarMap.hasSolveForScope(destType.scopeId)) { + typeVarMap.setParamSpec(destType, { + flags: FunctionTypeFlags.None, + parameters: [], + typeVarScopeId: undefined, + docString: undefined, + paramSpec: srcType, + }); + } + return true; + } + } else if (isFunction(srcType)) { + const functionSrcType = srcType; + const parameters = srcType.details.parameters.map((p, index) => { + const paramSpecEntry: ParamSpecEntry = { + category: p.category, + name: p.name, + isNameSynthesized: p.isNameSynthesized, + hasDefault: !!p.hasDefault, + type: FunctionType.getEffectiveParameterType(functionSrcType, index), + }; + return paramSpecEntry; + }); + + const existingEntry = typeVarMap.getParamSpec(destType); + if (existingEntry) { + // Verify that the existing entry matches the new entry. 
+ if ( + !existingEntry.paramSpec && + existingEntry.parameters.length === parameters.length && + !existingEntry.parameters.some((existingParam, index) => { + const newParam = parameters[index]; + return ( + existingParam.category !== newParam.category || + existingParam.name !== newParam.name || + existingParam.hasDefault !== newParam.hasDefault || + !isTypeSame( + existingParam.type, + newParam.type, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ); + }) + ) { + return true; + } + } else { + if (!typeVarMap.isLocked() && typeVarMap.hasSolveForScope(destType.scopeId)) { + typeVarMap.setParamSpec(destType, { + parameters, + typeVarScopeId: srcType.details.typeVarScopeId, + flags: srcType.details.flags, + docString: srcType.details.docString, + paramSpec: undefined, + }); + } + return true; + } + } else if (isAnyOrUnknown(srcType)) { + return true; + } + + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeParamSpec().format({ + type: printType(srcType), + name: destType.details.name, + }) + ); + } + return false; + } + // Determines if the source type can be assigned to the dest type. // If typeVarMap is provided, type variables within the destType are // matched against existing type variables in the map. If a type variable @@ -18629,14 +20449,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions function canAssignType( destType: Type, srcType: Type, - diag: DiagnosticAddendum, + diag?: DiagnosticAddendum, typeVarMap?: TypeVarMap, flags = CanAssignFlags.Default, recursionCount = 0 ): boolean { - destType = transformPossibleRecursiveTypeAlias(destType); - srcType = transformPossibleRecursiveTypeAlias(srcType); - // If this is a one-element union that contains a variadic type variable, // pull out the subtype. 
if (isUnion(destType) && destType.subtypes.length === 1 && isVariadicTypeVar(destType.subtypes[0])) { @@ -18647,13 +20464,72 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions srcType = srcType.subtypes[0]; } - if (recursionCount > maxTypeRecursionCount) { + if (destType === srcType) { + // If the dest type is a TypeVar and a TypeVarMap was provided, we may + // need to assign the TypeVar to itself under certain circumstances. + // This is needed for cases where generic class A[T] calls its own + // constructor with an argument of type T. + if ( + isTypeVar(destType) && + !destType.details.isParamSpec && + !destType.details.isVariadic && + destType.scopeType === TypeVarScopeType.Class && + typeVarMap && + !typeVarMap.isLocked() && + typeVarMap.hasSolveForScope(destType.scopeId) && + !typeVarMap.getTypeVar(destType) && + (flags & (CanAssignFlags.SkipSolveTypeVars | CanAssignFlags.ReverseTypeVarMatching)) === 0 + ) { + typeVarMap.setTypeVarType(destType, srcType); + } + return true; } - if (destType === srcType) { + if (recursionCount > maxTypeRecursionCount) { return true; } + recursionCount++; + + // If the source and dest refer to the recursive type aliases, handle + // the case specially to avoid recursing down both type aliases. + if ( + isTypeVar(destType) && + destType.details.recursiveTypeAliasScopeId && + isTypeVar(srcType) && + srcType.details.recursiveTypeAliasScopeId + ) { + // Do the source and dest refer to the same recursive type alias? + if ( + destType.typeAliasInfo?.typeArguments && + srcType.typeAliasInfo?.typeArguments && + destType.details.recursiveTypeAliasScopeId === srcType.details.recursiveTypeAliasScopeId + ) { + let isAssignable = true; + const srcTypeArgs = srcType.typeAliasInfo.typeArguments; + destType.typeAliasInfo.typeArguments.forEach((destTypeArg, index) => { + const srcTypeArg = index < srcTypeArgs.length ? 
srcTypeArgs[index] : UnknownType.create(); + if (!canAssignType(destTypeArg, srcTypeArg, diag, typeVarMap, flags, recursionCount)) { + isAssignable = false; + } + }); + + return isAssignable; + } else { + // Have we already recursed once? + if ((flags & CanAssignFlags.SkipRecursiveTypeCheck) !== 0) { + return true; + } + + // Note that we are comparing two recursive types and do + // not recursive more than once. + flags |= CanAssignFlags.SkipRecursiveTypeCheck; + } + } + + // Transform recursive type aliases if necessary. + destType = transformPossibleRecursiveTypeAlias(destType); + srcType = transformPossibleRecursiveTypeAlias(srcType); // If the source or dest is unbound, allow the assignment. The // error will be reported elsewhere. @@ -18661,6 +20537,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return true; } + // If we're in "overload overlap" mode, convert top-level type variables + // to their concrete forms in the source. + if ((flags & CanAssignFlags.OverloadOverlapCheck) !== 0) { + srcType = makeTopLevelTypeVarsConcrete(srcType); + } + // Strip a few of the flags we don't want to propagate to other calls. const originalFlags = flags; flags &= ~(CanAssignFlags.AllowBoolTypeGuard | CanAssignFlags.AllowTypeVarNarrowing); @@ -18668,88 +20550,75 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Before performing any other checks, see if the dest type is a // TypeVar that we are attempting to match. if (isTypeVar(destType)) { - // If it's an exact match, no need to do any more work. + // If the dest is a constrained or bound type variable and all of the + // types in the source are conditioned on that same type variable + // and have compatible types, we'll consider it assignable. 
+ if (canAssignConditionalTypeToTypeVar(destType, srcType, recursionCount)) { + return true; + } + if (isTypeSame(destType, srcType)) { return true; } - // If the dest is a constrained type variable and all of the types in - // the source are constrained using that same type variable and have - // compatible types, we'll consider it assignable. - const destTypeVar = destType; + // Handle the special case where both types are Self types. We'll allow + // them to be treated as equivalent to handle certain common idioms. if ( - findSubtype(srcType, (srcSubtype) => { - if (isTypeSame(destTypeVar, srcSubtype, /* ignorePseudoGeneric */ true)) { - return false; - } - - if ( - getTypeCondition(srcSubtype)?.find( - (constraint) => constraint.typeVarName === TypeVarType.getNameWithScope(destTypeVar) - ) - ) { - if ( - destTypeVar.details.constraints.length === 0 || - destTypeVar.details.constraints.some((constraintType) => { - return canAssignType(constraintType, srcSubtype, new DiagnosticAddendum()); - }) - ) { - return false; - } - } - - return true; - }) === undefined + isTypeVar(srcType) && + srcType.details.isSynthesizedSelf && + srcType.details.boundType && + destType.details.isSynthesizedSelf && + destType.details.boundType ) { + if ((flags & CanAssignFlags.ReverseTypeVarMatching) === 0 && typeVarMap) { + canAssignTypeToTypeVar(destType, srcType, diag, typeVarMap, originalFlags, recursionCount); + } return true; } // If the dest is a variadic type variable, and the source is a tuple // with a single entry that is the same variadic type variable, it's a match. 
if ( - isVariadicTypeVar(destTypeVar) && + isVariadicTypeVar(destType) && isClassInstance(srcType) && isTupleClass(srcType) && srcType.tupleTypeArguments && srcType.tupleTypeArguments.length === 1 ) { - if (isTypeSame(destTypeVar, srcType.tupleTypeArguments[0])) { + if ( + isTypeSame( + destType, + srcType.tupleTypeArguments[0].type, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { return true; } } // If we're using ReverseTypeVarMatching and the source is a TypeVar, // the logic below will handle this case. - if ((flags & CanAssignFlags.ReverseTypeVarMatching) === 0 || !isTypeVar(srcType)) { - if (flags & CanAssignFlags.SkipSolveTypeVars) { - return canAssignType( - makeTopLevelTypeVarsConcrete(destType), - makeTopLevelTypeVarsConcrete(srcType), - diag, - /* typeVarMap */ undefined, - originalFlags, - recursionCount + 1 - ); - } else { - if ( - !canAssignTypeToTypeVar( - destType, - srcType, - diag, - typeVarMap ?? new TypeVarMap(), - originalFlags, - recursionCount + 1 - ) - ) { - return false; - } - - if (isAnyOrUnknown(srcType) && (flags & CanAssignFlags.DisallowAssignFromAny) !== 0) { - return false; - } + if ((flags & CanAssignFlags.ReverseTypeVarMatching) === 0 || !isTypeVar(srcType)) { + if ( + !canAssignTypeToTypeVar( + destType, + srcType, + diag, + typeVarMap ?? new TypeVarMap(), + originalFlags, + recursionCount + ) + ) { + return false; + } - return true; + if (isAnyOrUnknown(srcType) && (flags & CanAssignFlags.OverloadOverlapCheck) !== 0) { + return false; } + + return true; } } @@ -18766,19 +20635,32 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, /* typeVarMap */ undefined, originalFlags, - recursionCount + 1 + recursionCount ); } else { // Reverse the order of assignment to populate the TypeVarMap for - // the source TypeVar. + // the source TypeVar. 
Normally we set the AllowTypeVarNarrowing flag + // so the wide type bound of the TypeVar is set rather than the narrow + // type bound. This allows the type to be further narrowed through other + // assignments. However, if we're populating the expected type in the + // TypeVarMap, we don't want to allow further narrowing. + let effectiveFlags = originalFlags; + if ((originalFlags & CanAssignFlags.PopulatingExpectedType) !== 0) { + effectiveFlags &= ~( + CanAssignFlags.ReverseTypeVarMatching | CanAssignFlags.AllowTypeVarNarrowing + ); + } else { + effectiveFlags |= CanAssignFlags.AllowTypeVarNarrowing; + } + if ( canAssignTypeToTypeVar( srcType as TypeVarType, destType, diag, typeVarMap, - originalFlags | CanAssignFlags.AllowTypeVarNarrowing, - recursionCount + 1 + effectiveFlags, + recursionCount ) ) { return true; @@ -18795,7 +20677,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, originalFlags | CanAssignFlags.AllowTypeVarNarrowing, - recursionCount + 1 + recursionCount ) ) { isAssignable = true; @@ -18807,15 +20689,31 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if ((flags & CanAssignFlags.EnforceInvariance) !== 0) { - if (!isAnyOrUnknown(destType)) { + if (isAnyOrUnknown(destType)) { + return true; + } + + // If the source is a ParamSpec and the dest is a "...", this is + // effectively like an "Any" signature, so we'll treat it as though + // it's Any. + if ( + isParamSpec(srcType) && + isFunction(destType) && + FunctionType.shouldSkipArgsKwargsCompatibilityCheck(destType) && + destType.details.parameters.length <= 2 + ) { + return true; + } + + if (diag) { diag.addMessage( Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ sourceType: printType(srcType), destType: printType(destType), }) ); - return false; } + return false; } } @@ -18829,286 +20727,81 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // type. 
These are functionally equivalent, but "Any" looks // better in the text representation. const typeVarSubstitution = isEllipsisType(srcType) ? AnyType.create() : srcType; - setTypeArgumentsRecursive(destType, typeVarSubstitution, typeVarMap); + setTypeArgumentsRecursive(destType, typeVarSubstitution, typeVarMap, recursionCount); } - if ((flags & CanAssignFlags.DisallowAssignFromAny) === 0) { + if ((flags & CanAssignFlags.OverloadOverlapCheck) === 0) { return true; } } if (isNever(srcType)) { if (typeVarMap) { - setTypeArgumentsRecursive(destType, UnknownType.create(), typeVarMap); + setTypeArgumentsRecursive(destType, UnknownType.create(), typeVarMap, recursionCount); } return true; } - const expandedSrcType = makeTopLevelTypeVarsConcrete(srcType); - if (isUnion(expandedSrcType)) { - // Start by checking for an exact match. This is needed to handle unions - // that contain recursive type aliases. - if (isTypeSame(expandedSrcType, destType)) { - return true; - } - - // Handle the case where the source and dest are both unions and - // invariance is being enforced and the dest contains type variables. - if (flags & CanAssignFlags.EnforceInvariance) { - if (isUnion(destType)) { - const remainingDestSubtypes: Type[] = []; - let remainingSrcSubtypes: Type[] = [...expandedSrcType.subtypes]; - let isIncompatible = false; - - // First attempt to match all of the non-generic types in the dest - // to non-generic types in the source. - destType.subtypes.forEach((destSubtype) => { - if (requiresSpecialization(destSubtype)) { - remainingDestSubtypes.push(destSubtype); - } else { - const srcTypeIndex = remainingSrcSubtypes.findIndex((srcSubtype) => - isTypeSame(srcSubtype, destSubtype) - ); - if (srcTypeIndex >= 0) { - remainingSrcSubtypes.splice(srcTypeIndex, 1); - } else { - isIncompatible = true; - } - } - }); - - // For all remaining source subtypes, attempt to find a dest subtype - // whose primary type matches. 
- if (!isIncompatible) { - [...remainingSrcSubtypes].forEach((srcSubtype) => { - const destTypeIndex = remainingDestSubtypes.findIndex( - (destSubtype) => - isClass(srcSubtype) && - isClass(destSubtype) && - TypeBase.isInstance(srcSubtype) === TypeBase.isInstance(destSubtype) && - ClassType.isSameGenericClass(srcSubtype, destSubtype) - ); - if (destTypeIndex >= 0) { - if ( - !canAssignType( - remainingDestSubtypes[destTypeIndex], - srcSubtype, - diag.createAddendum(), - typeVarMap, - flags, - recursionCount + 1 - ) - ) { - isIncompatible = true; - } - - remainingDestSubtypes.splice(destTypeIndex, 1); - remainingSrcSubtypes = remainingSrcSubtypes.filter((t) => t !== srcSubtype); - } - }); - } - - // If there is a remaining dest subtype and it's a type variable, attempt - // to assign the remaining source subtypes to it. - if (!isIncompatible && (remainingDestSubtypes.length !== 0 || remainingSrcSubtypes.length !== 0)) { - if ( - remainingDestSubtypes.length !== 1 || - !isTypeVar(remainingDestSubtypes[0]) || - !canAssignType( - remainingDestSubtypes[0], - combineTypes(remainingSrcSubtypes), - diag.createAddendum(), - typeVarMap, - flags, - recursionCount + 1 - ) - ) { - isIncompatible = true; - } - } + // Handle the special case where the expression is an actual + // UnionType special form. + if (isUnion(srcType) && TypeBase.isSpecialForm(srcType)) { + srcType = unionType || objectType || AnyType.create(); + } - if (!isIncompatible) { - return true; - } + if (isUnion(destType)) { + if (isUnion(srcType)) { + if ( + canAssignFromUnionType( + destType, + srcType, + /* diag */ undefined, + typeVarMap, + originalFlags, + recursionCount + ) + ) { + return true; } - } - - // For union sources, all of the types need to be assignable to the dest. - let isIncompatible = false; - doForEachSubtype(expandedSrcType, (subtype) => { + } else { + const clonedTypeVarMap = typeVarMap ? 
typeVarMap.clone() : undefined; if ( - !canAssignType(destType, subtype, new DiagnosticAddendum(), typeVarMap, flags, recursionCount + 1) + canAssignToUnionType( + destType, + srcType, + /* diag */ undefined, + clonedTypeVarMap, + originalFlags, + recursionCount + ) ) { - // That didn't work, so try again with concrete versions. - if ( - !canAssignType( - destType, - makeTopLevelTypeVarsConcrete(subtype), - diag.createAddendum(), - typeVarMap, - flags, - recursionCount + 1 - ) - ) { - isIncompatible = true; + if (typeVarMap && clonedTypeVarMap) { + typeVarMap.copyFromClone(clonedTypeVarMap); } + return true; } - }); - - if (isIncompatible) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); - return false; } + } - return true; + const expandedSrcType = makeTopLevelTypeVarsConcrete(srcType); + if (isUnion(expandedSrcType)) { + return canAssignFromUnionType(destType, expandedSrcType, diag, typeVarMap, originalFlags, recursionCount); } if (isUnion(destType)) { - // If we need to enforce invariance, the source needs to be compatible - // with all subtypes in the dest, unless those subtypes are subclasses - // of other subtypes. - if (flags & CanAssignFlags.EnforceInvariance) { - let isIncompatible = false; - - doForEachSubtype(destType, (subtype, index) => { - if ( - !isIncompatible && - !canAssignType(subtype, srcType, diag.createAddendum(), typeVarMap, flags, recursionCount + 1) - ) { - // Determine whether this subtype is assignable to - // another subtype elsewhere in the union. If so, we can ignore - // the incompatibility. 
- let skipSubtype = false; - if (!isAnyOrUnknown(subtype)) { - doForEachSubtype(destType, (otherSubtype, otherIndex) => { - if (index !== otherIndex && !skipSubtype) { - if ( - canAssignType( - otherSubtype, - subtype, - new DiagnosticAddendum(), - /* typeVarMap */ undefined, - CanAssignFlags.Default, - recursionCount + 1 - ) - ) { - skipSubtype = true; - } - } - }); - } - if (!skipSubtype) { - isIncompatible = true; - } - } - }); + return canAssignToUnionType(destType, srcType, diag, typeVarMap, originalFlags, recursionCount); + } - if (isIncompatible) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); - return false; - } + if (isNoneInstance(destType) && isNoneInstance(srcType)) { + return true; + } + if (isNoneTypeClass(destType)) { + if (isNoneTypeClass(srcType)) { return true; } - // For union destinations, we just need to match one of the types. - const diagAddendum = new DiagnosticAddendum(); - - let foundMatch = false; - // Run through all subtypes in the union. Don't stop at the first - // match we find because we may need to match TypeVars in other - // subtypes. We special-case "None" so we can handle Optional[T] - // without matching the None to the type var. - if (isNone(srcType) && isOptionalType(destType)) { - foundMatch = true; - } else { - let bestTypeVarMap: TypeVarMap | undefined; - let bestTypeVarMapScore: number | undefined; - - // If the srcType is a literal, try to use the fast-path lookup - // in case the destType is a union with hundreds of literals. - if ( - isClassInstance(srcType) && - isLiteralType(srcType) && - UnionType.containsType(destType, srcType, /* constraints */ undefined) - ) { - return true; - } - - doForEachSubtype(destType, (subtype) => { - // Make a temporary clone of the typeVarMap. We don't want to modify - // the original typeVarMap until we find the "optimal" typeVar mapping. 
- const typeVarMapClone = typeVarMap?.clone(); - if ( - canAssignType( - subtype, - srcType, - diagAddendum.createAddendum(), - typeVarMapClone, - flags, - recursionCount + 1 - ) - ) { - foundMatch = true; - - if (typeVarMapClone) { - // Ask the typeVarMap to compute a "score" for the current - // contents of the table. - const typeVarMapScore = typeVarMapClone.getScore(); - if (bestTypeVarMapScore === undefined || bestTypeVarMapScore <= typeVarMapScore) { - // We found a typeVar mapping with a higher score than before. - bestTypeVarMapScore = typeVarMapScore; - bestTypeVarMap = typeVarMapClone; - } - } - } - }); - - // If we found a winning type var mapping, copy it back to typeVarMap. - if (typeVarMap && bestTypeVarMap) { - typeVarMap.copyFromClone(bestTypeVarMap); - } - } - - // If the source is a constrained TypeVar, see if we can assign all of the - // constraints to the union. - if (!foundMatch) { - if (isTypeVar(srcType) && srcType.details.constraints.length > 0) { - foundMatch = canAssignType( - destType, - makeTopLevelTypeVarsConcrete(srcType), - diagAddendum.createAddendum(), - typeVarMap, - flags, - recursionCount + 1 - ); - } - } - - if (!foundMatch) { - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); - diag.addAddendum(diagAddendum); - return false; + if (isInstantiableClass(srcType) && ClassType.isBuiltIn(srcType, 'NoneType')) { + return true; } - return true; - } - - if (isNone(destType) && isNone(srcType)) { - return TypeBase.isInstance(destType) === TypeBase.isInstance(srcType); } // Is the src a specialized "Type" object? 
@@ -19127,21 +20820,23 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( destType, convertToInstantiable(srcTypeArgs[0]), - diag.createAddendum(), + diag?.createAddendum(), typeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { return true; } - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + } return false; } } @@ -19150,6 +20845,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isInstantiableClass(destType)) { const concreteSrcType = makeTopLevelTypeVarsConcrete(srcType); if (isInstantiableClass(concreteSrcType)) { + // PEP 544 says that if the dest type is a Type[Proto] class, + // the source must be a "concrete" (non-protocol) class. 
+ if (ClassType.isProtocolClass(destType)) { + if ( + ClassType.isProtocolClass(concreteSrcType) && + isInstantiableClass(srcType) && + !srcType.includeSubclasses + ) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.protocolSourceIsNotConcrete().format({ + sourceType: printType(convertToInstance(srcType)), + destType: printType(destType), + }) + ); + } + return false; + } + } + if ( canAssignClass( destType, @@ -19157,19 +20872,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, flags, - recursionCount + 1, + recursionCount, /* reportErrorsUsingObjType */ false ) ) { return true; } - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + } return false; } } @@ -19186,7 +20903,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, flags, - recursionCount + 1 + recursionCount ); } } @@ -19194,9 +20911,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Is the dest a "type" object? Assume that all instantiable // types are assignable to "type". if (TypeBase.isInstantiable(srcType)) { - return true; + const isLiteral = isClass(srcType) && srcType.literalValue !== undefined; + return !isLiteral; } - } else if (ClassType.isBuiltIn(destType, 'TypeGuard')) { + } else if (ClassType.isBuiltIn(destType, ['TypeGuard', 'StrictTypeGuard'])) { // All the source to be a "bool". 
if ((originalFlags & CanAssignFlags.AllowBoolTypeGuard) !== 0) { if (isClassInstance(srcType) && ClassType.isBuiltIn(srcType, 'bool')) { @@ -19210,17 +20928,32 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (destType.literalValue !== undefined) { const srcLiteral = concreteSrcType.literalValue; if (srcLiteral === undefined || !ClassType.isLiteralValueSame(concreteSrcType, destType)) { - diag.addMessage( - Localizer.DiagnosticAddendum.literalAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.literalAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + } return false; } } + // Handle LiteralString special form. + if (ClassType.isBuiltIn(destType, 'LiteralString') && ClassType.isBuiltIn(concreteSrcType, 'str')) { + if (concreteSrcType.literalValue !== undefined) { + return true; + } + } else if ( + ClassType.isBuiltIn(concreteSrcType, 'LiteralString') && + ClassType.isBuiltIn(destType, 'str') && + destType.literalValue === undefined + ) { + return true; + } + if ( !canAssignClass( ClassType.cloneAsInstantiable(destType), @@ -19228,7 +20961,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, flags, - recursionCount + 1, + recursionCount, /* reportErrorsUsingObjType */ true ) ) { @@ -19240,19 +20973,12 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // Is the destination a callback protocol (defined in PEP 544)? const destCallbackType = getCallbackProtocolType(destType); if (destCallbackType) { - return canAssignType( - destCallbackType, - concreteSrcType, - diag, - typeVarMap, - flags, - recursionCount + 1 - ); + return canAssignType(destCallbackType, concreteSrcType, diag, typeVarMap, flags, recursionCount); } // All functions are objects, so try to assign as an object. 
if (objectType && isClassInstance(objectType)) { - return canAssignType(destType, objectType, diag, typeVarMap, flags, recursionCount + 1); + return canAssignType(destType, objectType, diag, typeVarMap, flags, recursionCount); } } else if (isModule(concreteSrcType)) { // Is the destination the built-in "ModuleType"? @@ -19261,22 +20987,36 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (ClassType.isProtocolClass(destType)) { - return canAssignModuleToProtocol( + return canAssignModuleToProtocol( + ClassType.cloneAsInstantiable(destType), + concreteSrcType, + diag, + typeVarMap, + flags, + recursionCount + ); + } + } else if (isInstantiableClass(concreteSrcType)) { + // See if the destType is an instantiation of a Protocol + // class that is effectively a function. + const callbackType = getCallbackProtocolType(destType); + if (callbackType) { + return canAssignType(callbackType, concreteSrcType, diag, typeVarMap, flags, recursionCount); + } + + // If the destType is an instantiation of a Protocol, + // see if the class type itself satisfies the protocol. + if (ClassType.isProtocolClass(destType)) { + return canAssignClassToProtocol( ClassType.cloneAsInstantiable(destType), concreteSrcType, diag, typeVarMap, flags, - recursionCount + 1 + /* treatSourceAsInstantiable */ true, + recursionCount ); } - } else if (isInstantiableClass(concreteSrcType)) { - // See if the destType is an instantiation of a Protocol - // class that is effectively a function. - const callbackType = getCallbackProtocolType(destType); - if (callbackType) { - return canAssignType(callbackType, concreteSrcType, diag, typeVarMap, flags, recursionCount + 1); - } // Determine if the metaclass can be assigned to the object. 
const metaclass = concreteSrcType.details.effectiveMetaclass; @@ -19286,20 +21026,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } else { return canAssignClass( ClassType.cloneAsInstantiable(destType), - ClassType.isProtocolClass(destType) ? concreteSrcType : metaclass, + metaclass, diag, typeVarMap, flags, - recursionCount + 1, - /* reportErrorsUsingObjType */ false, - /* allowMetaclassForProtocols */ true + recursionCount, + /* reportErrorsUsingObjType */ false ); } } } else if (isAnyOrUnknown(concreteSrcType)) { - return (flags & CanAssignFlags.DisallowAssignFromAny) === 0; + return (flags & CanAssignFlags.OverloadOverlapCheck) === 0; } else if (isUnion(concreteSrcType)) { - return canAssignType(destType, concreteSrcType, diag, typeVarMap, flags, recursionCount + 1); + return canAssignType(destType, concreteSrcType, diag, typeVarMap, flags, recursionCount); } } @@ -19317,10 +21056,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions memberType, /* memberClass */ undefined, /* errorNode */ undefined, - recursionCount + 1 + recursionCount ); if (boundMethod) { - concreteSrcType = boundMethod; + concreteSrcType = removeParamSpecVariadicsFromSignature(boundMethod); } } } @@ -19328,7 +21067,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If it's a class, use the constructor for type compatibility checking. if (isInstantiableClass(concreteSrcType) && concreteSrcType.literalValue === undefined) { - const constructor = createFunctionFromConstructor(concreteSrcType); + const constructor = createFunctionFromConstructor(concreteSrcType, recursionCount); if (constructor) { concreteSrcType = constructor; } @@ -19337,7 +21076,9 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (isOverloadedFunction(concreteSrcType)) { // Overloads are not compatible with ParamSpec. 
if (destType.details.paramSpec) { - diag.addMessage(Localizer.DiagnosticAddendum.paramSpecOverload()); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.paramSpecOverload()); + } return false; } @@ -19353,24 +21094,26 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return canAssignType( destType, overload, - diag.createAddendum(), + diag?.createAddendum(), typeVarMapClone, flags, - recursionCount + 1 + recursionCount ); }); if (overloadIndex < 0) { - diag.addMessage( - Localizer.DiagnosticAddendum.noOverloadAssignable().format({ type: printType(destType) }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.noOverloadAssignable().format({ type: printType(destType) }) + ); + } return false; } srcFunction = overloads[overloadIndex]; } else if (isFunction(concreteSrcType)) { srcFunction = concreteSrcType; } else if (isAnyOrUnknown(concreteSrcType)) { - return (flags & CanAssignFlags.DisallowAssignFromAny) === 0; + return (flags & CanAssignFlags.OverloadOverlapCheck) === 0; } if (srcFunction) { @@ -19378,10 +21121,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignFunction( destType, srcFunction, - diag.createAddendum(), + diag?.createAddendum(), typeVarMap ?? new TypeVarMap(getTypeVarScopeId(destType)), flags, - recursionCount + 1 + recursionCount ) ) { return true; @@ -19390,34 +21133,37 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (isOverloadedFunction(destType)) { - const overloadDiag = diag.createAddendum(); + const overloadDiag = diag?.createAddendum(); // All overloads in the dest must be assignable. 
- const isAssignable = !destType.overloads.some((destOverload) => { + const isAssignable = destType.overloads.every((destOverload) => { if (!FunctionType.isOverloaded(destOverload)) { - return false; + return true; } if (typeVarMap) { typeVarMap.addSolveForScope(getTypeVarScopeId(destOverload)); } - return !canAssignType( + const result = canAssignType( destOverload, srcType, - overloadDiag.createAddendum(), + overloadDiag?.createAddendum(), typeVarMap || new TypeVarMap(getTypeVarScopeId(destOverload)), flags, - recursionCount + 1 + recursionCount ); + return result; }); if (!isAssignable) { - overloadDiag.addMessage( - Localizer.DiagnosticAddendum.overloadNotAssignable().format({ - name: destType.overloads[0].details.name, - }) - ); + if (overloadDiag) { + overloadDiag.addMessage( + Localizer.DiagnosticAddendum.overloadNotAssignable().format({ + name: destType.overloads[0].details.name, + }) + ); + } return false; } @@ -19432,7 +21178,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Are we trying to assign None to a protocol? 
- if (isNone(srcType) && isClassInstance(destType) && ClassType.isProtocolClass(destType)) { + if (isNoneInstance(srcType) && isClassInstance(destType) && ClassType.isProtocolClass(destType)) { if (noneType && isInstantiableClass(noneType)) { return canAssignClassToProtocol( ClassType.cloneAsInstantiable(destType), @@ -19440,30 +21186,408 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, flags, - /* allowMetaclassForProtocols */ false, - recursionCount + 1 + /* treatSourceAsInstantiable */ false, + recursionCount ); } } - if (isNone(destType)) { - diag.addMessage(Localizer.DiagnosticAddendum.assignToNone()); + if (isNoneInstance(destType)) { + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.assignToNone()); + } return false; } - diag.addMessage( - Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ - sourceType: printType(srcType), - destType: printType(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + } return false; } + function canAssignFromUnionType( + destType: Type, + srcType: UnionType, + diag: DiagnosticAddendum | undefined, + typeVarMap: TypeVarMap | undefined, + flags: CanAssignFlags, + recursionCount: number + ): boolean { + // Start by checking for an exact match. This is needed to handle unions + // that contain recursive type aliases. + if ( + isTypeSame( + srcType, + destType, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { + return true; + } + + // Handle the case where the source and dest are both unions. Try + // to eliminate as many exact type matches between the src and dest. + if (isUnion(destType)) { + // Handle the special case where the dest is a union of Any and + // a type variable and CanAssignFlags.AllowTypeVarNarrowing is + // in effect. 
This occurs, for example, with the return type of + // the getattr function. + if ((flags & CanAssignFlags.AllowTypeVarNarrowing) !== 0) { + const nonAnySubtypes = destType.subtypes.filter((t) => !isAnyOrUnknown(t)); + if (nonAnySubtypes.length === 1 && isTypeVar(nonAnySubtypes[0])) { + canAssignType(nonAnySubtypes[0], srcType, /* diag */ undefined, typeVarMap, flags, recursionCount); + + // This always succeeds because the destination contains Any. + return true; + } + } + + const remainingDestSubtypes: Type[] = []; + let remainingSrcSubtypes: Type[] = [...srcType.subtypes]; + let isIncompatible = false; + + // First attempt to match all of the non-generic types in the dest + // to non-generic types in the source. + destType.subtypes.forEach((destSubtype) => { + if (requiresSpecialization(destSubtype)) { + remainingDestSubtypes.push(destSubtype); + } else { + const srcTypeIndex = remainingSrcSubtypes.findIndex((srcSubtype) => + isTypeSame( + srcSubtype, + destSubtype, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ); + if (srcTypeIndex >= 0) { + remainingSrcSubtypes.splice(srcTypeIndex, 1); + } else { + isIncompatible = true; + } + } + }); + + // For all remaining source subtypes, attempt to find a dest subtype + // whose primary type matches. 
+ if (!isIncompatible) { + [...remainingSrcSubtypes].forEach((srcSubtype) => { + const destTypeIndex = remainingDestSubtypes.findIndex( + (destSubtype) => + isClass(srcSubtype) && + isClass(destSubtype) && + TypeBase.isInstance(srcSubtype) === TypeBase.isInstance(destSubtype) && + ClassType.isSameGenericClass(srcSubtype, destSubtype) + ); + if (destTypeIndex >= 0) { + if ( + !canAssignType( + remainingDestSubtypes[destTypeIndex], + srcSubtype, + diag?.createAddendum(), + typeVarMap, + flags, + recursionCount + ) + ) { + isIncompatible = true; + } + + remainingDestSubtypes.splice(destTypeIndex, 1); + remainingSrcSubtypes = remainingSrcSubtypes.filter((t) => t !== srcSubtype); + } + }); + } + + // If there is a remaining dest subtype and it's a type variable, attempt + // to assign the remaining source subtypes to it. + if (!isIncompatible && (remainingDestSubtypes.length !== 0 || remainingSrcSubtypes.length !== 0)) { + if ( + remainingDestSubtypes.length !== 1 || + !isTypeVar(remainingDestSubtypes[0]) || + !canAssignType( + remainingDestSubtypes[0], + combineTypes(remainingSrcSubtypes), + diag?.createAddendum(), + typeVarMap, + flags, + recursionCount + ) + ) { + isIncompatible = true; + } + } + + if (!isIncompatible) { + return true; + } + } + + // For union sources, all of the types need to be assignable to the dest. + let isIncompatible = false; + doForEachSubtype(srcType, (subtype) => { + if (!canAssignType(destType, subtype, /* diag */ undefined, typeVarMap, flags, recursionCount)) { + // That didn't work, so try again with concrete versions. 
+ if ( + !canAssignType( + destType, + makeTopLevelTypeVarsConcrete(subtype), + diag?.createAddendum(), + typeVarMap, + flags, + recursionCount + ) + ) { + isIncompatible = true; + } + } + }); + + if (isIncompatible) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + } + return false; + } + + return true; + } + + function canAssignToUnionType( + destType: UnionType, + srcType: Type, + diag: DiagnosticAddendum | undefined, + typeVarMap: TypeVarMap | undefined, + flags: CanAssignFlags, + recursionCount: number + ): boolean { + // If we need to enforce invariance, the source needs to be compatible + // with all subtypes in the dest, unless those subtypes are subclasses + // of other subtypes. + if (flags & CanAssignFlags.EnforceInvariance) { + let isIncompatible = false; + + doForEachSubtype(destType, (subtype, index) => { + if ( + !isIncompatible && + !canAssignType(subtype, srcType, diag?.createAddendum(), typeVarMap, flags, recursionCount) + ) { + // Determine whether this subtype is assignable to + // another subtype elsewhere in the union. If so, we can ignore + // the incompatibility. + let skipSubtype = false; + if (!isAnyOrUnknown(subtype)) { + doForEachSubtype(destType, (otherSubtype, otherIndex) => { + if (index !== otherIndex && !skipSubtype) { + if ( + canAssignType( + otherSubtype, + subtype, + /* diag */ undefined, + /* typeVarMap */ undefined, + CanAssignFlags.Default, + recursionCount + ) + ) { + skipSubtype = true; + } + } + }); + } + if (!skipSubtype) { + isIncompatible = true; + } + } + }); + + if (isIncompatible) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + } + return false; + } + + return true; + } + + // For union destinations, we just need to match one of the types. 
+ const diagAddendum = diag ? new DiagnosticAddendum() : undefined; + + let foundMatch = false; + // Run through all subtypes in the union. Don't stop at the first + // match we find because we may need to match TypeVars in other + // subtypes. We special-case "None" so we can handle Optional[T] + // without matching the None to the type var. + if (isNoneInstance(srcType) && isOptionalType(destType)) { + foundMatch = true; + } else { + let bestTypeVarMap: TypeVarMap | undefined; + let bestTypeVarMapScore: number | undefined; + + // If the srcType is a literal, try to use the fast-path lookup + // in case the destType is a union with hundreds of literals. + if ( + isClassInstance(srcType) && + isLiteralType(srcType) && + UnionType.containsType(destType, srcType, recursionCount) + ) { + return true; + } + + doForEachSubtype(destType, (subtype) => { + // Make a temporary clone of the typeVarMap. We don't want to modify + // the original typeVarMap until we find the "optimal" typeVar mapping. + const typeVarMapClone = typeVarMap?.clone(); + if ( + canAssignType( + subtype, + srcType, + diagAddendum?.createAddendum(), + typeVarMapClone, + flags, + recursionCount + ) + ) { + foundMatch = true; + + if (typeVarMapClone) { + // Ask the typeVarMap to compute a "score" for the current + // contents of the table. + const typeVarMapScore = typeVarMapClone.getScore(); + if (bestTypeVarMapScore === undefined || bestTypeVarMapScore <= typeVarMapScore) { + // We found a typeVar mapping with a higher score than before. + bestTypeVarMapScore = typeVarMapScore; + bestTypeVarMap = typeVarMapClone; + } + } + } + }); + + // If we found a winning type var mapping, copy it back to typeVarMap. + if (typeVarMap && bestTypeVarMap) { + typeVarMap.copyFromClone(bestTypeVarMap); + } + } + + // If the source is a constrained TypeVar, see if we can assign all of the + // constraints to the union. 
+ if (!foundMatch) { + if (isTypeVar(srcType) && srcType.details.constraints.length > 0) { + foundMatch = canAssignType( + destType, + makeTopLevelTypeVarsConcrete(srcType), + diagAddendum?.createAddendum(), + typeVarMap, + flags, + recursionCount + ); + } + } + + if (!foundMatch) { + if (diag && diagAddendum) { + diag.addMessage( + Localizer.DiagnosticAddendum.typeAssignmentMismatch().format({ + sourceType: printType(srcType), + destType: printType(destType), + }) + ); + diag.addAddendum(diagAddendum); + } + return false; + } + return true; + } + + function canAssignConditionalTypeToTypeVar(destType: TypeVarType, srcType: Type, recursionCount: number): boolean { + // The srcType is assignable only if all of its subtypes are assignable. + return !findSubtype(srcType, (srcSubtype) => { + if ( + isTypeSame( + destType, + srcSubtype, + /* ignorePseudoGeneric */ true, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { + return false; + } + + const destTypeVarName = TypeVarType.getNameWithScope(destType); + + // Determine which conditions on this type apply to this type variable. + // There might be more than one of them. + const applicableConditions = (getTypeCondition(srcSubtype) ?? []).filter( + (constraint) => constraint.typeVarName === destTypeVarName + ); + + // If there are no applicable conditions, it's not assignable. 
+ if (applicableConditions.length === 0) { + return true; + } + + return !applicableConditions.some((condition) => { + if (destType.details.boundType) { + assert(condition.constraintIndex === 0, 'Expected constraint for bound TypeVar to have index of 0'); + + return canAssignType( + destType.details.boundType, + srcSubtype, + /* diag */ undefined, + /* typeVarMap */ undefined, + /* flags */ undefined, + recursionCount + ); + } + + if (destType.details.constraints.length > 0) { + assert( + condition.constraintIndex < destType.details.constraints.length, + 'Constraint for constrained TypeVar is out of bounds' + ); + + return canAssignType( + destType.details.constraints[condition.constraintIndex], + srcSubtype, + /* diag */ undefined, + /* typeVarMap */ undefined, + /* flags */ undefined, + recursionCount + ); + } + + // This is a non-bound and non-constrained type variable with a matching condition. + return true; + }); + }); + } + // Synthesize a function that represents the constructor for this class // taking into consideration the __init__ and __new__ methods. - function createFunctionFromConstructor(classType: ClassType): FunctionType | OverloadedFunctionType | undefined { + function createFunctionFromConstructor( + classType: ClassType, + recursionCount = 0 + ): FunctionType | OverloadedFunctionType | undefined { // Use the __init__ method if available. It's usually more detailed. 
const initInfo = lookUpClassMember( classType, @@ -19476,9 +21600,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const objectType = ClassType.cloneAsInstance(classType); const convertInitToConstructor = (initSubtype: FunctionType) => { - let constructorFunction = bindFunctionToClassOrObject(objectType, initSubtype) as - | FunctionType - | undefined; + let constructorFunction = bindFunctionToClassOrObject( + objectType, + initSubtype, + /* memberClass */ undefined, + /* errorNode */ undefined, + recursionCount + ) as FunctionType | undefined; if (constructorFunction) { constructorFunction = FunctionType.clone(constructorFunction); constructorFunction.details.declaredReturnType = objectType; @@ -19564,7 +21692,37 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return constructorFunction; } - function getCallbackProtocolType(objType: ClassType): FunctionType | OverloadedFunctionType | undefined { + // If the class is a protocol and it has a `__call__` method but no other methods + // or attributes that would be incompatible with a function, this method returns + // the signature of the call implied by the `__call__` method. Otherwise it returns + // undefined. + function getCallbackProtocolType(objType: ClassType): FunctionType | OverloadedFunctionType | undefined { + if (!isClassInstance(objType) || !ClassType.isProtocolClass(objType)) { + return undefined; + } + + // Make sure that the protocol class doesn't define any fields that + // a normal function wouldn't be compatible with. 
+ for (const mroClass of objType.details.mro) { + if (isClass(mroClass) && ClassType.isProtocolClass(mroClass)) { + for (const field of mroClass.details.fields) { + if (field[0] !== '__call__' && !field[1].isIgnoredForProtocolMatch()) { + let fieldIsPartOfFunction = false; + + if (functionObj && isClass(functionObj)) { + if (functionObj.details.fields.has(field[0])) { + fieldIsPartOfFunction = true; + } + } + + if (!fieldIsPartOfFunction) { + return undefined; + } + } + } + } + } + const callMember = lookUpObjectMember(objType, '__call__'); if (!callMember) { return undefined; @@ -19586,7 +21744,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions destType: Type, srcType: Type, paramIndex: number, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, destTypeVarMap: TypeVarMap, srcTypeVarMap: TypeVarMap, flags: CanAssignFlags, @@ -19604,61 +21762,217 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return true; } - let specializedDestType = applySolvedTypeVars(destType, destTypeVarMap); + if (isVariadicTypeVar(destType) && !isUnpacked(srcType)) { + return false; + } - // If the destination includes type variables that still need to be solved, - // call canAssignType with ReverseTypeVarMatching to populate destTypeVarMap. 
- if (requiresSpecialization(specializedDestType)) { - if ( - !canAssignType( - srcType, + // We may need to reverse the type var mapping to populate the type + // var map of the + let specializedSrcType = srcType; + let specializedDestType = destType; + let reverseMatchingFailed = false; + + if ((flags & CanAssignFlags.ReverseTypeVarMatching) === 0) { + specializedDestType = applySolvedTypeVars(destType, destTypeVarMap); + + if (requiresSpecialization(specializedDestType)) { + reverseMatchingFailed = !canAssignType( + specializedSrcType, specializedDestType, - new DiagnosticAddendum(), + /* diag */ undefined, destTypeVarMap, - flags ^ CanAssignFlags.ReverseTypeVarMatching, - recursionCount + 1 - ) - ) { - diag.addMessage( - Localizer.DiagnosticAddendum.paramAssignment().format({ - index: paramIndex + 1, - sourceType: printType(destType), - destType: printType(srcType), - }) + flags | + CanAssignFlags.ReverseTypeVarMatching | + CanAssignFlags.IgnoreTypeVarScope | + CanAssignFlags.RetainLiteralsForTypeVar, + recursionCount ); + + specializedDestType = applySolvedTypeVars(destType, destTypeVarMap); + } + } else { + specializedSrcType = applySolvedTypeVars(srcType, srcTypeVarMap); + + if (requiresSpecialization(specializedSrcType)) { + if (requiresSpecialization(specializedSrcType)) { + reverseMatchingFailed = !canAssignType( + specializedSrcType, + specializedDestType, + /* diag */ undefined, + srcTypeVarMap, + (flags & ~CanAssignFlags.ReverseTypeVarMatching) | CanAssignFlags.IgnoreTypeVarScope, + recursionCount + ); + + specializedSrcType = applySolvedTypeVars(srcType, srcTypeVarMap); + } + } + + if (reverseMatchingFailed) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.paramAssignment().format({ + index: paramIndex + 1, + sourceType: printType(destType), + destType: printType(srcType), + }) + ); + } + return false; } + } - specializedDestType = applySolvedTypeVars(destType, destTypeVarMap); + // Handle the special case where the source is a Self 
type and the + // destination is not. + if (!isTypeVar(specializedDestType) || !specializedDestType.details.isSynthesizedSelf) { + if ( + isTypeVar(specializedSrcType) && + specializedSrcType.details.isSynthesizedSelf && + specializedSrcType.details.boundType + ) { + specializedSrcType = applySolvedTypeVars( + specializedSrcType.details.boundType, + new TypeVarMap(getTypeVarScopeId(specializedSrcType)), + /* unknownIfNotFound */ true + ); + } } if ( !canAssignType( - srcType, + specializedSrcType, specializedDestType, - diag.createAddendum(), - srcTypeVarMap, + diag?.createAddendum(), + (flags & CanAssignFlags.ReverseTypeVarMatching) === 0 ? srcTypeVarMap : destTypeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { - diag.addMessage( - Localizer.DiagnosticAddendum.paramAssignment().format({ - index: paramIndex + 1, - sourceType: printType(destType), - destType: printType(srcType), - }) - ); - return false; + // There are cases involving lambdas where the parameter types are type + // variables and match exactly but fail the assignment check because the + // TypeVars are out of scope. This happens because parameter types assigned + // to lambdas during bidirectional inference do not match the TypeVar scope + // of the lambda itself. + if (!isTypeSame(destType, srcType)) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.paramAssignment().format({ + index: paramIndex + 1, + sourceType: printType(destType), + destType: printType(srcType), + }) + ); + } + + return false; + } } return true; } + // Determines whether we need to pack some of the source positionals + // into a tuple that matches a variadic *args parameter in the destination. + function adjustSourceParamDetailsForDestVariadic( + srcDetails: ParameterListDetails, + destDetails: ParameterListDetails + ) { + // If there is no *args parameter in the dest, we have nothing to do. 
+ if (destDetails.argsIndex === undefined) { + return; + } + + // If the source doesn't have enough positional parameters, we have nothing to do. + if (srcDetails.params.length < destDetails.argsIndex) { + return; + } + + let srcLastToPackIndex = srcDetails.params.findIndex( + (p, i) => i >= destDetails.argsIndex! && p.source === ParameterSource.KeywordOnly + ); + if (srcLastToPackIndex < 0) { + srcLastToPackIndex = srcDetails.params.length; + } + + const destFirstNonPositional = destDetails.firstKeywordOnlyIndex ?? destDetails.params.length; + const suffixLength = destFirstNonPositional - destDetails.argsIndex - 1; + const srcPositionalsToPack = srcDetails.params.slice(destDetails.argsIndex, srcLastToPackIndex - suffixLength); + const srcTupleTypes: TupleTypeArgument[] = []; + srcPositionalsToPack.forEach((entry) => { + if (entry.param.category === ParameterCategory.VarArgList) { + if (isUnpackedVariadicTypeVar(entry.type)) { + srcTupleTypes.push({ type: entry.type, isUnbounded: false }); + } else if (isUnpackedClass(entry.type) && entry.type.tupleTypeArguments) { + srcTupleTypes.push(...entry.type.tupleTypeArguments); + } else { + srcTupleTypes.push({ type: entry.type, isUnbounded: true }); + } + } else { + srcTupleTypes.push({ type: entry.type, isUnbounded: false }); + } + }); + + if (srcTupleTypes.length !== 1 || !isVariadicTypeVar(srcTupleTypes[0].type)) { + let srcPositionalsType: Type; + if (tupleClassType && isInstantiableClass(tupleClassType)) { + srcPositionalsType = convertToInstance( + specializeTupleClass( + tupleClassType, + srcTupleTypes, + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true, + /* isUnpackedTuple */ true + ) + ); + } else { + srcPositionalsType = UnknownType.create(); + } + + // Snip out the portion of the source positionals that map to the variadic + // dest parameter and replace it with a single parameter that is typed as a + // tuple containing the individual types of the replaced parameters. 
+ srcDetails.params = [ + ...srcDetails.params.slice(0, destDetails.argsIndex), + { + param: { + category: ParameterCategory.VarArgList, + name: '_arg_combined', + isNameSynthesized: true, + hasDeclaredType: true, + type: srcPositionalsType, + }, + type: srcPositionalsType, + index: -1, + source: ParameterSource.PositionOrKeyword, + }, + ...srcDetails.params.slice( + destDetails.argsIndex + srcPositionalsToPack.length, + srcDetails.params.length + ), + ]; + + const argsIndex = srcDetails.params.findIndex( + (param) => param.param.category === ParameterCategory.VarArgList + ); + srcDetails.argsIndex = argsIndex >= 0 ? argsIndex : undefined; + + const kwargsIndex = srcDetails.params.findIndex( + (param) => param.param.category === ParameterCategory.VarArgDictionary + ); + srcDetails.kwargsIndex = kwargsIndex >= 0 ? kwargsIndex : undefined; + + const firstKeywordOnlyIndex = srcDetails.params.findIndex( + (param) => param.source === ParameterSource.KeywordOnly + ); + srcDetails.firstKeywordOnlyIndex = firstKeywordOnlyIndex >= 0 ? firstKeywordOnlyIndex : undefined; + } + } + function canAssignFunction( destType: FunctionType, srcType: FunctionType, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, typeVarMap: TypeVarMap, flags: CanAssignFlags, recursionCount: number @@ -19667,479 +21981,412 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const checkReturnType = (flags & CanAssignFlags.SkipFunctionReturnTypeCheck) === 0; flags &= ~CanAssignFlags.SkipFunctionReturnTypeCheck; - const srcParams = srcType.details.parameters; - const destParams = destType.details.parameters; - - let srcStartOfNamed = srcParams.findIndex( - (p, index) => - p.category === ParameterCategory.VarArgDictionary || - (p.category === ParameterCategory.VarArgList && !p.name) || - (index > 0 && srcParams[index - 1].category === ParameterCategory.VarArgList) - ); - let srcPositionals = srcStartOfNamed < 0 ? 
srcParams : srcParams.slice(0, srcStartOfNamed); - const srcArgsIndex = srcPositionals.findIndex((p) => p.category === ParameterCategory.VarArgList && p.name); - srcPositionals = srcPositionals.filter((p) => p.category === ParameterCategory.Simple && p.name); - - const destStartOfNamed = destParams.findIndex( - (p, index) => - p.category === ParameterCategory.VarArgDictionary || - (p.category === ParameterCategory.VarArgList && !p.name) || - (index > 0 && destParams[index - 1].category === ParameterCategory.VarArgList) - ); - let destPositionals = destStartOfNamed < 0 ? destParams : destParams.slice(0, destStartOfNamed); - const destArgsIndex = destPositionals.findIndex((p) => p.category === ParameterCategory.VarArgList && p.name); - - // Is there a *args parameter annotated with a TypeVarTuple? - const destVariadicArgsList = - destArgsIndex >= 0 && isVariadicTypeVar(destPositionals[destArgsIndex].type) - ? destPositionals[destArgsIndex] - : undefined; - destPositionals = destPositionals.filter((p) => p.category === ParameterCategory.Simple && p.name); - - // Is there a simple (non-*args) parameter with a TypeVarTuple? If so, - // we may need to pack zero or more source positionals into a tuple. - const destVariadicParamIndex = destPositionals.findIndex( - (p) => p.category === ParameterCategory.Simple && isVariadicTypeVar(p.type) - ); - if (destVariadicParamIndex >= 0) { - const srcPositionalsToPack = srcPositionals.slice( - destVariadicParamIndex, - destVariadicParamIndex + 1 + srcPositionals.length - destPositionals.length - ); - - // Don't try to pack *args parameters. They are not allowed to be matched against - // a variadic type variable. 
- if (srcArgsIndex < 0) { - const srcTupleTypes: Type[] = srcPositionalsToPack.map((entry) => { - const srcParamIndex = srcParams.findIndex((p) => p === entry); - return FunctionType.getEffectiveParameterType(srcType, srcParamIndex); - }); - - if (srcTupleTypes.length !== 1 || !isVariadicTypeVar(srcTupleTypes[0])) { - let srcPositionalsType: Type; - if (tupleClassType && isInstantiableClass(tupleClassType)) { - srcPositionalsType = convertToInstance( - specializeTupleClass( - tupleClassType, - srcTupleTypes, - /* isTypeArgumentExplicit */ true, - /* stripLiterals */ true, - /* isForUnpackedVariadicTypeVar */ true - ) - ); - } else { - srcPositionalsType = UnknownType.create(); - } - - // Snip out the portion of the source positionals that map to the variadic - // dest parameter and replace it with a single parameter that is typed as a - // tuple containing the individual types of the replaced parameters. - srcPositionals = [ - ...srcPositionals.slice(0, destVariadicParamIndex), - { - category: ParameterCategory.Simple, - name: '_arg_combined', - isNameSynthesized: true, - hasDeclaredType: true, - type: srcPositionalsType, - }, - ...srcPositionals.slice( - destVariadicParamIndex + 1 + srcPositionals.length - destPositionals.length, - srcPositionals.length - ), - ]; - } - } - } - - const positionalsToMatch = Math.min(srcPositionals.length, destPositionals.length); - const srcTypeVarMap = new TypeVarMap(getTypeVarScopeId(srcType)); + destType = removeParamSpecVariadicsFromFunction(destType); + srcType = removeParamSpecVariadicsFromFunction(srcType); - const srcKwargsIndex = srcParams.findIndex((p) => p.category === ParameterCategory.VarArgDictionary && p.name); - const destKwargsIndex = destParams.findIndex( - (p) => p.category === ParameterCategory.VarArgDictionary && p.name - ); + const destParamDetails = getParameterListDetails(destType); + const srcParamDetails = getParameterListDetails(srcType); + adjustSourceParamDetailsForDestVariadic(srcParamDetails, 
destParamDetails); - const destPositionalOnlyIndex = destParams.findIndex((p) => p.category === ParameterCategory.Simple && !p.name); + // The input typeVarMap normally corresponds to the destType, but it + // is reversed if the ReverseTypeVarMatching flag is set. + const destTypeVarMap = + (flags & CanAssignFlags.ReverseTypeVarMatching) === 0 + ? typeVarMap + : new TypeVarMap(getTypeVarScopeId(destType)); + const srcTypeVarMap = + (flags & CanAssignFlags.ReverseTypeVarMatching) !== 0 + ? typeVarMap + : new TypeVarMap(getTypeVarScopeId(srcType)); - const isParamSpecInvolved = + const targetIncludesParamSpec = (flags & CanAssignFlags.ReverseTypeVarMatching) !== 0 ? !!srcType.details.paramSpec : !!destType.details.paramSpec; - if (!FunctionType.shouldSkipParamCompatibilityCheck(destType)) { - // Match positional parameters. - for (let paramIndex = 0; paramIndex < positionalsToMatch; paramIndex++) { - // Find the original index of this source param. If we synthesized it above (for - // a variadic parameter), it may not be found. - const srcParamIndex = srcParams.findIndex((p) => p === srcPositionals[paramIndex]); - const srcParamType = - srcParamIndex >= 0 - ? FunctionType.getEffectiveParameterType(srcType, srcParamIndex) - : srcPositionals[paramIndex].type; - const destParamIndex = destParams.findIndex((p) => p === destPositionals[paramIndex]); - const destParamType = FunctionType.getEffectiveParameterType(destType, destParamIndex); - - const destParamName = destPositionals[paramIndex].name; - const srcParamName = srcPositionals[paramIndex].name || ''; + const destPositionalCount = + destParamDetails.argsIndex ?? destParamDetails.firstKeywordOnlyIndex ?? destParamDetails.params.length; + const srcPositionalCount = + srcParamDetails.argsIndex ?? srcParamDetails.firstKeywordOnlyIndex ?? srcParamDetails.params.length; + const positionalsToMatch = Math.min(destPositionalCount, srcPositionalCount); + + // Match positional parameters. 
+ for (let paramIndex = 0; paramIndex < positionalsToMatch; paramIndex++) { + const destParam = destParamDetails.params[paramIndex]; + const srcParam = srcParamDetails.params[paramIndex]; + + // Find the original index of this source param. If we synthesized it above (for + // a variadic parameter), it may not be found. + const srcParamType = srcParam.type; + const destParamType = destParam.type; + + const destParamName = destParam.param.name ?? ''; + const srcParamName = srcParam.param.name ?? ''; + if (destParamName && !isPrivateOrProtectedName(destParamName) && !isPrivateOrProtectedName(srcParamName)) { + const isDestPositionalOnly = destParam.source === ParameterSource.PositionOnly; if ( - destParamName && - !isPrivateOrProtectedName(destParamName) && - !isPrivateOrProtectedName(srcParamName) + !isDestPositionalOnly && + destParam.param.category !== ParameterCategory.VarArgList && + srcParam.param.category !== ParameterCategory.VarArgList && + destParamName !== srcParamName ) { - const isPositionalOnly = destPositionalOnlyIndex >= 0 && paramIndex < destPositionalOnlyIndex; - if (!isPositionalOnly && destParamName !== srcParamName) { + if (diag) { diag.createAddendum().addMessage( Localizer.DiagnosticAddendum.functionParamName().format({ srcName: srcParamName, destName: destParamName, }) ); - canAssign = false; } + canAssign = false; } + } - // Handle the special case of an overloaded __init__ method whose self - // parameter is annotated. 
- if ( - paramIndex === 0 && - srcType.details.name === '__init__' && - FunctionType.isInstanceMethod(srcType) && - destType.details.name === '__init__' && - FunctionType.isInstanceMethod(destType) && - FunctionType.isOverloaded(destType) && - destPositionals[paramIndex].hasDeclaredType - ) { - continue; + if (!!destParam.param.hasDefault && !srcParam.param.hasDefault) { + if (diag) { + diag.createAddendum().addMessage( + Localizer.DiagnosticAddendum.functionParamDefaultMissing().format({ + name: srcParamName, + }) + ); } + canAssign = false; + } + + // Handle the special case of an overloaded __init__ method whose self + // parameter is annotated. + if ( + paramIndex === 0 && + srcType.details.name === '__init__' && + FunctionType.isInstanceMethod(srcType) && + destType.details.name === '__init__' && + FunctionType.isInstanceMethod(destType) && + FunctionType.isOverloaded(destType) && + destParam.param.hasDeclaredType + ) { + continue; + } + if ( + !canAssignFunctionParameter( + destParamType, + srcParamType, + paramIndex, + diag?.createAddendum(), + destTypeVarMap, + srcTypeVarMap, + flags, + recursionCount + ) + ) { + // Handle the special case where the source parameter is a synthesized + // TypeVar for "self" or "cls". if ( - !canAssignFunctionParameter( - destParamType, - srcParamType, - paramIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) + (flags & CanAssignFlags.SkipSelfClsTypeCheck) === 0 || + !isTypeVar(srcParamType) || + !srcParamType.details.isSynthesized ) { - // Handle the special case where the source parameter is a synthesized - // TypeVar for "self" or "cls". - if (!isTypeVar(srcParamType) || !srcParamType.details.isSynthesized) { - canAssign = false; - } + canAssign = false; } } + } - if (destVariadicArgsList) { - // Package up the remaining source positional parameters - // and assign them to the variadic. 
- const remainingSrcPositionals = srcPositionals.slice(destPositionals.length).map((param) => param.type); - let isSourceNonVariadicArgs = false; - if (srcArgsIndex >= 0) { - const srcArgsType = FunctionType.getEffectiveParameterType(srcType, srcArgsIndex); - if (isVariadicTypeVar(srcArgsType)) { - remainingSrcPositionals.push(srcArgsType); - } else { - isSourceNonVariadicArgs = true; - } - } + if ( + !FunctionType.shouldSkipArgsKwargsCompatibilityCheck(destType) && + destParamDetails.firstPositionOrKeywordIndex < srcParamDetails.positionOnlyParamCount && + !targetIncludesParamSpec + ) { + if (diag) { + diag.createAddendum().addMessage( + Localizer.DiagnosticAddendum.argsPositionOnly().format({ + expected: srcParamDetails.positionOnlyParamCount, + received: destParamDetails.firstPositionOrKeywordIndex, + }) + ); + } + canAssign = false; + } - let srcPositionalsType: Type; - if (remainingSrcPositionals.length === 1 && isVariadicTypeVar(remainingSrcPositionals[0])) { - // Handle the special case where we're assigning a variadic type - // variable to a variadic type variable. - srcPositionalsType = remainingSrcPositionals[0]; - } else { - if (tupleClassType && isInstantiableClass(tupleClassType)) { - srcPositionalsType = convertToInstance( - specializeTupleClass( - tupleClassType, - remainingSrcPositionals, - /* isTypeArgumentExplicit */ true, - /* stripLiterals */ true, - /* isForUnpackedVariadicTypeVar */ true - ) - ); - } else { - srcPositionalsType = UnknownType.create(); - } - } + if (destPositionalCount < srcPositionalCount) { + // If the dest type includes a ParamSpec, the additional parameters + // can be assigned to it, so no need to report an error here. 
+ if (!targetIncludesParamSpec) { + const nonDefaultSrcParamCount = srcParamDetails.params.filter( + (p) => !!p.param.name && !p.param.hasDefault && p.param.category === ParameterCategory.Simple + ).length; - if (isSourceNonVariadicArgs) { - diag.createAddendum().addMessage( - Localizer.DiagnosticAddendum.argsParamWithVariadic().format({ - paramName: srcParams[srcArgsIndex].name!, - }) - ); - canAssign = false; - } else if ( - !canAssignFunctionParameter( - FunctionType.getEffectiveParameterType(destType, destArgsIndex), - srcPositionalsType, - destArgsIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) - ) { - canAssign = false; - } - } else if (destPositionals.length < srcPositionals.length) { - // If the dest type includes a ParamSpec, the additional parameters - // can be assigned to it, so no need to report an error here. - if (!isParamSpecInvolved) { - const nonDefaultSrcParamCount = srcParams.filter( - (p) => !!p.name && !p.hasDefault && p.category === ParameterCategory.Simple - ).length; - if (destArgsIndex < 0) { - if (destPositionals.length < nonDefaultSrcParamCount) { - if (destPositionalOnlyIndex >= 0 && destPositionalOnlyIndex < srcPositionals.length) { + if (destParamDetails.argsIndex === undefined) { + if (destPositionalCount < nonDefaultSrcParamCount) { + if ( + destParamDetails.firstPositionOrKeywordIndex > 0 && + destParamDetails.firstPositionOrKeywordIndex < srcPositionalCount + ) { + if (diag) { diag.createAddendum().addMessage( Localizer.DiagnosticAddendum.functionTooFewParams().format({ expected: nonDefaultSrcParamCount, - received: destPositionals.length, + received: destPositionalCount, }) ); - canAssign = false; } + canAssign = false; } - } else { - // Make sure the remaining positional arguments are of the - // correct type for the *args parameter. 
- const destArgsType = FunctionType.getEffectiveParameterType(destType, destArgsIndex); - if (!isAnyOrUnknown(destArgsType)) { - for ( - let paramIndex = destPositionals.length; - paramIndex < srcPositionals.length; - paramIndex++ + } + } else { + // Make sure the remaining positional arguments are of the + // correct type for the *args parameter. + const destArgsType = destParamDetails.params[destParamDetails.argsIndex].type; + if (!isAnyOrUnknown(destArgsType)) { + for (let paramIndex = destPositionalCount; paramIndex < srcPositionalCount; paramIndex++) { + const srcParamType = srcParamDetails.params[paramIndex].type; + if ( + !canAssignFunctionParameter( + destArgsType, + srcParamType, + paramIndex, + diag?.createAddendum(), + destTypeVarMap, + srcTypeVarMap, + flags, + recursionCount + ) ) { - const srcParamType = FunctionType.getEffectiveParameterType( - srcType, - srcParams.findIndex((p) => p === srcPositionals[paramIndex]) - ); - if ( - !canAssignFunctionParameter( - destArgsType, - srcParamType, - paramIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) - ) { - canAssign = false; - } + canAssign = false; } } } } - } else if (srcPositionals.length < destPositionals.length) { - if (srcArgsIndex >= 0) { - // Make sure the remaining dest parameters can be assigned to the source - // *args parameter type. - const srcArgsType = FunctionType.getEffectiveParameterType(srcType, srcArgsIndex); - for (let paramIndex = srcPositionals.length; paramIndex < destPositionals.length; paramIndex++) { - const destParamType = FunctionType.getEffectiveParameterType( - destType, - destParams.findIndex((p) => p === destPositionals[paramIndex]) - ); - if (isVariadicTypeVar(destParamType) && !isVariadicTypeVar(srcArgsType)) { + } + } else if (srcPositionalCount < destPositionalCount) { + if (srcParamDetails.argsIndex !== undefined) { + // Make sure the remaining dest parameters can be assigned to the source + // *args parameter type. 
+ const srcArgsType = srcParamDetails.params[srcParamDetails.argsIndex].type; + for (let paramIndex = srcPositionalCount; paramIndex < destPositionalCount; paramIndex++) { + const destParamType = destParamDetails.params[paramIndex].type; + if (isVariadicTypeVar(destParamType) && !isVariadicTypeVar(srcArgsType)) { + if (diag) { diag.addMessage(Localizer.DiagnosticAddendum.typeVarTupleRequiresKnownLength()); - canAssign = false; - } else if ( - !canAssignFunctionParameter( - destParamType, - srcArgsType, - paramIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) - ) { - canAssign = false; } + canAssign = false; + } else if ( + !canAssignFunctionParameter( + destParamType, + srcArgsType, + paramIndex, + diag?.createAddendum(), + destTypeVarMap, + srcTypeVarMap, + flags, + recursionCount + ) + ) { + canAssign = false; } - } else { + } + } else { + if (diag) { diag.addMessage( Localizer.DiagnosticAddendum.functionTooManyParams().format({ - expected: srcPositionals.length, - received: destPositionals.length, + expected: srcPositionalCount, + received: destPositionalCount, }) ); - canAssign = false; + } + canAssign = false; + } + } + + // If both src and dest have an "*args" parameter, make sure + // their types are compatible. 
+ if ( + srcParamDetails.argsIndex !== undefined && + destParamDetails.argsIndex !== undefined && + !FunctionType.shouldSkipArgsKwargsCompatibilityCheck(destType) + ) { + let destArgsType = destParamDetails.params[destParamDetails.argsIndex].type; + let srcArgsType = srcParamDetails.params[srcParamDetails.argsIndex].type; + + if (tupleClassType && isInstantiableClass(tupleClassType)) { + if (!isUnpacked(destArgsType)) { + destArgsType = ClassType.cloneForUnpacked( + ClassType.cloneAsInstance( + specializeTupleClass( + tupleClassType, + [{ type: destArgsType, isUnbounded: true }], + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true + ) + ) + ); + } + + if (!isUnpacked(srcArgsType)) { + srcArgsType = ClassType.cloneForUnpacked( + ClassType.cloneAsInstance( + specializeTupleClass( + tupleClassType, + [{ type: srcArgsType, isUnbounded: true }], + /* isTypeArgumentExplicit */ true, + /* stripLiterals */ true + ) + ) + ); } } - // If both src and dest have an "*args" parameter, make sure - // their types are compatible. - if (srcArgsIndex >= 0 && destArgsIndex >= 0) { - const srcArgsType = FunctionType.getEffectiveParameterType(srcType, srcArgsIndex); - const destArgsType = FunctionType.getEffectiveParameterType(destType, destArgsIndex); - if ( - !canAssignFunctionParameter( - destArgsType, - srcArgsType, - destArgsIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) - ) { - canAssign = false; - } + if ( + !canAssignFunctionParameter( + destArgsType, + srcArgsType, + destParamDetails.params[destParamDetails.argsIndex].index, + diag?.createAddendum(), + destTypeVarMap, + srcTypeVarMap, + flags, + recursionCount + ) + ) { + canAssign = false; } + } - // If the dest has an "*args" but the source doesn't, report the incompatibility. - // The converse situation is OK. - if (srcArgsIndex < 0 && destArgsIndex >= 0 && !destVariadicArgsList) { + // If the dest has an "*args" but the source doesn't, report the incompatibility. 
+ // The converse situation is OK. + if ( + !FunctionType.shouldSkipArgsKwargsCompatibilityCheck(destType) && + srcParamDetails.argsIndex === undefined && + destParamDetails.argsIndex !== undefined && + !destParamDetails.hasUnpackedVariadicTypeVar && + !targetIncludesParamSpec + ) { + if (diag) { diag.createAddendum().addMessage( Localizer.DiagnosticAddendum.argsParamMissing().format({ - paramName: destParams[destArgsIndex].name!, + paramName: destParamDetails.params[destParamDetails.argsIndex].param.name ?? '', }) ); - canAssign = false; } + canAssign = false; + } - // Handle matching of named (keyword) parameters. - if (!isParamSpecInvolved) { - // Build a dictionary of named parameters in the dest. - const destParamMap = new Map(); - let destHasKwargsParam = false; - if (destStartOfNamed >= 0) { - destParams.forEach((param, index) => { - if (index >= destStartOfNamed) { - if (param.category === ParameterCategory.VarArgDictionary) { - destHasKwargsParam = true; - } else if (param.name && param.category === ParameterCategory.Simple) { - destParamMap.set(param.name, param); - } + // Handle matching of named (keyword) parameters. + if (!targetIncludesParamSpec) { + // Build a dictionary of named parameters in the dest. + const destParamMap = new Map(); + + if (destParamDetails.firstKeywordOnlyIndex !== undefined) { + destParamDetails.params.forEach((param, index) => { + if (index >= destParamDetails.firstKeywordOnlyIndex!) { + if (param.param.name && param.param.category === ParameterCategory.Simple) { + destParamMap.set(param.param.name, param); } - }); - } + } + }); + } - // If the dest has fewer positional arguments than the source, the remaining - // positional arguments in the source can be treated as named arguments. 
- if (destPositionals.length < srcPositionals.length && destArgsIndex < 0) { - srcStartOfNamed = destPositionals.length; - } + // If the dest has fewer positional arguments than the source, the remaining + // positional arguments in the source can be treated as named arguments. + let srcStartOfNamed = + srcParamDetails.firstKeywordOnlyIndex !== undefined + ? srcParamDetails.firstKeywordOnlyIndex + : srcParamDetails.params.length; + if (destPositionalCount < srcPositionalCount && destParamDetails.argsIndex === undefined) { + srcStartOfNamed = destPositionalCount; + } - if (srcStartOfNamed >= 0) { - srcParams.forEach((srcParam, index) => { - if (index >= srcStartOfNamed) { - if (srcParam.name && srcParam.category === ParameterCategory.Simple) { - const destParam = destParamMap.get(srcParam.name); - const paramDiag = diag.createAddendum(); - if (!destParam) { - if (!destHasKwargsParam && !srcParam.hasDefault) { + if (srcStartOfNamed >= 0) { + srcParamDetails.params.forEach((srcParamInfo, index) => { + if (index >= srcStartOfNamed) { + if (srcParamInfo.param.name && srcParamInfo.param.category === ParameterCategory.Simple) { + const destParamInfo = destParamMap.get(srcParamInfo.param.name); + const paramDiag = diag?.createAddendum(); + const srcParamType = srcParamInfo.type; + + if (!destParamInfo) { + if (destParamDetails.kwargsIndex === undefined && !srcParamInfo.param.hasDefault) { + if (paramDiag) { paramDiag.addMessage( Localizer.DiagnosticAddendum.namedParamMissingInDest().format({ - name: srcParam.name, + name: srcParamInfo.param.name, }) ); - canAssign = false; - } else if (destHasKwargsParam) { - // Make sure we can assign the type to the Kwargs. 
- const destKwargsType = FunctionType.getEffectiveParameterType( - destType, - destKwargsIndex - ); - if ( - !canAssignFunctionParameter( - destKwargsType, - srcParam.type, - destKwargsIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) - ) { - canAssign = false; - } } - } else { - const specializedDestParamType = typeVarMap - ? applySolvedTypeVars(destParam.type, typeVarMap) - : destParam.type; + canAssign = false; + } else if (destParamDetails.kwargsIndex !== undefined) { + // Make sure we can assign the type to the Kwargs. if ( - !canAssignType( - srcParam.type, - specializedDestParamType, - paramDiag.createAddendum(), - undefined, + !canAssignFunctionParameter( + destParamDetails.params[destParamDetails.kwargsIndex].type, + srcParamType, + destParamDetails.params[destParamDetails.kwargsIndex].index, + diag?.createAddendum(), + destTypeVarMap, + srcTypeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { + canAssign = false; + } + } + } else { + const destParamType = destParamInfo.type; + const specializedDestParamType = destTypeVarMap + ? applySolvedTypeVars(destParamType, destTypeVarMap) + : destParamType; + + if ( + !canAssignType( + srcParamType, + specializedDestParamType, + paramDiag?.createAddendum(), + undefined, + flags, + recursionCount + ) + ) { + if (paramDiag) { paramDiag.addMessage( Localizer.DiagnosticAddendum.namedParamTypeMismatch().format({ - name: srcParam.name, + name: srcParamInfo.param.name, sourceType: printType(specializedDestParamType), - destType: printType(srcParam.type), + destType: printType(srcParamType), }) ); - canAssign = false; } - destParamMap.delete(srcParam.name); + canAssign = false; } - } - } - }); - } - // See if there are any unmatched named parameters. - destParamMap.forEach((destParam, paramName) => { - if (srcKwargsIndex >= 0 && destParam.name) { - // Make sure the dest kwargs type is compatible. 
- const srcKwargsType = FunctionType.getEffectiveParameterType(srcType, srcKwargsIndex); - if ( - !canAssignFunctionParameter( - destParam.type, - srcKwargsType, - destKwargsIndex, - diag.createAddendum(), - typeVarMap, - srcTypeVarMap, - flags, - recursionCount - ) - ) { - canAssign = false; + if (!!destParamInfo.param.hasDefault && !srcParamInfo.param.hasDefault) { + if (diag) { + diag.createAddendum().addMessage( + Localizer.DiagnosticAddendum.functionParamDefaultMissing().format({ + name: srcParamInfo.param.name, + }) + ); + } + canAssign = false; + } + + destParamMap.delete(srcParamInfo.param.name); + } } - destParamMap.delete(destParam.name); - } else { - const paramDiag = diag.createAddendum(); - paramDiag.addMessage( - Localizer.DiagnosticAddendum.namedParamMissingInSource().format({ name: paramName }) - ); - canAssign = false; } }); + } - // If both src and dest have a "*kwargs" parameter, make sure - // their types are compatible. - if (srcKwargsIndex >= 0 && destKwargsIndex >= 0) { - const srcKwargsType = FunctionType.getEffectiveParameterType(srcType, srcKwargsIndex); - const destKwargsType = FunctionType.getEffectiveParameterType(destType, destKwargsIndex); + // See if there are any unmatched named parameters. + destParamMap.forEach((destParamInfo, paramName) => { + if (srcParamDetails.kwargsIndex !== undefined && destParamInfo.param.name) { + // Make sure the src kwargs type is compatible. 
if ( !canAssignFunctionParameter( - destKwargsType, - srcKwargsType, - destKwargsIndex, - diag.createAddendum(), - typeVarMap, + destParamInfo.param.type, + srcParamDetails.params[srcParamDetails.kwargsIndex].type, + destParamInfo.index, + diag?.createAddendum(), + destTypeVarMap, srcTypeVarMap, flags, recursionCount @@ -20147,30 +22394,89 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions ) { canAssign = false; } + destParamMap.delete(paramName); + } else { + if (diag) { + diag.createAddendum().addMessage( + Localizer.DiagnosticAddendum.namedParamMissingInSource().format({ name: paramName }) + ); + } + canAssign = false; + } + }); + + // If both src and dest have a "*kwargs" parameter, make sure their types are compatible. + if (srcParamDetails.kwargsIndex !== undefined && destParamDetails.kwargsIndex !== undefined) { + if ( + !canAssignFunctionParameter( + destParamDetails.params[destParamDetails.kwargsIndex].type, + srcParamDetails.params[srcParamDetails.kwargsIndex].type, + destParamDetails.params[destParamDetails.kwargsIndex].index, + diag?.createAddendum(), + destTypeVarMap, + srcTypeVarMap, + flags, + recursionCount + ) + ) { + canAssign = false; } + } - // If the dest has a "**kwargs" but the source doesn't, report the incompatibility. - // The converse situation is OK. - if (srcKwargsIndex < 0 && destKwargsIndex >= 0) { + // If the dest has a "**kwargs" but the source doesn't, report the incompatibility. + // The converse situation is OK. 
+ if ( + !FunctionType.shouldSkipArgsKwargsCompatibilityCheck(destType) && + srcParamDetails.kwargsIndex === undefined && + destParamDetails.kwargsIndex !== undefined + ) { + if (diag) { diag.createAddendum().addMessage( Localizer.DiagnosticAddendum.kwargsParamMissing().format({ - paramName: destParams[destKwargsIndex].name!, + paramName: destParamDetails.params[destParamDetails.kwargsIndex].param.name!, }) ); + } + canAssign = false; + } + } + + // If the source and the dest are using the same ParamSpec, any additional + // concatenated parameters must match. + if ( + targetIncludesParamSpec && + srcType.details.paramSpec?.nameWithScope === destType.details.paramSpec?.nameWithScope + ) { + const srcParamCount = srcType.details.parameters.length; + const destParamCount = destType.details.parameters.length; + + if (srcParamCount !== destParamCount) { + // If the dest has an extra position-only parameter separator appended + // to the end of the signature, it's OK. + if ( + srcParamCount !== destParamCount - 1 || + destType.details.parameters[destParamCount - 1].category !== ParameterCategory.Simple || + !!destType.details.parameters[destParamCount - 1].name + ) { canAssign = false; } } } if (typeVarMap && !typeVarMap.isLocked()) { - // If the source function was generic and we solved some of the type variables + const effectiveSrcTypeVarMap = + (flags & CanAssignFlags.ReverseTypeVarMatching) === 0 ? srcTypeVarMap : destTypeVarMap; + + // If the target function was generic and we solved some of the type variables // in that generic type, assign them back to the destination typeVar. 
- srcTypeVarMap.getTypeVars().forEach((typeVarEntry) => { + effectiveSrcTypeVarMap.getTypeVars().forEach((typeVarEntry) => { canAssignType( typeVarEntry.typeVar, - srcTypeVarMap.getTypeVarType(typeVarEntry.typeVar)!, - new DiagnosticAddendum(), - typeVarMap + effectiveSrcTypeVarMap.getTypeVarType(typeVarEntry.typeVar)!, + /* diag */ undefined, + typeVarMap, + /* flags */ undefined, + recursionCount ); }); @@ -20186,30 +22492,53 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions }); // Are we assigning to a function with a ParamSpec? - if (isParamSpecInvolved) { + if (targetIncludesParamSpec) { const effectiveDestType = (flags & CanAssignFlags.ReverseTypeVarMatching) === 0 ? destType : srcType; const effectiveSrcType = (flags & CanAssignFlags.ReverseTypeVarMatching) === 0 ? srcType : destType; if (effectiveDestType.details.paramSpec) { + const requiredMatchParamCount = effectiveDestType.details.parameters.filter((p) => { + if (!p.name) { + return false; + } + if (p.category === ParameterCategory.Simple && isParamSpec(p.type)) { + return false; + } + return true; + }).length; + let matchedParamCount = 0; + const remainingParams: ParamSpecEntry[] = []; + + // If there are parameters in the source that are not matched + // to parameters in the dest, assume these are concatenated on + // to the ParamSpec. + effectiveSrcType.details.parameters.forEach((p, index) => { + if (matchedParamCount < requiredMatchParamCount) { + if (p.name) { + matchedParamCount++; + } + } else if (!p.name && p.category === ParameterCategory.Simple && remainingParams.length === 0) { + // Don't bother pushing a position-only separator if it + // is the first remaining param. 
+ } else { + remainingParams.push({ + category: p.category, + name: p.name, + isNameSynthesized: p.isNameSynthesized, + hasDefault: !!p.hasDefault, + type: FunctionType.getEffectiveParameterType(effectiveSrcType, index), + }); + } + }); + typeVarMap.setParamSpec(effectiveDestType.details.paramSpec, { - concrete: { - parameters: effectiveSrcType.details.parameters - .map((p, index) => { - const paramSpecEntry: ParamSpecEntry = { - category: p.category, - name: p.name, - hasDefault: !!p.hasDefault, - type: FunctionType.getEffectiveParameterType(effectiveSrcType, index), - }; - return paramSpecEntry; - }) - .slice( - // Skip position-only and keyword-only separators. - effectiveDestType.details.parameters.filter((p) => p.name).length, - effectiveSrcType.details.parameters.length - ), - flags: effectiveSrcType.details.flags, - }, + parameters: remainingParams, + typeVarScopeId: effectiveSrcType.details.typeVarScopeId, + docString: effectiveSrcType.details.docString, + flags: effectiveSrcType.details.flags, + paramSpec: effectiveSrcType.details.paramSpec + ? (convertToInstance(effectiveSrcType.details.paramSpec) as TypeVarType) + : undefined, }); } } @@ -20220,11 +22549,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const destReturnType = getFunctionEffectiveReturnType(destType); if (!isAnyOrUnknown(destReturnType)) { const srcReturnType = applySolvedTypeVars(getFunctionEffectiveReturnType(srcType), srcTypeVarMap); - const returnDiag = diag.createAddendum(); + const returnDiag = diag?.createAddendum(); let isReturnTypeCompatible = false; - if (isNoReturnType(srcReturnType)) { + if (isNever(srcReturnType)) { // We'll allow any function that returns NoReturn to match any // function return type, consistent with other type checkers. 
isReturnTypeCompatible = true; @@ -20232,10 +22561,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( destReturnType, srcReturnType, - returnDiag.createAddendum(), + returnDiag?.createAddendum(), typeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { isReturnTypeCompatible = true; @@ -20244,7 +22573,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // This should also act as a bool, since that's its type at runtime. if ( isClassInstance(srcReturnType) && - ClassType.isBuiltIn(srcReturnType, 'TypeGuard') && + ClassType.isBuiltIn(srcReturnType, ['TypeGuard', 'StrictTypeGuard']) && boolClassType && isInstantiableClass(boolClassType) ) { @@ -20252,10 +22581,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions canAssignType( destReturnType, ClassType.cloneAsInstance(boolClassType), - returnDiag.createAddendum(), + returnDiag?.createAddendum(), typeVarMap, flags, - recursionCount + 1 + recursionCount ) ) { isReturnTypeCompatible = true; @@ -20264,12 +22593,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } if (!isReturnTypeCompatible) { - returnDiag.addMessage( - Localizer.DiagnosticAddendum.functionReturnTypeMismatch().format({ - sourceType: printType(srcReturnType), - destType: printType(destReturnType), - }) - ); + if (returnDiag) { + returnDiag.addMessage( + Localizer.DiagnosticAddendum.functionReturnTypeMismatch().format({ + sourceType: printType(srcReturnType), + destType: printType(destReturnType), + }) + ); + } canAssign = false; } } @@ -20296,7 +22627,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions /* typeArguments */ undefined, /* isTypeArgumentExplicit */ false ), - ClassType.cloneAsInstance(declaredType), + declaredType, typeVarMap, [] ); @@ -20352,9 +22683,13 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (result) { 
assignedSubtype = ClassType.cloneAsInstance(result); } + } else if (!isTypeVar(declaredSubtype) && isTypeVar(assignedSubtype)) { + // If the source is an unsolved TypeVar but the declared type is concrete, + // use the concrete type. + return declaredSubtype; } else if (isAnyOrUnknown(assignedSubtype)) { // Any or Unknown do not narrow because they're assignable to all types. - return declaredType; + return declaredSubtype; } return assignedSubtype; @@ -20389,6 +22724,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions // If we're overriding an overloaded method, uses the last overload. if (isOverloadedFunction(baseMethod)) { baseMethod = baseMethod.overloads[baseMethod.overloads.length - 1]; + + // If the overloaded method doesn't have an implementation, skip the check. + if (FunctionType.isOverloaded(baseMethod)) { + return true; + } } // If we're overriding a non-method with a method, report it as an error. @@ -20398,30 +22738,25 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return false; } + const baseParamDetails = getParameterListDetails(baseMethod); + const overrideParamDetails = getParameterListDetails(overrideMethod); + let canOverride = true; - const baseParams = baseMethod.details.parameters; - const overrideParams = overrideMethod.details.parameters; - const overrideArgsParam = overrideParams.find( - (param) => param.category === ParameterCategory.VarArgList && !!param.name - ); - const overrideKwargsParam = overrideParams.find( - (param) => param.category === ParameterCategory.VarArgDictionary && !!param.name - ); // Verify that the param count matches exactly or that the override // adds only params that preserve the original signature. 
let foundParamCountMismatch = false; - if (overrideParams.length < baseParams.length) { - if (!overrideArgsParam || !overrideKwargsParam) { + if (overrideParamDetails.params.length < baseParamDetails.params.length) { + if (overrideParamDetails.argsIndex === undefined && overrideParamDetails.kwargsIndex === undefined) { foundParamCountMismatch = true; } - } else if (overrideParams.length > baseParams.length) { + } else if (overrideParamDetails.params.length > baseParamDetails.params.length) { // Verify that all of the override parameters that extend the // signature are either *args, **kwargs or parameters with // default values. - for (let i = baseParams.length; i < overrideParams.length; i++) { - const overrideParam = overrideParams[i]; + for (let i = baseParamDetails.params.length; i < overrideParamDetails.params.length; i++) { + const overrideParam = overrideParamDetails.params[i].param; if ( overrideParam.category === ParameterCategory.Simple && @@ -20436,17 +22771,14 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions if (foundParamCountMismatch) { diag.addMessage( Localizer.DiagnosticAddendum.overrideParamCount().format({ - baseCount: baseParams.length, - overrideCount: overrideParams.length, + baseCount: baseParamDetails.params.length, + overrideCount: overrideParamDetails.params.length, }) ); canOverride = false; } - const paramCount = Math.min(baseParams.length, overrideParams.length); - const positionOnlyIndex = baseParams.findIndex( - (param) => !param.name && param.category === ParameterCategory.Simple - ); + const paramCount = Math.min(baseParamDetails.params.length, overrideParamDetails.params.length); for (let i = 0; i < paramCount; i++) { // If the first parameter is a "self" or "cls" parameter, skip the @@ -20462,24 +22794,33 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } } - const baseParam = baseParams[i]; - const overrideParam = overrideParams[i]; + const baseParam = 
baseParamDetails.params[i].param; + const overrideParam = overrideParamDetails.params[i].param; if ( - i > positionOnlyIndex && + i >= baseParamDetails.positionOnlyParamCount && !isPrivateOrProtectedName(baseParam.name || '') && baseParam.category === ParameterCategory.Simple && baseParam.name !== overrideParam.name ) { if (overrideParam.category === ParameterCategory.Simple) { if (enforceParamNames) { - diag.addMessage( - Localizer.DiagnosticAddendum.overrideParamName().format({ - index: i + 1, - baseName: baseParam.name || '*', - overrideName: overrideParam.name || '*', - }) - ); + if (overrideParamDetails.params[i].source === ParameterSource.PositionOnly) { + diag.addMessage( + Localizer.DiagnosticAddendum.overrideParamNamePositionOnly().format({ + index: i + 1, + baseName: baseParam.name || '*', + }) + ); + } else { + diag.addMessage( + Localizer.DiagnosticAddendum.overrideParamName().format({ + index: i + 1, + baseName: baseParam.name || '*', + overrideName: overrideParam.name || '*', + }) + ); + } canOverride = false; } } @@ -20497,7 +22838,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions overrideParamType, baseParamType, diag.createAddendum(), - /* typeVarMap */ undefined, + new TypeVarMap(getTypeVarScopeId(overrideMethod)), CanAssignFlags.SkipSolveTypeVars ) ) { @@ -20521,7 +22862,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions baseReturnType, overrideReturnType, diag.createAddendum(), - /* typeVarMap */ undefined, + new TypeVarMap(getTypeVarScopeId(baseMethod)), CanAssignFlags.SkipSolveTypeVars ) ) { @@ -20539,37 +22880,53 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions } // Validates that the specified source type matches the constraints - // of the type variable. - function canAssignToTypeVar( + // of the type variable. If successful, it returns the constraint + // type that applies. If unsuccessful, it returns undefined. 
+ function applyTypeArgToTypeVar( destType: TypeVarType, srcType: Type, diag: DiagnosticAddendum, flags = CanAssignFlags.Default, recursionCount = 0 - ): boolean { + ): Type | undefined { if (recursionCount > maxTypeRecursionCount) { - return true; + return srcType; } + recursionCount++; if (isAnyOrUnknown(srcType)) { - return true; + return srcType; } let effectiveSrcType: Type = srcType; if (isTypeVar(srcType)) { - if (isTypeSame(srcType, destType)) { - return true; + if ( + isTypeSame( + srcType, + destType, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) + ) { + return srcType; } effectiveSrcType = makeTopLevelTypeVarsConcrete(srcType); } // If there's a bound type, make sure the source is derived from it. - const boundType = destType.details.boundType; - if (boundType) { + if (destType.details.boundType) { if ( - !canAssignType(boundType, effectiveSrcType, diag.createAddendum(), undefined, flags, recursionCount + 1) + !canAssignType( + destType.details.boundType, + effectiveSrcType, + diag.createAddendum(), + undefined, + flags, + recursionCount + ) ) { // Avoid adding a message that will confuse users if the TypeVar was // synthesized for internal purposes. 
@@ -20577,48 +22934,101 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag.addMessage( Localizer.DiagnosticAddendum.typeBound().format({ sourceType: printType(effectiveSrcType), - destType: printType(boundType), + destType: printType(destType.details.boundType), name: TypeVarType.getReadableName(destType), }) ); } - return false; + return undefined; } } if (destType.details.isParamSpec) { if (isParamSpec(srcType)) { - return true; + return srcType; + } + + if (isFunction(srcType) && FunctionType.isParamSpecValue(srcType)) { + return srcType; + } + + if (isClassInstance(srcType) && ClassType.isBuiltIn(srcType, 'Concatenate')) { + return srcType; } - } else if (isTypeVar(srcType) && srcType.details.isParamSpec) { + + diag.addMessage( + Localizer.DiagnosticAddendum.typeParamSpec().format({ + type: printType(srcType), + name: TypeVarType.getReadableName(destType), + }) + ); + + return undefined; + } + + if (isTypeVar(srcType) && srcType.details.isParamSpec) { diag.addMessage(Localizer.Diagnostic.paramSpecContext()); - return false; - } else { - // If there are no constraints, we're done. - const constraints = destType.details.constraints; - if (constraints.length === 0) { - return true; + return undefined; + } + + // If there are no constraints, we're done. + const constraints = destType.details.constraints; + if (constraints.length === 0) { + return srcType; + } + + if (isTypeVar(srcType) && srcType.details.constraints.length > 0) { + // Make sure all the source constraint types map to constraint types in the dest. 
+ if ( + srcType.details.constraints.every((sourceConstraint) => { + return constraints.some((destConstraint) => + canAssignType( + destConstraint, + sourceConstraint, + /* diag */ undefined, + /* typeVarMap */ undefined, + /* flags */ undefined, + recursionCount + ) + ); + }) + ) { + return srcType; } + } else { + let bestConstraintSoFar: Type | undefined; - if (isTypeVar(srcType) && srcType.details.constraints.length > 0) { - // Make sure all the source constraint types map to constraint types in the dest. + // Try to find the best (narrowest) match among the constraints. + for (const constraint of constraints) { if ( - srcType.details.constraints.every((sourceConstraint) => { - return constraints.some((destConstraint) => - canAssignType(destConstraint, sourceConstraint, new DiagnosticAddendum()) - ); - }) + canAssignType( + constraint, + effectiveSrcType, + /* diag */ undefined, + /* typeVarMap */ undefined, + /* flags */ undefined, + recursionCount + ) ) { - return true; - } - } else { - // Try to find a match among the constraints. 
- for (const constraint of constraints) { - if (canAssignType(constraint, effectiveSrcType, new DiagnosticAddendum())) { - return true; + if ( + !bestConstraintSoFar || + canAssignType( + bestConstraintSoFar, + constraint, + /* diag */ undefined, + /* typeVarMap */ undefined, + /* flags */ undefined, + recursionCount + ) + ) { + bestConstraintSoFar = constraint; } } } + + if (bestConstraintSoFar) { + return bestConstraintSoFar; + } } diag.addMessage( @@ -20628,7 +23038,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions }) ); - return false; + return undefined; } function getAbstractMethods(classType: ClassType): AbstractMethod[] { @@ -20713,7 +23123,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions memberType, memberClass || ClassType.cloneAsInstantiable(baseObj), errorNode, - recursionCount + 1, + recursionCount, firstParamType || baseObj, /* stripFirstParam */ isClassInstance(baseType) ); @@ -20734,11 +23144,11 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions : baseClass; return partiallySpecializeFunctionForBoundClassOrObject( - baseType, + TypeBase.isInstance(baseType) ? ClassType.cloneAsInstantiable(baseType) : baseType, memberType, memberClass || baseClass, errorNode, - recursionCount + 1, + recursionCount, effectiveFirstParamType, /* stripFirstParam */ true ); @@ -20748,17 +23158,17 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions const baseClass = isInstantiableClass(baseType) ? baseType : ClassType.cloneAsInstantiable(baseType); return partiallySpecializeFunctionForBoundClassOrObject( - baseType, + TypeBase.isInstance(baseType) ? 
ClassType.cloneAsInstantiable(baseType) : baseType, memberType, memberClass || baseClass, errorNode, - recursionCount + 1, + recursionCount, /* effectiveFirstParamType */ undefined, /* stripFirstParam */ false ); } } else if (isOverloadedFunction(memberType)) { - const newOverloadType = OverloadedFunctionType.create(); + const newOverloadType = OverloadedFunctionType.create([]); memberType.overloads.forEach((overload) => { if (FunctionType.isOverloaded(overload)) { const boundMethod = bindFunctionToClassOrObject( @@ -20766,7 +23176,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions overload, memberClass, /* errorNode */ undefined, - recursionCount + 1, + recursionCount, treatConstructorAsClassMember, firstParamType ); @@ -20788,7 +23198,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions overload, memberClass, errorNode, - recursionCount + 1, + recursionCount, treatConstructorAsClassMember, firstParamType ); @@ -20857,7 +23267,7 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions diag, typeVarMap, /* flags */ undefined, - recursionCount + 1 + recursionCount ) ) { if ( @@ -20943,7 +23353,10 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions fileInfo.fileContents, valueOffset, textValue.length, - parseOptions + parseOptions, + /* parseTextMode */ undefined, + /* initialParenDepth */ undefined, + fileInfo.typingSymbolAliases ); if (parseResults.parseTree) { @@ -20958,6 +23371,19 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions return undefined; } + // Given a code flow node and a constrained TypeVar, determines whether that type + // var can be "narrowed" to a single one of its constraints based on isinstance + // checks within the code flow. 
+ function narrowConstrainedTypeVar(node: ParseNode, typeVar: TypeVarType): Type | undefined { + const flowNode = AnalyzerNodeInfo.getFlowNode(node); + + if (!flowNode) { + return undefined; + } + + return codeFlowEngine.narrowConstrainedTypeVar(flowNode, typeVar); + } + const evaluatorInterface: TypeEvaluator = { runWithCancellationToken, getType, @@ -20966,12 +23392,21 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions getTypeOfClass, getTypeOfFunction, getTypeForExpressionExpectingType, + getExpectedType, + evaluateTypeForSubnode, evaluateTypesForStatement, - getDeclaredTypeForExpression, + evaluateTypesForMatchNode, + evaluateTypesForCaseNode, + evaluateTypeOfParameter, + canBeTruthy, + canBeFalsy, + removeTruthinessFromType, + removeFalsinessFromType, verifyRaiseExceptionType, verifyDeleteExpression, isAfterNodeReachable, isNodeReachable, + isAsymmetricDescriptorAssignment, suppressDiagnostics, getDeclarationsForNameNode, getTypeForDeclaration, @@ -20980,12 +23415,18 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions getTypeFromIterable, getTypeFromIterator, getGetterTypeFromProperty, + getTypeForArgument, markNamesAccessed, getScopeIdForNode, makeTopLevelTypeVarsConcrete, mapSubtypesExpandTypeVars, + populateTypeVarMapBasedOnExpectedType, + lookUpSymbolRecursive, + getDeclaredTypeOfSymbol, getEffectiveTypeOfSymbol, getEffectiveTypeOfSymbolForUsage, + getInferredTypeOfDeclaration, + getDeclaredTypeForExpression, getFunctionDeclaredReturnType, getFunctionInferredReturnType, getBestOverloadForArguments, @@ -20998,23 +23439,30 @@ export function createTypeEvaluator(importLookup: ImportLookup, evaluatorOptions getCallSignatureInfo, getTypeAnnotationForParameter, getAbstractMethods, + narrowConstrainedTypeVar, canAssignType, canOverrideMethod, canAssignProtocolClassToSelf, assignTypeToExpression, getBuiltInObject, getTypingType, + inferReturnTypeIfNecessary, addError, addWarning, addInformation, 
addUnusedCode, + addDeprecated, addDiagnostic, addDiagnosticForTextRange, printType, printFunctionParts, getTypeCacheSize, useSpeculativeMode, + setTypeForNode, + checkForCancellation, }; + const codeFlowEngine = getCodeFlowEngine(evaluatorInterface, speculativeTypeTracker); + return evaluatorInterface; } diff --git a/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts b/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts index d31a26f8eba3..1cbb1583ade7 100644 --- a/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts +++ b/packages/pyright-internal/src/analyzer/typeEvaluatorTypes.ts @@ -17,17 +17,21 @@ import { ArgumentCategory, ArgumentNode, CallNode, + CaseNode, ClassNode, ExpressionNode, FunctionNode, + MatchNode, NameNode, ParameterCategory, + ParameterNode, ParseNode, RaiseNode, } from '../parser/parseNodes'; import * as DeclarationUtils from './aliasDeclarationUtils'; import { AnalyzerFileInfo } from './analyzerFileInfo'; import { Declaration } from './declaration'; +import { SymbolWithScope } from './scope'; import { Symbol } from './symbol'; import { ClassType, @@ -36,6 +40,7 @@ import { OverloadedFunctionType, Type, TypeCondition, + TypeVarScopeId, TypeVarType, } from './types'; import { CanAssignFlags, ClassMember } from './typeUtils'; @@ -76,7 +81,7 @@ export const enum EvaluatorFlags { // The Generic class type is allowed in this context. It is // normally not allowed if ExpectingType is set. - GenericClassTypeAllowed = 1 << 9, + AllowGenericClassType = 1 << 9, // A type annotation restricts the types of expressions that are // allowed. If this flag is set, illegal type expressions are @@ -96,9 +101,6 @@ export const enum EvaluatorFlags { // the containing function's scope. 
AssociateTypeVarsWithCurrentScope = 1 << 13, - // Do not emit an error if the symbol is potentially unbound - SkipUnboundCheck = 1 << 14, - // Used for PEP 526-style variable type annotations VariableTypeAnnotation = 1 << 15, @@ -108,6 +110,32 @@ export const enum EvaluatorFlags { // 'ClassVar' is not allowed in this context. ClassVarDisallowed = 1 << 17, + + // 'Generic' cannot be used without type arguments in this context. + DisallowNakedGeneric = 1 << 18, + + // The node is not parsed by the interpreter because it is within + // a comment or a string literal. + NotParsedByInterpreter = 1 << 19, + + // Required and NotRequired are allowed in this context. + RequiredAllowed = 1 << 20, + + // Allow Unpack annotation for a tuple or TypeVarTuple. + AllowUnpackedTupleOrTypeVarTuple = 1 << 21, + + // Even though an expression is enclosed in a string literal, + // the interpreter (within a source file, not a stub) still + // parses the expression and generates parse errors. + InterpreterParsesStringLiteral = 1 << 22, + + // Allow Unpack annotation for TypedDict. + AllowUnpackedTypedDict = 1 << 23, +} + +export interface TypeArgumentResult { + type: Type; + isIncomplete?: boolean | undefined; } export interface TypeResult { @@ -135,6 +163,17 @@ export interface TypeResult { // "super" call can specify a different class or object to // bind. bindToType?: ClassType | TypeVarType | undefined; + + // Indicates that the type comes from a super() call. + isSuperCall?: boolean; + + // Is member a descriptor object that is asymmetric with respect + // to __get__ and __set__ types? + isAsymmetricDescriptor?: boolean; + + // Is the type wrapped in a "Required" or "NotRequired" class? 
+ isRequired?: boolean; + isNotRequired?: boolean; } export interface EvaluatorUsage { @@ -210,6 +249,7 @@ export interface ValidateArgTypeParams { errorNode: ExpressionNode; paramName?: string | undefined; mapsToVarArgList?: boolean | undefined; + expectingType?: boolean; } export interface AnnotationTypeOptions { @@ -218,7 +258,22 @@ export interface AnnotationTypeOptions { allowClassVar?: boolean; associateTypeVarsWithScope?: boolean; allowTypeVarTuple?: boolean; + allowParamSpec?: boolean; disallowRecursiveTypeAlias?: boolean; + allowUnpackedTypedDict?: boolean; + allowUnpackedTuple?: boolean; + notParsedByInterpreter?: boolean; +} + +export interface ExpectedTypeResult { + type: Type; + node: ParseNode; +} + +export interface FunctionResult { + returnType: Type; + argumentErrors: boolean; + isTypeIncomplete: boolean; } export interface TypeEvaluator { @@ -229,15 +284,29 @@ export interface TypeEvaluator { getTypeOfAnnotation: (node: ExpressionNode, options?: AnnotationTypeOptions) => Type; getTypeOfClass: (node: ClassNode) => ClassTypeResult | undefined; getTypeOfFunction: (node: FunctionNode) => FunctionTypeResult | undefined; - getTypeForExpressionExpectingType: (node: ExpressionNode, allowFinal: boolean) => Type; + getTypeForExpressionExpectingType: ( + node: ExpressionNode, + allowFinal?: boolean, + allowRequired?: boolean + ) => TypeResult; + evaluateTypeForSubnode: (subnode: ParseNode, callback: () => void) => TypeResult | undefined; evaluateTypesForStatement: (node: ParseNode) => void; + evaluateTypesForMatchNode: (node: MatchNode) => void; + evaluateTypesForCaseNode: (node: CaseNode) => void; + evaluateTypeOfParameter: (node: ParameterNode) => void; + + canBeTruthy: (type: Type) => boolean; + canBeFalsy: (type: Type) => boolean; + removeTruthinessFromType: (type: Type) => Type; + removeFalsinessFromType: (type: Type) => Type; - getDeclaredTypeForExpression: (expression: ExpressionNode) => Type | undefined; + getExpectedType: (node: ExpressionNode) => 
ExpectedTypeResult | undefined; verifyRaiseExceptionType: (node: RaiseNode) => void; verifyDeleteExpression: (node: ExpressionNode) => void; isAfterNodeReachable: (node: ParseNode) => boolean; - isNodeReachable: (node: ParseNode) => boolean; + isNodeReachable: (node: ParseNode, sourceNode: ParseNode | undefined) => boolean; + isAsymmetricDescriptorAssignment: (node: ParseNode) => boolean; suppressDiagnostics: (node: ParseNode, callback: () => void) => void; getDeclarationsForNameNode: (node: NameNode) => Declaration[] | undefined; @@ -255,6 +324,7 @@ export interface TypeEvaluator { getTypeFromIterable: (type: Type, isAsync: boolean, errorNode: ParseNode | undefined) => Type | undefined; getTypeFromIterator: (type: Type, isAsync: boolean, errorNode: ParseNode | undefined) => Type | undefined; getGetterTypeFromProperty: (propertyClass: ClassType, inferTypeIfNeeded: boolean) => Type | undefined; + getTypeForArgument: (arg: FunctionArgument) => TypeArgumentResult; markNamesAccessed: (node: ParseNode, names: string[]) => void; getScopeIdForNode: (node: ParseNode) => string; makeTopLevelTypeVarsConcrete: (type: Type) => Type; @@ -263,12 +333,22 @@ export interface TypeEvaluator { conditionFilter: TypeCondition[] | undefined, callback: (expandedSubtype: Type, unexpandedSubtype: Type) => Type | undefined ) => Type; + populateTypeVarMapBasedOnExpectedType: ( + type: ClassType, + expectedType: Type, + typeVarMap: TypeVarMap, + liveTypeVarScopes: TypeVarScopeId[] | undefined + ) => boolean; + lookUpSymbolRecursive: (node: ParseNode, name: string, honorCodeFlow: boolean) => SymbolWithScope | undefined; + getDeclaredTypeOfSymbol: (symbol: Symbol) => Type | undefined; getEffectiveTypeOfSymbol: (symbol: Symbol) => Type; getEffectiveTypeOfSymbolForUsage: ( symbol: Symbol, usageNode?: NameNode, useLastDecl?: boolean ) => EffectiveTypeResult; + getInferredTypeOfDeclaration: (symbol: Symbol, decl: Declaration) => Type | undefined; + getDeclaredTypeForExpression: (expression: 
ExpressionNode, usage?: EvaluatorUsage) => Type | undefined; getFunctionDeclaredReturnType: (node: FunctionNode) => Type | undefined; getFunctionInferredReturnType: (type: FunctionType, args?: ValidateArgTypeParams[]) => Type; getBestOverloadForArguments: ( @@ -297,18 +377,25 @@ export interface TypeEvaluator { ) => Type | undefined; bindFunctionToClassOrObject: ( baseType: ClassType | undefined, - memberType: FunctionType | OverloadedFunctionType + memberType: FunctionType | OverloadedFunctionType, + memberClass?: ClassType, + errorNode?: ParseNode, + recursionCount?: number, + treatConstructorAsClassMember?: boolean, + firstParamType?: ClassType | TypeVarType ) => FunctionType | OverloadedFunctionType | undefined; getCallSignatureInfo: (node: CallNode, activeIndex: number, activeOrFake: boolean) => CallSignatureInfo | undefined; getTypeAnnotationForParameter: (node: FunctionNode, paramIndex: number) => ExpressionNode | undefined; getAbstractMethods: (classType: ClassType) => AbstractMethod[]; + narrowConstrainedTypeVar: (node: ParseNode, typeVar: TypeVarType) => Type | undefined; canAssignType: ( destType: Type, srcType: Type, - diag: DiagnosticAddendum, + diag?: DiagnosticAddendum, typeVarMap?: TypeVarMap, - flags?: CanAssignFlags + flags?: CanAssignFlags, + recursionCount?: number ) => boolean; canOverrideMethod: ( baseMethod: Type, @@ -325,11 +412,13 @@ export interface TypeEvaluator { ) => void; getBuiltInObject: (node: ParseNode, name: string, typeArguments?: Type[]) => Type; getTypingType: (node: ParseNode, symbolName: string) => Type | undefined; + inferReturnTypeIfNecessary: (type: Type) => void; addError: (message: string, node: ParseNode) => Diagnostic | undefined; addWarning: (message: string, node: ParseNode) => Diagnostic | undefined; addInformation: (message: string, node: ParseNode) => Diagnostic | undefined; addUnusedCode: (node: ParseNode, textRange: TextRange) => void; + addDeprecated: (message: string, node: ParseNode) => void; addDiagnostic: ( 
diagLevel: DiagnosticLevel, @@ -350,4 +439,7 @@ export interface TypeEvaluator { getTypeCacheSize: () => number; useSpeculativeMode: (speculativeNode: ParseNode, callback: () => T) => T; + setTypeForNode: (node: ParseNode, type?: Type, flags?: EvaluatorFlags) => void; + + checkForCancellation: () => void; } diff --git a/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts b/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts index e720870635e7..f27ff0683715 100644 --- a/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts +++ b/packages/pyright-internal/src/analyzer/typeEvaluatorWithTracker.ts @@ -67,15 +67,23 @@ export function createTypeEvaluatorWithTracker( getTypeOfClass: (n) => run('getTypeOfClass', () => typeEvaluator.getTypeOfClass(n), n), getTypeOfFunction: (n) => run('getTypeOfFunction', () => typeEvaluator.getTypeOfFunction(n), n), getTypeForExpressionExpectingType: typeEvaluator.getTypeForExpressionExpectingType, + evaluateTypeForSubnode: typeEvaluator.evaluateTypeForSubnode, evaluateTypesForStatement: (n) => run('evaluateTypesForStatement', () => typeEvaluator.evaluateTypesForStatement(n), n), - getDeclaredTypeForExpression: (n) => - run('getDeclaredTypeForExpression', () => typeEvaluator.getDeclaredTypeForExpression(n), n), + evaluateTypesForMatchNode: typeEvaluator.evaluateTypesForMatchNode, + evaluateTypesForCaseNode: typeEvaluator.evaluateTypesForCaseNode, + evaluateTypeOfParameter: typeEvaluator.evaluateTypeOfParameter, + canBeTruthy: typeEvaluator.canBeTruthy, + canBeFalsy: typeEvaluator.canBeFalsy, + removeTruthinessFromType: typeEvaluator.removeTruthinessFromType, + removeFalsinessFromType: typeEvaluator.removeFalsinessFromType, + getExpectedType: (n) => run('getExpectedType', () => typeEvaluator.getExpectedType(n), n), verifyRaiseExceptionType: (n) => run('verifyRaiseExceptionType', () => typeEvaluator.verifyRaiseExceptionType(n), n), verifyDeleteExpression: (n) => run('verifyDeleteExpression', () => 
typeEvaluator.verifyDeleteExpression(n), n), isAfterNodeReachable: (n) => run('isAfterNodeReachable', () => typeEvaluator.isAfterNodeReachable(n), n), - isNodeReachable: (n) => run('isNodeReachable', () => typeEvaluator.isNodeReachable(n), n), + isNodeReachable: (n, s) => run('isNodeReachable', () => typeEvaluator.isNodeReachable(n, s), n), + isAsymmetricDescriptorAssignment: typeEvaluator.isAsymmetricDescriptorAssignment, suppressDiagnostics: (node, callback) => run('suppressDiagnostics', () => typeEvaluator.suppressDiagnostics(node, callback)), getDeclarationsForNameNode: (n) => @@ -91,15 +99,21 @@ export function createTypeEvaluatorWithTracker( run('getTypeFromIterator', () => typeEvaluator.getTypeFromIterator(t, a, e), t), getGetterTypeFromProperty: (p, i) => run('getGetterTypeFromProperty', () => typeEvaluator.getGetterTypeFromProperty(p, i), p), + getTypeForArgument: typeEvaluator.getTypeForArgument, markNamesAccessed: (n, a) => run('markNamesAccessed', () => typeEvaluator.markNamesAccessed(n, a), n), getScopeIdForNode: typeEvaluator.getScopeIdForNode, makeTopLevelTypeVarsConcrete: (t) => run('makeTopLevelTypeVarsConcrete', () => typeEvaluator.makeTopLevelTypeVarsConcrete(t), t), mapSubtypesExpandTypeVars: typeEvaluator.mapSubtypesExpandTypeVars, + populateTypeVarMapBasedOnExpectedType: typeEvaluator.populateTypeVarMapBasedOnExpectedType, + lookUpSymbolRecursive: typeEvaluator.lookUpSymbolRecursive, + getDeclaredTypeOfSymbol: typeEvaluator.getDeclaredTypeOfSymbol, getEffectiveTypeOfSymbol: (s) => run('getEffectiveTypeOfSymbol', () => typeEvaluator.getEffectiveTypeOfSymbol(s), s), getEffectiveTypeOfSymbolForUsage: (s, u, d) => run('getEffectiveTypeOfSymbolForUsage', () => typeEvaluator.getEffectiveTypeOfSymbolForUsage(s, u, d), s), + getInferredTypeOfDeclaration: typeEvaluator.getInferredTypeOfDeclaration, + getDeclaredTypeForExpression: typeEvaluator.getDeclaredTypeForExpression, getFunctionDeclaredReturnType: (n) => run('getFunctionDeclaredReturnType', () => 
typeEvaluator.getFunctionDeclaredReturnType(n), n), getFunctionInferredReturnType: (t, a) => @@ -110,13 +124,13 @@ export function createTypeEvaluatorWithTracker( getTypeFromObjectMember: typeEvaluator.getTypeFromObjectMember, getBoundMethod: typeEvaluator.getBoundMethod, getTypeFromMagicMethodReturn: typeEvaluator.getTypeFromMagicMethodReturn, - bindFunctionToClassOrObject: (b, m) => - run('bindFunctionToClassOrObject', () => typeEvaluator.bindFunctionToClassOrObject(b, m), m), + bindFunctionToClassOrObject: typeEvaluator.bindFunctionToClassOrObject, getCallSignatureInfo: (n, i, a) => run('getCallSignatureInfo', () => typeEvaluator.getCallSignatureInfo(n, i, a), n), getTypeAnnotationForParameter: (n, p) => run('getTypeAnnotationForParameter', () => typeEvaluator.getTypeAnnotationForParameter(n, p), n), getAbstractMethods: (c) => run('getAbstractMethods', () => typeEvaluator.getAbstractMethods(c), c), + narrowConstrainedTypeVar: typeEvaluator.narrowConstrainedTypeVar, canAssignType: (d, s, a, m, f) => run('canAssignType', () => typeEvaluator.canAssignType(d, s, a, m, f), d), canOverrideMethod: (b, o, d, e) => run('canOverrideMethod', () => typeEvaluator.canOverrideMethod(b, o, d, e), o), @@ -125,10 +139,12 @@ export function createTypeEvaluatorWithTracker( assignTypeToExpression: typeEvaluator.assignTypeToExpression, getBuiltInObject: typeEvaluator.getBuiltInObject, getTypingType: typeEvaluator.getTypingType, + inferReturnTypeIfNecessary: typeEvaluator.inferReturnTypeIfNecessary, addError: (m, n) => run('addError', () => typeEvaluator.addError(m, n), n), addWarning: (m, n) => run('addWarning', () => typeEvaluator.addWarning(m, n), n), addInformation: (m, n) => run('addInformation', () => typeEvaluator.addInformation(m, n), n), addUnusedCode: (n, t) => run('addUnusedCode', () => typeEvaluator.addUnusedCode(n, t), n), + addDeprecated: (m, n) => run('addDeprecated', () => typeEvaluator.addDeprecated(m, n), n), addDiagnostic: (d, r, m, n) => run('addDiagnostic', () => 
typeEvaluator.addDiagnostic(d, r, m, n), n), addDiagnosticForTextRange: (f, d, r, m, g) => run('addDiagnosticForTextRange', () => typeEvaluator.addDiagnosticForTextRange(f, d, r, m, g)), @@ -136,6 +152,8 @@ export function createTypeEvaluatorWithTracker( printFunctionParts: (t) => run('printFunctionParts', () => typeEvaluator.printFunctionParts(t), t), getTypeCacheSize: typeEvaluator.getTypeCacheSize, useSpeculativeMode: typeEvaluator.useSpeculativeMode, + setTypeForNode: typeEvaluator.setTypeForNode, + checkForCancellation: typeEvaluator.checkForCancellation, }; return withTracker; diff --git a/packages/pyright-internal/src/analyzer/typeGuards.ts b/packages/pyright-internal/src/analyzer/typeGuards.ts index 5acd6fdd9bdc..8be520b14bff 100644 --- a/packages/pyright-internal/src/analyzer/typeGuards.ts +++ b/packages/pyright-internal/src/analyzer/typeGuards.ts @@ -9,12 +9,21 @@ * negative ("else") narrowing cases. */ -import { DiagnosticAddendum } from '../common/diagnostic'; -import { ArgumentCategory, ExpressionNode, ParameterCategory, ParseNodeType } from '../parser/parseNodes'; +import { + ArgumentCategory, + ExpressionNode, + isExpressionNode, + NameNode, + ParameterCategory, + ParseNode, + ParseNodeType, +} from '../parser/parseNodes'; import { KeywordType, OperatorType } from '../parser/tokenizerTypes'; import { getFileInfo } from './analyzerNodeInfo'; -import { FlowCondition, FlowFlags } from './codeFlow'; +import { Declaration, DeclarationType } from './declaration'; import * as ParseTreeUtils from './parseTreeUtils'; +import { ScopeType } from './scope'; +import { getScopeForNode } from './scopeUtils'; import { Symbol, SymbolFlags } from './symbol'; import { getTypedDictMembersForClass } from './typedDicts'; import { EvaluatorFlags, TypeEvaluator } from './typeEvaluatorTypes'; @@ -32,8 +41,10 @@ import { isInstantiableClass, isModule, isNever, - isNone, + isNoneInstance, + isNoneTypeClass, isOverloadedFunction, + isSameWithoutLiteralValue, isTypeSame, 
isTypeVar, NoneType, @@ -48,46 +59,43 @@ import { } from './types'; import { addConditionToType, - canBeFalsy, - canBeTruthy, + applySolvedTypeVars, ClassMember, computeMroLinearization, convertToInstance, convertToInstantiable, doForEachSubtype, getTypeCondition, + getTypeVarScopeId, isLiteralType, isLiteralTypeOrUnion, - isOpenEndedTupleClass, isTupleClass, + isUnboundedTupleClass, lookUpClassMember, lookUpObjectMember, mapSubtypes, - removeFalsinessFromType, - removeTruthinessFromType, + stripLiteralValue, transformPossibleRecursiveTypeAlias, } from './typeUtils'; +import { TypeVarMap } from './typeVarMap'; export type TypeNarrowingCallback = (type: Type) => Type | undefined; -// Given a reference expression and a flow node, returns a callback that -// can be used to narrow the type described by the target expression. -// If the specified flow node is not associated with the target expression, +// Given a reference expression and a test expression, returns a callback that +// can be used to narrow the type described by the reference expression. +// If the specified flow node is not associated with the test expression, // it returns undefined. export function getTypeNarrowingCallback( evaluator: TypeEvaluator, reference: ExpressionNode, - flowNode: FlowCondition + testExpression: ExpressionNode, + isPositiveTest: boolean ): TypeNarrowingCallback | undefined { - let testExpression = flowNode.expression; - const isPositiveTest = !!(flowNode.flags & (FlowFlags.TrueCondition | FlowFlags.TrueNeverCondition)); - if (testExpression.nodeType === ParseNodeType.AssignmentExpression) { - if (ParseTreeUtils.isMatchingExpression(reference, testExpression.rightExpression)) { - testExpression = testExpression.rightExpression; - } else if (ParseTreeUtils.isMatchingExpression(reference, testExpression.name)) { - testExpression = testExpression.name; - } + return ( + getTypeNarrowingCallback(evaluator, reference, testExpression.rightExpression, isPositiveTest) ?? 
+ getTypeNarrowingCallback(evaluator, reference, testExpression.name, isPositiveTest) + ); } if (testExpression.nodeType === ParseNodeType.BinaryOperation) { @@ -117,49 +125,39 @@ export function getTypeNarrowingCallback( } if (ParseTreeUtils.isMatchingExpression(reference, leftExpression)) { - // Narrow the type by filtering on "None". return (type: Type) => { - const expandedType = mapSubtypes(type, (subtype) => { - return transformPossibleRecursiveTypeAlias(subtype); - }); - return evaluator.mapSubtypesExpandTypeVars( - expandedType, - /* conditionFilter */ undefined, - (subtype, unexpandedSubtype) => { - if (isAnyOrUnknown(subtype)) { - // We need to assume that "Any" is always both None and not None, - // so it matches regardless of whether the test is positive or negative. - return subtype; - } - - // If this is a TypeVar that isn't constrained, use the unexpanded - // TypeVar. For all other cases (including constrained TypeVars), - // use the expanded subtype. - const adjustedSubtype = - isTypeVar(unexpandedSubtype) && unexpandedSubtype.details.constraints.length === 0 - ? unexpandedSubtype - : subtype; - - // See if it's a match for object. - if (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'object')) { - return adjIsPositiveTest ? NoneType.createInstance() : adjustedSubtype; - } - - // See if it's a match for None. 
- if (isNone(subtype) === adjIsPositiveTest) { - return adjustedSubtype; - } - - return undefined; - } - ); + return narrowTypeForIsNone(evaluator, type, adjIsPositiveTest); }; } + + if ( + leftExpression.nodeType === ParseNodeType.Index && + ParseTreeUtils.isMatchingExpression(reference, leftExpression.baseExpression) && + leftExpression.items.length === 1 && + !leftExpression.trailingComma && + leftExpression.items[0].argumentCategory === ArgumentCategory.Simple && + !leftExpression.items[0].name && + leftExpression.items[0].valueExpression.nodeType === ParseNodeType.Number && + leftExpression.items[0].valueExpression.isInteger && + !leftExpression.items[0].valueExpression.isImaginary + ) { + const indexValue = leftExpression.items[0].valueExpression.value; + if (typeof indexValue === 'number') { + return (type: Type) => { + return narrowTupleTypeForIsNone(evaluator, type, adjIsPositiveTest, indexValue); + }; + } + } } // Look for "type(X) is Y" or "type(X) is not Y". if (isOrIsNotOperator && testExpression.leftExpression.nodeType === ParseNodeType.Call) { - const callType = evaluator.getTypeOfExpression(testExpression.leftExpression.leftExpression).type; + const callType = evaluator.getTypeOfExpression( + testExpression.leftExpression.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; + if ( isInstantiableClass(callType) && ClassType.isBuiltIn(callType, 'type') && @@ -168,7 +166,9 @@ export function getTypeNarrowingCallback( ) { const arg0Expr = testExpression.leftExpression.arguments[0].valueExpression; if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { - const classType = evaluator.getTypeOfExpression(testExpression.rightExpression).type; + const classType = evaluator.makeTopLevelTypeVarsConcrete( + evaluator.getTypeOfExpression(testExpression.rightExpression).type + ); if (isInstantiableClass(classType)) { return (type: Type) => { @@ -179,7 +179,7 @@ export function getTypeNarrowingCallback( } } - // Look 
for "X is Y" or "X is not Y" where Y is a an enum or False or True. + // Look for "X is Y" or "X is not Y" where Y is a an enum or bool literal. if (isOrIsNotOperator) { if (ParseTreeUtils.isMatchingExpression(reference, testExpression.leftExpression)) { const rightType = evaluator.getTypeOfExpression(testExpression.rightExpression).type; @@ -221,6 +221,7 @@ export function getTypeNarrowingCallback( } } + // Look for == X or != X if (ParseTreeUtils.isMatchingExpression(reference, testExpression.rightExpression)) { const leftType = evaluator.getTypeOfExpression(testExpression.leftExpression).type; if (isClassInstance(leftType) && leftType.literalValue !== undefined) { @@ -236,26 +237,6 @@ export function getTypeNarrowingCallback( } } - // Look for X.Y == or X.Y != - if ( - testExpression.leftExpression.nodeType === ParseNodeType.MemberAccess && - ParseTreeUtils.isMatchingExpression(reference, testExpression.leftExpression.leftExpression) - ) { - const rightType = evaluator.getTypeOfExpression(testExpression.rightExpression).type; - const memberName = testExpression.leftExpression.memberName; - if (isClassInstance(rightType) && rightType.literalValue !== undefined) { - return (type: Type) => { - return narrowTypeForDiscriminatedFieldComparison( - evaluator, - type, - memberName.value, - rightType, - adjIsPositiveTest - ); - }; - } - } - // Look for X[] == or X[] != if ( testExpression.leftExpression.nodeType === ParseNodeType.Index && @@ -299,6 +280,81 @@ export function getTypeNarrowingCallback( } } } + + // Look for len(x) == or len(x) != + if ( + equalsOrNotEqualsOperator && + testExpression.leftExpression.nodeType === ParseNodeType.Call && + testExpression.leftExpression.arguments.length === 1 && + testExpression.rightExpression.nodeType === ParseNodeType.Number && + testExpression.rightExpression.isInteger + ) { + const arg0Expr = testExpression.leftExpression.arguments[0].valueExpression; + + if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { + 
const callType = evaluator.getTypeOfExpression( + testExpression.leftExpression.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; + + if (isFunction(callType) && callType.details.fullName === 'builtins.len') { + const tupleLength = testExpression.rightExpression.value; + + if (typeof tupleLength === 'number') { + return (type: Type) => { + return narrowTypeForTupleLength(evaluator, type, tupleLength, adjIsPositiveTest); + }; + } + } + } + } + + // Look for X.Y == or X.Y != + if ( + equalsOrNotEqualsOperator && + testExpression.leftExpression.nodeType === ParseNodeType.MemberAccess && + ParseTreeUtils.isMatchingExpression(reference, testExpression.leftExpression.leftExpression) + ) { + const rightType = evaluator.getTypeOfExpression(testExpression.rightExpression).type; + const memberName = testExpression.leftExpression.memberName; + if (isClassInstance(rightType) && rightType.literalValue !== undefined) { + return (type: Type) => { + return narrowTypeForDiscriminatedFieldComparison( + evaluator, + type, + memberName.value, + rightType, + adjIsPositiveTest + ); + }; + } + } + + // Look for X.Y is or X.Y is not where is + // an enum or bool literal + if ( + testExpression.leftExpression.nodeType === ParseNodeType.MemberAccess && + ParseTreeUtils.isMatchingExpression(reference, testExpression.leftExpression.leftExpression) + ) { + const rightType = evaluator.getTypeOfExpression(testExpression.rightExpression).type; + const memberName = testExpression.leftExpression.memberName; + if ( + isClassInstance(rightType) && + (ClassType.isEnumClass(rightType) || ClassType.isBuiltIn(rightType, 'bool')) && + rightType.literalValue !== undefined + ) { + return (type: Type) => { + return narrowTypeForDiscriminatedFieldComparison( + evaluator, + type, + memberName.value, + rightType, + adjIsPositiveTest + ); + }; + } + } } if (testExpression.operator === OperatorType.In) { @@ -333,95 +389,121 @@ export function getTypeNarrowingCallback( } if 
(testExpression.nodeType === ParseNodeType.Call) { - if (testExpression.leftExpression.nodeType === ParseNodeType.Name) { - // Look for "isinstance(X, Y)" or "issubclass(X, Y)". - if ( - (testExpression.leftExpression.value === 'isinstance' || - testExpression.leftExpression.value === 'issubclass') && - testExpression.arguments.length === 2 - ) { - // Make sure the first parameter is a supported expression type - // and the second parameter is a valid class type or a tuple - // of valid class types. - const isInstanceCheck = testExpression.leftExpression.value === 'isinstance'; - const arg0Expr = testExpression.arguments[0].valueExpression; - const arg1Expr = testExpression.arguments[1].valueExpression; - if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { - const arg1Type = evaluator.getTypeOfExpression( - arg1Expr, - undefined, - EvaluatorFlags.EvaluateStringLiteralAsType | - EvaluatorFlags.ParamSpecDisallowed | - EvaluatorFlags.TypeVarTupleDisallowed - ).type; - const classTypeList = getIsInstanceClassTypes(arg1Type); - if (classTypeList) { - return (type: Type) => { - const narrowedType = narrowTypeForIsInstance( - evaluator, - type, - classTypeList, - isInstanceCheck, - isPositiveTest, - /* allowIntersections */ false, - testExpression - ); - if (!isNever(narrowedType)) { - return narrowedType; - } + const callType = evaluator.getTypeOfExpression( + testExpression.leftExpression, + /* expectedType */ undefined, + EvaluatorFlags.DoNotSpecialize + ).type; + + // Look for "isinstance(X, Y)" or "issubclass(X, Y)". + if ( + isFunction(callType) && + (callType.details.builtInName === 'isinstance' || callType.details.builtInName === 'issubclass') && + testExpression.arguments.length === 2 + ) { + // Make sure the first parameter is a supported expression type + // and the second parameter is a valid class type or a tuple + // of valid class types. 
+ const isInstanceCheck = callType.details.builtInName === 'isinstance'; + const arg0Expr = testExpression.arguments[0].valueExpression; + const arg1Expr = testExpression.arguments[1].valueExpression; + if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { + const arg1Type = evaluator.getTypeOfExpression( + arg1Expr, + undefined, + EvaluatorFlags.EvaluateStringLiteralAsType | + EvaluatorFlags.ParamSpecDisallowed | + EvaluatorFlags.TypeVarTupleDisallowed + ).type; - // Try again with intersection types allowed. - return narrowTypeForIsInstance( - evaluator, - type, - classTypeList, - isInstanceCheck, - isPositiveTest, - /* allowIntersections */ true, - testExpression - ); - }; - } - } - } else if (testExpression.leftExpression.value === 'callable' && testExpression.arguments.length === 1) { - const arg0Expr = testExpression.arguments[0].valueExpression; - if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { + const classTypeList = getIsInstanceClassTypes(arg1Type); + + if (classTypeList) { return (type: Type) => { - let narrowedType = narrowTypeForCallable( + const narrowedType = narrowTypeForIsInstance( evaluator, type, + classTypeList, + isInstanceCheck, isPositiveTest, - testExpression, - /* allowIntersections */ false + /* allowIntersections */ false, + testExpression ); - if (isPositiveTest && isNever(narrowedType)) { - // Try again with intersections allowed. - narrowedType = narrowTypeForCallable( - evaluator, - type, - isPositiveTest, - testExpression, - /* allowIntersections */ true - ); + if (!isNever(narrowedType)) { + return narrowedType; } - return narrowedType; + // Try again with intersection types allowed. 
+ return narrowTypeForIsInstance( + evaluator, + type, + classTypeList, + isInstanceCheck, + isPositiveTest, + /* allowIntersections */ true, + testExpression + ); }; } } } - if (testExpression.arguments.length >= 1) { + // Look for "callable(X)" + if ( + isFunction(callType) && + callType.details.builtInName === 'callable' && + testExpression.arguments.length === 1 + ) { const arg0Expr = testExpression.arguments[0].valueExpression; if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { - const functionType = evaluator.getTypeOfExpression(testExpression.leftExpression).type; + return (type: Type) => { + let narrowedType = narrowTypeForCallable( + evaluator, + type, + isPositiveTest, + testExpression, + /* allowIntersections */ false + ); + if (isPositiveTest && isNever(narrowedType)) { + // Try again with intersections allowed. + narrowedType = narrowTypeForCallable( + evaluator, + type, + isPositiveTest, + testExpression, + /* allowIntersections */ true + ); + } + return narrowedType; + }; + } + } + + // Look for "bool(X)" + if ( + isInstantiableClass(callType) && + ClassType.isBuiltIn(callType, 'bool') && + testExpression.arguments.length === 1 && + !testExpression.arguments[0].name + ) { + if (ParseTreeUtils.isMatchingExpression(reference, testExpression.arguments[0].valueExpression)) { + return (type: Type) => { + return narrowTypeForTruthiness(evaluator, type, isPositiveTest); + }; + } + } + + // Look for a TypeGuard function. + if (testExpression.arguments.length >= 1) { + const arg0Expr = testExpression.arguments[0].valueExpression; + if (ParseTreeUtils.isMatchingExpression(reference, arg0Expr)) { // Does this look like it's a custom type guard function? 
if ( - isFunction(functionType) && - functionType.details.declaredReturnType && - isClassInstance(functionType.details.declaredReturnType) && - ClassType.isBuiltIn(functionType.details.declaredReturnType, 'TypeGuard') + isFunction(callType) && + callType.details.declaredReturnType && + isClassInstance(callType.details.declaredReturnType) && + ClassType.isBuiltIn(callType.details.declaredReturnType, ['TypeGuard', 'StrictTypeGuard']) ) { // Evaluate the type guard call expression. const functionReturnType = evaluator.getTypeOfExpression(testExpression).type; @@ -430,8 +512,20 @@ export function getTypeNarrowingCallback( ClassType.isBuiltIn(functionReturnType, 'bool') && functionReturnType.typeGuardType ) { + const isStrictTypeGuard = ClassType.isBuiltIn( + callType.details.declaredReturnType, + 'StrictTypeGuard' + ); + const typeGuardType = functionReturnType.typeGuardType; + return (type: Type) => { - return isPositiveTest ? functionReturnType.typeGuardType : type; + return narrowTypeForUserDefinedTypeGuard( + evaluator, + type, + typeGuardType, + isPositiveTest, + isStrictTypeGuard + ); }; } } @@ -441,25 +535,252 @@ export function getTypeNarrowingCallback( if (ParseTreeUtils.isMatchingExpression(reference, testExpression)) { return (type: Type) => { - // Narrow the type based on whether the subtype can be true or false. - return mapSubtypes(type, (subtype) => { - if (isPositiveTest) { - if (canBeTruthy(subtype)) { - return removeFalsinessFromType(subtype); - } - } else { - if (canBeFalsy(subtype)) { - return removeTruthinessFromType(subtype); + return narrowTypeForTruthiness(evaluator, type, isPositiveTest); + }; + } + + // Is this a reference to an aliased conditional expression (a local variable + // that was assigned a value that can inform type narrowing of the reference expression)? 
+ if ( + testExpression.nodeType === ParseNodeType.Name && + reference.nodeType === ParseNodeType.Name && + testExpression !== reference + ) { + // Make sure the reference expression is a constant parameter or variable. + // If the reference expression is modified within the scope multiple times, + // we need to validate that it is not modified between the test expression + // evaluation and the conditional check. + const testExprDecl = getDeclsForLocalVar(evaluator, testExpression, testExpression); + if (testExprDecl && testExprDecl.length === 1 && testExprDecl[0].type === DeclarationType.Variable) { + const referenceDecls = getDeclsForLocalVar(evaluator, reference, testExpression); + + if (referenceDecls) { + let modifyingDecls: Declaration[] = []; + + if (referenceDecls.length > 1) { + // If there is more than one assignment to the reference variable within + // the local scope, make sure that none of these assignments are done + // after the test expression but before the condition check. + // + // This is OK: + // val = None + // is_none = val is None + // if is_none: ... + // + // This is not OK: + // val = None + // is_none = val is None + // val = 1 + // if is_none: ... + modifyingDecls = referenceDecls.filter((decl) => { + return ( + evaluator.isNodeReachable(testExpression, decl.node) && + evaluator.isNodeReachable(decl.node, testExprDecl[0].node) + ); + }); + } + + if (modifyingDecls.length === 0) { + const initNode = testExprDecl[0].inferredTypeSource; + + if ( + initNode && + !ParseTreeUtils.isNodeContainedWithin(testExpression, initNode) && + isExpressionNode(initNode) + ) { + return getTypeNarrowingCallback(evaluator, reference, initNode, isPositiveTest); } } - return undefined; - }); - }; + } + } + } + + // We normally won't find a "not" operator here because they are stripped out + // by the binder when it creates condition flow nodes, but we can find this + // in the case of local variables type narrowing. 
+ if (testExpression.nodeType === ParseNodeType.UnaryOperation) { + if (testExpression.operator === OperatorType.Not) { + return getTypeNarrowingCallback(evaluator, reference, testExpression.expression, !isPositiveTest); + } } return undefined; } +// Determines whether the symbol is a local variable or parameter within +// the current scope. +function getDeclsForLocalVar( + evaluator: TypeEvaluator, + name: NameNode, + reachableFrom: ParseNode +): Declaration[] | undefined { + const scope = getScopeForNode(name); + if (scope?.type !== ScopeType.Function && scope?.type !== ScopeType.Module) { + return undefined; + } + + const symbol = scope.lookUpSymbol(name.value); + if (!symbol) { + return undefined; + } + + const decls = symbol.getDeclarations(); + if ( + decls.length === 0 || + decls.some((decl) => decl.type !== DeclarationType.Variable && decl.type !== DeclarationType.Parameter) + ) { + return undefined; + } + + // If there are any assignments within different scopes (e.g. via a "global" or + // "nonlocal" reference), don't consider it a local variable. + let prevDeclScope: ParseNode | undefined; + if ( + decls.some((decl) => { + const nodeToConsider = decl.type === DeclarationType.Parameter ? decl.node.name! : decl.node; + const declScopeNode = ParseTreeUtils.getExecutionScopeNode(nodeToConsider); + if (prevDeclScope && declScopeNode !== prevDeclScope) { + return true; + } + prevDeclScope = declScopeNode; + return false; + }) + ) { + return undefined; + } + + const reachableDecls = decls.filter((decl) => evaluator.isNodeReachable(reachableFrom, decl.node)); + + return reachableDecls.length > 0 ? reachableDecls : undefined; +} + +function narrowTypeForUserDefinedTypeGuard( + evaluator: TypeEvaluator, + type: Type, + typeGuardType: Type, + isPositiveTest: boolean, + isStrictTypeGuard: boolean +): Type { + // For non-strict type guards, always narrow to the typeGuardType + // in the positive case and don't narrow in the negative case. 
+    if (!isStrictTypeGuard) {
+        return isPositiveTest ? typeGuardType : type;
+    }
+
+    // For strict type guards, narrow the current type.
+    return mapSubtypes(type, (subtype) => {
+        return mapSubtypes(typeGuardType, (typeGuardSubtype) => {
+            const isSubType = evaluator.canAssignType(typeGuardType, subtype);
+            const isSuperType = evaluator.canAssignType(subtype, typeGuardSubtype);
+
+            if (isPositiveTest) {
+                if (isSubType) {
+                    return subtype;
+                } else if (isSuperType) {
+                    return typeGuardSubtype;
+                }
+            } else {
+                // Negative test: retain the subtype only if it is neither a subtype
+                // nor a supertype of the guard type. (Fixed copy-paste bug: the
+                // original tested `!isSubType && !isSubType`, which ignored
+                // isSuperType entirely.)
+                if (!isSubType && !isSuperType) {
+                    return subtype;
+                }
+            }
+
+            return undefined;
+        });
+    });
+}
+
+// Narrow the type based on whether the subtype can be true or false.
+function narrowTypeForTruthiness(evaluator: TypeEvaluator, type: Type, isPositiveTest: boolean) {
+    return mapSubtypes(type, (subtype) => {
+        if (isPositiveTest) {
+            if (evaluator.canBeTruthy(subtype)) {
+                return evaluator.removeFalsinessFromType(subtype);
+            }
+        } else {
+            if (evaluator.canBeFalsy(subtype)) {
+                return evaluator.removeTruthinessFromType(subtype);
+            }
+        }
+        return undefined;
+    });
+}
+
+// Handle type narrowing for expressions of the form "a[I] is None" and "a[I] is not None" where
+// I is an integer and a is a union of Tuples with known lengths and entry types.
+function narrowTupleTypeForIsNone(evaluator: TypeEvaluator, type: Type, isPositiveTest: boolean, indexValue: number) { + return evaluator.mapSubtypesExpandTypeVars(type, /* conditionFilter */ undefined, (subtype) => { + if ( + !isClassInstance(subtype) || + !isTupleClass(subtype) || + isUnboundedTupleClass(subtype) || + !subtype.tupleTypeArguments + ) { + return subtype; + } + + const tupleLength = subtype.tupleTypeArguments.length; + if (indexValue < 0 || indexValue >= tupleLength) { + return subtype; + } + + const typeOfEntry = evaluator.makeTopLevelTypeVarsConcrete(subtype.tupleTypeArguments[indexValue].type); + + if (isPositiveTest) { + if (!evaluator.canAssignType(typeOfEntry, NoneType.createInstance())) { + return undefined; + } + } else { + if (isNoneInstance(typeOfEntry)) { + return undefined; + } + } + + return subtype; + }); +} + +// Handle type narrowing for expressions of the form "x is None" and "x is not None". +function narrowTypeForIsNone(evaluator: TypeEvaluator, type: Type, isPositiveTest: boolean) { + const expandedType = mapSubtypes(type, (subtype) => { + return transformPossibleRecursiveTypeAlias(subtype); + }); + + return evaluator.mapSubtypesExpandTypeVars( + expandedType, + /* conditionFilter */ undefined, + (subtype, unexpandedSubtype) => { + if (isAnyOrUnknown(subtype)) { + // We need to assume that "Any" is always both None and not None, + // so it matches regardless of whether the test is positive or negative. + return subtype; + } + + // If this is a TypeVar that isn't constrained, use the unexpanded + // TypeVar. For all other cases (including constrained TypeVars), + // use the expanded subtype. + const adjustedSubtype = + isTypeVar(unexpandedSubtype) && unexpandedSubtype.details.constraints.length === 0 + ? unexpandedSubtype + : subtype; + + // See if it's a match for object. + if (isClassInstance(subtype) && ClassType.isBuiltIn(subtype, 'object')) { + return isPositiveTest + ? 
addConditionToType(NoneType.createInstance(), subtype.condition) + : adjustedSubtype; + } + + // See if it's a match for None. + if (isNoneInstance(subtype) === isPositiveTest) { + return subtype; + } + + return undefined; + } + ); +} + // The "isinstance" and "issubclass" calls support two forms - a simple form // that accepts a single class, and a more complex form that accepts a tuple // of classes. This method determines which form and returns a list of classes @@ -474,7 +795,7 @@ function getIsInstanceClassTypes(argType: Type): (ClassType | TypeVarType | None types.forEach((subtype) => { if (isInstantiableClass(subtype) || (isTypeVar(subtype) && TypeBase.isInstantiable(subtype))) { classTypeList.push(subtype); - } else if (isNone(subtype) && TypeBase.isInstantiable(subtype)) { + } else if (isNoneTypeClass(subtype)) { classTypeList.push(subtype); } else if ( isFunction(subtype) && @@ -492,9 +813,7 @@ function getIsInstanceClassTypes(argType: Type): (ClassType | TypeVarType | None doForEachSubtype(argType, (subtype) => { if (isClass(subtype) && TypeBase.isInstance(subtype) && isTupleClass(subtype)) { if (subtype.tupleTypeArguments) { - addClassTypesToList( - isOpenEndedTupleClass(subtype) ? 
[subtype.tupleTypeArguments[0]] : subtype.tupleTypeArguments - ); + addClassTypesToList(subtype.tupleTypeArguments.map((t) => t.type)); } } else { addClassTypesToList([subtype]); @@ -550,13 +869,13 @@ function narrowTypeForIsInstance( (ClassType.isDerivedFrom(varType, concreteFilterType) || (isInstanceCheck && ClassType.isProtocolClass(concreteFilterType) && - evaluator.canAssignType(concreteFilterType, varType, new DiagnosticAddendum())) || + evaluator.canAssignType(concreteFilterType, varType)) || (ClassType.isBuiltIn(concreteFilterType, 'dict') && ClassType.isTypedDictClass(varType))); const filterIsSubclass = ClassType.isDerivedFrom(concreteFilterType, varType) || (isInstanceCheck && ClassType.isProtocolClass(varType) && - evaluator.canAssignType(varType, concreteFilterType, new DiagnosticAddendum())); + evaluator.canAssignType(varType, concreteFilterType)); if (filterIsSuperclass) { foundSuperclass = true; @@ -583,7 +902,41 @@ function narrowTypeForIsInstance( } else if (filterIsSubclass) { // If the variable type is a superclass of the isinstance // filter, we can narrow the type to the subclass. - filteredTypes.push(addConditionToType(filterType, constraints)); + let specializedFilterType = filterType; + + // Try to retain the type arguments for the filter type. This is + // important because a specialized version of the filter cannot + // be passed to isinstance or issubclass. 
+ if (isClass(filterType)) { + if ( + ClassType.isSpecialBuiltIn(filterType) || + filterType.details.typeParameters.length > 0 + ) { + const typeVarMap = new TypeVarMap(getTypeVarScopeId(filterType)); + const unspecializedFilterType = ClassType.cloneForSpecialization( + filterType, + /* typeArguments */ undefined, + /* isTypeArgumentExplicit */ false + ); + + if ( + evaluator.populateTypeVarMapBasedOnExpectedType( + unspecializedFilterType, + varType, + typeVarMap, + /* liveTypeVarScopes */ undefined + ) + ) { + specializedFilterType = applySolvedTypeVars( + unspecializedFilterType, + typeVarMap, + /* unknownIfNotFound */ true + ) as ClassType; + } + } + } + + filteredTypes.push(addConditionToType(specializedFilterType, constraints)); } else if (allowIntersections) { // The two types appear to have no relation. It's possible that the // two types are protocols or the program is expecting one type to @@ -592,7 +945,7 @@ function narrowTypeForIsInstance( // the two types. const className = ``; const fileInfo = getFileInfo(errorNode); - const newClassType = ClassType.createInstantiable( + let newClassType = ClassType.createInstantiable( className, ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), fileInfo.moduleName, @@ -606,6 +959,22 @@ function narrowTypeForIsInstance( newClassType.details.baseClasses = [ClassType.cloneAsInstantiable(varType), concreteFilterType]; computeMroLinearization(newClassType); + newClassType = addConditionToType(newClassType, concreteFilterType.condition) as ClassType; + + if ( + isTypeVar(unexpandedType) && + !unexpandedType.details.isParamSpec && + unexpandedType.details.constraints.length === 0 + ) { + newClassType = addConditionToType(newClassType, [ + { + typeVarName: TypeVarType.getNameWithScope(unexpandedType), + constraintIndex: 0, + isConstrainedTypeVar: false, + }, + ]) as ClassType; + } + filteredTypes.push(isInstanceCheck ? 
ClassType.cloneAsInstance(newClassType) : newClassType); } } @@ -692,7 +1061,7 @@ function narrowTypeForIsInstance( for (const filterType of classTypeList) { const concreteFilterType = evaluator.makeTopLevelTypeVarsConcrete(filterType); - if (evaluator.canAssignType(varType, convertToInstance(concreteFilterType), new DiagnosticAddendum())) { + if (evaluator.canAssignType(varType, convertToInstance(concreteFilterType))) { // If the filter type is a Callable, use the original type. If the // filter type is a callback protocol, use the filter type. if (isFunction(filterType)) { @@ -703,13 +1072,16 @@ function narrowTypeForIsInstance( } } } else if ( - !classTypeList.some((filterType) => - evaluator.canAssignType( - varType, - convertToInstance(evaluator.makeTopLevelTypeVarsConcrete(filterType)), - new DiagnosticAddendum() - ) - ) + !classTypeList.some((filterType) => { + // If the filter type is a runtime checkable protocol class, it can + // be used in an instance check. + const concreteFilterType = evaluator.makeTopLevelTypeVarsConcrete(filterType); + if (isClass(concreteFilterType) && !ClassType.isProtocolClass(concreteFilterType)) { + return false; + } + + return evaluator.canAssignType(varType, convertToInstance(concreteFilterType)); + }) ) { filteredTypes.push(unexpandedType); } @@ -749,8 +1121,8 @@ function narrowTypeForIsInstance( } if (isInstanceCheck) { - if (isNone(subtype)) { - const containsNoneType = classTypeList.some((t) => isNone(t) && TypeBase.isInstantiable(t)); + if (isNoneInstance(subtype)) { + const containsNoneType = classTypeList.some((t) => isNoneTypeClass(t)); if (isPositiveTest) { return containsNoneType ? subtype : undefined; } else { @@ -844,6 +1216,31 @@ function narrowTypeForIsInstance( return filteredType; } +// Attempts to narrow a union of tuples based on their known length. 
+function narrowTypeForTupleLength( + evaluator: TypeEvaluator, + referenceType: Type, + lengthValue: number, + isPositiveTest: boolean +) { + return mapSubtypes(referenceType, (subtype) => { + const concreteSubtype = evaluator.makeTopLevelTypeVarsConcrete(subtype); + + // If it's not a tuple, we can't narrow it. + if ( + !isClassInstance(concreteSubtype) || + !isTupleClass(concreteSubtype) || + isUnboundedTupleClass(concreteSubtype) || + !concreteSubtype.tupleTypeArguments + ) { + return subtype; + } + + const tupleLengthMatches = concreteSubtype.tupleTypeArguments.length === lengthValue; + return tupleLengthMatches === isPositiveTest ? subtype : undefined; + }); +} + // Attempts to narrow a type (make it more constrained) based on an "in" or // "not in" binary expression. function narrowTypeForContains(evaluator: TypeEvaluator, referenceType: Type, containerType: Type) { @@ -854,7 +1251,7 @@ function narrowTypeForContains(evaluator: TypeEvaluator, referenceType: Type, co const builtInName = containerType.details.name; - if (!['list', 'set', 'frozenset', 'deque'].some((name) => name === builtInName)) { + if (!['list', 'set', 'frozenset', 'deque', 'tuple'].some((name) => name === builtInName)) { return referenceType; } @@ -862,22 +1259,34 @@ function narrowTypeForContains(evaluator: TypeEvaluator, referenceType: Type, co return referenceType; } - const typeArg = containerType.typeArguments[0]; + let elementType = containerType.typeArguments[0]; + if (isTupleClass(containerType) && containerType.tupleTypeArguments) { + elementType = combineTypes(containerType.tupleTypeArguments.map((t) => t.type)); + } + let canNarrow = true; + const elementTypeWithoutLiteral = stripLiteralValue(elementType); - const narrowedType = mapSubtypes(referenceType, (subtype) => { - if (isAnyOrUnknown(subtype)) { + const narrowedType = mapSubtypes(referenceType, (referenceSubtype) => { + if (isAnyOrUnknown(referenceSubtype)) { canNarrow = false; - return subtype; + return referenceSubtype; 
} - if (!evaluator.canAssignType(typeArg, subtype, new DiagnosticAddendum())) { - // If the reference type isn't assignable to the element type, we will - // assume that the __contains__ method will return false. - return undefined; + if (evaluator.canAssignType(elementType, referenceSubtype)) { + return referenceSubtype; } - return subtype; + if (evaluator.canAssignType(elementTypeWithoutLiteral, referenceSubtype)) { + return mapSubtypes(elementType, (elementSubtype) => { + if (isClassInstance(elementSubtype) && isSameWithoutLiteralValue(referenceSubtype, elementSubtype)) { + return elementSubtype; + } + return undefined; + }); + } + + return undefined; }); return canNarrow ? narrowedType : referenceType; @@ -964,13 +1373,9 @@ function narrowTypeForDiscriminatedDictEntryComparison( if (tdEntry && isLiteralTypeOrUnion(tdEntry.valueType)) { if (isPositiveTest) { - return evaluator.canAssignType(tdEntry.valueType, literalType, new DiagnosticAddendum()) - ? subtype - : undefined; + return evaluator.canAssignType(tdEntry.valueType, literalType) ? subtype : undefined; } else { - return evaluator.canAssignType(literalType, tdEntry.valueType, new DiagnosticAddendum()) - ? undefined - : subtype; + return evaluator.canAssignType(literalType, tdEntry.valueType) ? 
undefined : subtype; } } } @@ -992,19 +1397,20 @@ function narrowTypeForDiscriminatedTupleComparison( let canNarrow = true; const narrowedType = mapSubtypes(referenceType, (subtype) => { - if (isClassInstance(subtype) && ClassType.isTupleClass(subtype) && !isOpenEndedTupleClass(subtype)) { - const indexValue = indexLiteralType.literalValue as number; + if ( + isClassInstance(subtype) && + ClassType.isTupleClass(subtype) && + !isUnboundedTupleClass(subtype) && + typeof indexLiteralType.literalValue === 'number' + ) { + const indexValue = indexLiteralType.literalValue; if (subtype.tupleTypeArguments && indexValue >= 0 && indexValue < subtype.tupleTypeArguments.length) { - const tupleEntryType = subtype.tupleTypeArguments[indexValue]; + const tupleEntryType = subtype.tupleTypeArguments[indexValue]?.type; if (tupleEntryType && isLiteralTypeOrUnion(tupleEntryType)) { if (isPositiveTest) { - return evaluator.canAssignType(tupleEntryType, literalType, new DiagnosticAddendum()) - ? subtype - : undefined; + return evaluator.canAssignType(tupleEntryType, literalType) ? subtype : undefined; } else { - return evaluator.canAssignType(literalType, tupleEntryType, new DiagnosticAddendum()) - ? undefined - : subtype; + return evaluator.canAssignType(literalType, tupleEntryType) ? undefined : subtype; } } } @@ -1027,8 +1433,6 @@ function narrowTypeForDiscriminatedFieldComparison( literalType: ClassType, isPositiveTest: boolean ): Type { - let canNarrow = true; - const narrowedType = mapSubtypes(referenceType, (subtype) => { let memberInfo: ClassMember | undefined; if (isClassInstance(subtype)) { @@ -1042,22 +1446,17 @@ function narrowTypeForDiscriminatedFieldComparison( if (isLiteralTypeOrUnion(memberType)) { if (isPositiveTest) { - return evaluator.canAssignType(memberType, literalType, new DiagnosticAddendum()) - ? subtype - : undefined; + return evaluator.canAssignType(memberType, literalType) ? 
subtype : undefined; } else { - return evaluator.canAssignType(literalType, memberType, new DiagnosticAddendum()) - ? undefined - : subtype; + return evaluator.canAssignType(literalType, memberType) ? undefined : subtype; } } } - canNarrow = false; return subtype; }); - return canNarrow ? narrowedType : referenceType; + return narrowedType; } // Attempts to narrow a type based on a "type(x) is y" or "type(x) is not y" check. @@ -1085,8 +1484,10 @@ function narrowTypeForTypeIs(type: Type, classType: ClassType, isPositiveTest: b // in which case `type(x) is y` would fail. return subtype; } - } else if (isNone(subtype)) { + } else if (isNoneInstance(subtype)) { return isPositiveTest ? undefined : subtype; + } else if (isAnyOrUnknown(subtype)) { + return isPositiveTest ? ClassType.cloneAsInstance(classType) : subtype; } return subtype; @@ -1118,14 +1519,14 @@ function narrowTypeForLiteralComparison( // If we're able to enumerate all possible literal values // (for bool or enum), we can eliminate all others in a negative test. const allLiteralTypes = enumerateLiteralsForType(evaluator, subtype); - if (allLiteralTypes) { + if (allLiteralTypes && allLiteralTypes.length > 0) { return combineTypes( allLiteralTypes.filter((type) => !ClassType.isLiteralValueSame(type, literalType)) ); } } } else if (isPositiveTest) { - if (isIsOperator || isNone(subtype)) { + if (isIsOperator || isNoneInstance(subtype)) { return undefined; } } @@ -1134,7 +1535,7 @@ function narrowTypeForLiteralComparison( }); } -function enumerateLiteralsForType(evaluator: TypeEvaluator, type: ClassType): ClassType[] | undefined { +export function enumerateLiteralsForType(evaluator: TypeEvaluator, type: ClassType): ClassType[] | undefined { if (ClassType.isBuiltIn(type, 'bool')) { // Booleans have only two types: True and False. return [ @@ -1147,8 +1548,8 @@ function enumerateLiteralsForType(evaluator: TypeEvaluator, type: ClassType): Cl // Enumerate all of the values in this enumeration. 
const enumList: ClassType[] = []; const fields = type.details.fields; - fields.forEach((symbol, name) => { - if (!symbol.isIgnoredForProtocolMatch() && !symbol.isInstanceMember()) { + fields.forEach((symbol) => { + if (!symbol.isIgnoredForProtocolMatch()) { const symbolType = evaluator.getEffectiveTypeOfSymbol(symbol); if ( isClassInstance(symbolType) && @@ -1207,7 +1608,7 @@ function narrowTypeForCallable( // new intersection type. const className = ``; const fileInfo = getFileInfo(errorNode); - const newClassType = ClassType.createInstantiable( + let newClassType = ClassType.createInstantiable( className, ParseTreeUtils.getClassFullName(errorNode, fileInfo.moduleName, className), fileInfo.moduleName, @@ -1221,6 +1622,8 @@ function narrowTypeForCallable( newClassType.details.baseClasses = [ClassType.cloneAsInstantiable(subtype)]; computeMroLinearization(newClassType); + newClassType = addConditionToType(newClassType, subtype.condition) as ClassType; + // Add a __call__ method to the new class. const callMethod = FunctionType.createInstance( '__call__', diff --git a/packages/pyright-internal/src/analyzer/typePrinter.ts b/packages/pyright-internal/src/analyzer/typePrinter.ts index 1cfc543d2af1..6cad3d3fafa7 100644 --- a/packages/pyright-internal/src/analyzer/typePrinter.ts +++ b/packages/pyright-internal/src/analyzer/typePrinter.ts @@ -17,16 +17,21 @@ import { isClassInstance, isInstantiableClass, isNever, + isNoneInstance, isParamSpec, + isTypeSame, + isTypeVar, + isUnpacked, isVariadicTypeVar, maxTypeRecursionCount, removeNoneFromUnion, + TupleTypeArgument, Type, TypeBase, TypeCategory, TypeVarType, } from './types'; -import { doForEachSubtype, isOptionalType, isTupleClass } from './typeUtils'; +import { convertToInstance, doForEachSubtype, isTupleClass } from './typeUtils'; const singleTickRegEx = /'/g; const escapedDoubleQuoteRegEx = /\\"/g; @@ -53,9 +58,12 @@ export const enum PrintTypeFlags { // Expand type aliases to display their individual parts? 
ExpandTypeAlias = 1 << 5, - // Add "*" for types that are conditionally constrained when + // Omit "*" for types that are conditionally constrained when // used with constrained TypeVars. OmitConditionalConstraint = 1 << 6, + + // Include a parentheses around a callable. + ParenthesizeCallable = 1 << 7, } export type FunctionReturnTypeCallback = (type: FunctionType) => Type; @@ -67,7 +75,8 @@ export function printType( recursionTypes: Type[] = [] ): string { const parenthesizeUnion = (printTypeFlags & PrintTypeFlags.ParenthesizeUnion) !== 0; - printTypeFlags &= ~PrintTypeFlags.ParenthesizeUnion; + const parenthesizeCallable = (printTypeFlags & PrintTypeFlags.ParenthesizeCallable) !== 0; + printTypeFlags &= ~(PrintTypeFlags.ParenthesizeUnion | PrintTypeFlags.ParenthesizeCallable); // If this is a type alias, see if we should use its name rather than // the type it represents. @@ -111,7 +120,12 @@ export function printType( ) { typeArg.tupleTypeArguments.forEach((tupleTypeArg) => { argumentStrings!.push( - printType(tupleTypeArg, printTypeFlags, returnTypeCallback, recursionTypes) + printType( + tupleTypeArg.type, + printTypeFlags, + returnTypeCallback, + recursionTypes + ) ); }); } else { @@ -155,7 +169,24 @@ export function printType( } } - if (recursionTypes.find((t) => t === type) || recursionTypes.length > maxTypeRecursionCount) { + if ( + recursionTypes.find( + (t) => + t === type || + (t.typeAliasInfo !== undefined && t.typeAliasInfo.fullName === type.typeAliasInfo?.fullName) + ) || + recursionTypes.length > maxTypeRecursionCount + ) { + // If this is a recursive TypeVar, we've already expanded it once, so + // just print its name at this point. 
+ if (isTypeVar(type) && type.details.isSynthesized && type.details.recursiveTypeAliasName) { + return type.details.recursiveTypeAliasName; + } + + if (type.typeAliasInfo) { + return type.typeAliasInfo.name; + } + return '...'; } @@ -210,24 +241,17 @@ export function printType( // If it's a Callable with a ParamSpec, use the // Callable notation. const parts = printFunctionParts(type, printTypeFlags, returnTypeCallback, recursionTypes); - if (type.details.paramSpec) { - if (type.details.parameters.length > 0) { - // Remove the args and kwargs parameters from the end. - const paramTypes = type.details.parameters.map((param) => - printType(param.type, printTypeFlags, returnTypeCallback, recursionTypes) - ); - return `Callable[Concatenate[${paramTypes.join(', ')}, ${TypeVarType.getReadableName( - type.details.paramSpec - )}], ${parts[1]}]`; - } - return `Callable[${TypeVarType.getReadableName(type.details.paramSpec)}, ${parts[1]}]`; - } - const paramSignature = `(${parts[0].join(', ')})`; if (FunctionType.isParamSpecValue(type)) { return paramSignature; } - return `${paramSignature} -> ${parts[1]}`; + const fullSignature = `${paramSignature} -> ${parts[1]}`; + + if (parenthesizeCallable) { + return `(${fullSignature})`; + } + + return fullSignature; } case TypeCategory.OverloadedFunction: { @@ -239,13 +263,80 @@ export function printType( } case TypeCategory.Union: { - if (isOptionalType(type)) { + // Allocate a set that refers to subtypes in the union by + // their indices. If the index is within the set, it is already + // accounted for in the output. + const subtypeHandledSet = new Set(); + + // Allocate another set that represents the textual representations + // of the subtypes in the union. + const subtypeStrings = new Set(); + + // If we're using "|" notation, enclose callable subtypes in parens. + const updatedPrintTypeFlags = + printTypeFlags & PrintTypeFlags.PEP604 + ? 
printTypeFlags | PrintTypeFlags.ParenthesizeCallable + : printTypeFlags; + + // Start by matching possible type aliases to the subtypes. + if ((printTypeFlags & PrintTypeFlags.ExpandTypeAlias) === 0 && type.typeAliasSources) { + for (const typeAliasSource of type.typeAliasSources) { + let matchedAllSubtypes = true; + let allSubtypesPreviouslyHandled = true; + const indicesCoveredByTypeAlias = new Set(); + + for (const sourceSubtype of typeAliasSource.subtypes) { + let unionSubtypeIndex = 0; + let foundMatch = false; + + for (const unionSubtype of type.subtypes) { + if ( + isTypeSame( + sourceSubtype, + unionSubtype, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ true + ) + ) { + if (!subtypeHandledSet.has(unionSubtypeIndex)) { + allSubtypesPreviouslyHandled = false; + } + indicesCoveredByTypeAlias.add(unionSubtypeIndex); + foundMatch = true; + break; + } + + unionSubtypeIndex++; + } + + if (!foundMatch) { + matchedAllSubtypes = false; + break; + } + } + + if (matchedAllSubtypes && !allSubtypesPreviouslyHandled) { + subtypeStrings.add( + printType(typeAliasSource, updatedPrintTypeFlags, returnTypeCallback, recursionTypes) + ); + indicesCoveredByTypeAlias.forEach((index) => subtypeHandledSet.add(index)); + } + } + } + + const noneIndex = type.subtypes.findIndex((subtype) => isNoneInstance(subtype)); + if (noneIndex >= 0 && !subtypeHandledSet.has(noneIndex)) { const typeWithoutNone = removeNoneFromUnion(type); if (isNever(typeWithoutNone)) { return 'None'; } - const optionalType = printType(typeWithoutNone, printTypeFlags, returnTypeCallback, recursionTypes); + const optionalType = printType( + typeWithoutNone, + updatedPrintTypeFlags, + returnTypeCallback, + recursionTypes + ); if (printTypeFlags & PrintTypeFlags.PEP604) { return optionalType + ' | None'; @@ -254,16 +345,19 @@ export function printType( return 'Optional[' + optionalType + ']'; } - const subtypeStrings = new Set(); const literalObjectStrings = new Set(); const literalClassStrings = new 
Set(); - doForEachSubtype(type, (subtype) => { - if (isClassInstance(subtype) && subtype.literalValue !== undefined) { - literalObjectStrings.add(printLiteralValue(subtype)); - } else if (isInstantiableClass(subtype) && subtype.literalValue !== undefined) { - literalClassStrings.add(printLiteralValue(subtype)); - } else { - subtypeStrings.add(printType(subtype, printTypeFlags, returnTypeCallback, recursionTypes)); + doForEachSubtype(type, (subtype, index) => { + if (!subtypeHandledSet.has(index)) { + if (isClassInstance(subtype) && subtype.literalValue !== undefined) { + literalObjectStrings.add(printLiteralValue(subtype)); + } else if (isInstantiableClass(subtype) && subtype.literalValue !== undefined) { + literalClassStrings.add(printLiteralValue(subtype)); + } else { + subtypeStrings.add( + printType(subtype, updatedPrintTypeFlags, returnTypeCallback, recursionTypes) + ); + } } }); @@ -308,7 +402,9 @@ export function printType( if (type.details.recursiveTypeAliasName) { if ((printTypeFlags & PrintTypeFlags.ExpandTypeAlias) !== 0 && type.details.boundType) { return printType( - type.details.boundType, + TypeBase.isInstance(type) + ? convertToInstance(type.details.boundType) + : type.details.boundType, printTypeFlags, returnTypeCallback, recursionTypes @@ -317,14 +413,21 @@ export function printType( return type.details.recursiveTypeAliasName; } - if (type.details.boundType) { - const boundTypeString = printType( + // If it's a synthesized type var used to implement `self` or `cls` types, + // print the type with a special character that indicates that the type + // is internally represented as a TypeVar. 
+ if (type.details.isSynthesizedSelf && type.details.boundType) { + let boundTypeString = printType( type.details.boundType, printTypeFlags & ~PrintTypeFlags.ExpandTypeAlias, returnTypeCallback, recursionTypes ); + if (!isAnyOrUnknown(type.details.boundType)) { + boundTypeString = `Self@${boundTypeString}`; + } + if (TypeBase.isInstantiable(type)) { return `Type[${boundTypeString}]`; } @@ -356,11 +459,11 @@ export function printType( } case TypeCategory.None: { - return `${TypeBase.isInstantiable(type) ? 'NoneType' : 'None'}${getConditionalIndicator(type)}`; + return `${TypeBase.isInstantiable(type) ? 'Type[None]' : 'None'}${getConditionalIndicator(type)}`; } case TypeCategory.Never: { - return 'Never'; + return type.isNoReturn ? 'NoReturn' : 'Never'; } case TypeCategory.Any: { @@ -402,6 +505,11 @@ export function printLiteralValue(type: ClassType, quotation = "'"): string { literalStr = literalValue ? 'True' : 'False'; } else if (literalValue instanceof EnumLiteral) { literalStr = `${literalValue.className}.${literalValue.itemName}`; + } else if (typeof literalValue === 'bigint') { + literalStr = literalValue.toString(); + if (literalStr.endsWith('n')) { + literalStr = literalStr.substring(0, literalStr.length - 1); + } } else { literalStr = literalValue.toString(); } @@ -425,7 +533,11 @@ export function printObjectTypeForClass( const isVariadic = lastTypeParam ? lastTypeParam.details.isVariadic : false; // If there is a type arguments array, it's a specialized class. - const typeArgs = type.tupleTypeArguments || type.typeArguments; + const typeArgs: TupleTypeArgument[] | undefined = + type.tupleTypeArguments ?? + type.typeArguments?.map((t) => { + return { type: t, isUnbounded: false }; + }); if (typeArgs) { // Handle Tuple[()] as a special case. 
if (typeArgs.length > 0) { @@ -437,37 +549,66 @@ export function printObjectTypeForClass( if ( typeParam && typeParam.details.isVariadic && - isClassInstance(typeArg) && - ClassType.isBuiltIn(typeArg, 'tuple') && - typeArg.tupleTypeArguments + isClassInstance(typeArg.type) && + ClassType.isBuiltIn(typeArg.type, 'tuple') && + typeArg.type.tupleTypeArguments ) { // Expand the tuple type that maps to the variadic type parameter. - if (typeArg.tupleTypeArguments.length === 0) { - if (!isAnyOrUnknown(typeArg)) { + if (typeArg.type.tupleTypeArguments.length === 0) { + if (!isAnyOrUnknown(typeArg.type)) { isAllAny = false; } typeArgStrings.push('()'); } else { typeArgStrings.push( - ...typeArg.tupleTypeArguments.map((typeArg) => { - if (!isAnyOrUnknown(typeArg)) { + ...typeArg.type.tupleTypeArguments.map((typeArg) => { + if (!isAnyOrUnknown(typeArg.type)) { isAllAny = false; } - return printType(typeArg, printTypeFlags, returnTypeCallback, recursionTypes); + const typeArgText = printType( + typeArg.type, + printTypeFlags, + returnTypeCallback, + recursionTypes + ); + if (typeArg.isUnbounded) { + return `*tuple[${typeArgText}, ...]`; + } + + return typeArgText; }) ); } } else { - if (!isAnyOrUnknown(typeArg)) { + if (!isAnyOrUnknown(typeArg.type)) { isAllAny = false; } - typeArgStrings.push(printType(typeArg, printTypeFlags, returnTypeCallback, recursionTypes)); + const typeArgTypeText = printType( + typeArg.type, + printTypeFlags, + returnTypeCallback, + recursionTypes + ); + + if (typeArg.isUnbounded) { + if (typeArgs.length === 1) { + typeArgStrings.push(typeArgTypeText, '...'); + } else { + typeArgStrings.push(`*tuple[${typeArgTypeText}, ...]`); + } + } else { + typeArgStrings.push(typeArgTypeText); + } } }); + if (type.isUnpacked) { + objName = '*' + objName; + } + if ((printTypeFlags & PrintTypeFlags.OmitTypeArgumentsIfAny) === 0 || !isAllAny) { objName += '[' + typeArgStrings.join(', ') + ']'; } @@ -505,6 +646,8 @@ export function printFunctionParts( recursionTypes: 
Type[] = [] ): [string[], string] { const paramTypeStrings: string[] = []; + let sawDefinedName = false; + type.details.parameters.forEach((param, index) => { // Handle specialized variadic type parameters specially. if ( @@ -518,13 +661,8 @@ export function printFunctionParts( ClassType.isBuiltIn(specializedParamType, 'tuple') && specializedParamType.tupleTypeArguments ) { - specializedParamType.tupleTypeArguments.forEach((paramType, paramIndex) => { - const paramString = `_p${(index + paramIndex).toString()}: ${printType( - paramType, - printTypeFlags, - returnTypeCallback, - recursionTypes - )}`; + specializedParamType.tupleTypeArguments.forEach((paramType) => { + const paramString = printType(paramType.type, printTypeFlags, returnTypeCallback, recursionTypes); paramTypeStrings.push(paramString); }); return; @@ -533,16 +671,21 @@ export function printFunctionParts( let paramString = ''; if (param.category === ParameterCategory.VarArgList) { - paramString += '*'; + if (!param.name || !param.isNameSynthesized) { + paramString += '*'; + } } else if (param.category === ParameterCategory.VarArgDictionary) { paramString += '**'; } - if (param.name) { + if (param.name && !param.isNameSynthesized) { paramString += param.name; + sawDefinedName = true; } let defaultValueAssignment = '='; + let isParamSpecArgsKwargsParam = false; + if (param.name) { // Avoid printing type types if parameter have unknown type. if (param.hasDeclaredType || param.isTypeInferred) { @@ -551,13 +694,25 @@ export function printFunctionParts( recursionTypes.length < maxTypeRecursionCount ? 
printType(paramType, printTypeFlags, returnTypeCallback, recursionTypes) : ''; - paramString += ': ' + paramTypeString; + + if (!param.isNameSynthesized) { + paramString += ': '; + } else if (param.category === ParameterCategory.VarArgList && !isUnpacked(paramType)) { + paramString += '*'; + } + + if (param.category === ParameterCategory.VarArgDictionary && isUnpacked(paramType)) { + paramString += '**'; + } + + paramString += paramTypeString; if (isParamSpec(paramType)) { - if (param.category === ParameterCategory.VarArgList) { - paramString += '.args'; - } else if (param.category === ParameterCategory.VarArgDictionary) { - paramString += '.kwargs'; + if ( + param.category === ParameterCategory.VarArgList || + param.category === ParameterCategory.VarArgDictionary + ) { + isParamSpecArgsKwargsParam = true; } } @@ -565,11 +720,18 @@ export function printFunctionParts( // spaces when used with a type annotation. defaultValueAssignment = ' = '; } else if ((printTypeFlags & PrintTypeFlags.OmitTypeArgumentsIfAny) === 0) { - paramString += ': Unknown'; + if (!param.isNameSynthesized) { + paramString += ': '; + } + paramString += 'Unknown'; defaultValueAssignment = ' = '; } } else if (param.category === ParameterCategory.Simple) { - paramString += '/'; + if (sawDefinedName) { + paramString += '/'; + } else { + return; + } } if (param.hasDefault) { @@ -583,15 +745,30 @@ export function printFunctionParts( } } + // If this is a (...) signature, replace the *args, **kwargs with "...". 
+ if (FunctionType.shouldSkipArgsKwargsCompatibilityCheck(type) && !isParamSpecArgsKwargsParam) { + if (param.category === ParameterCategory.VarArgList) { + paramString = '...'; + } else if (param.category === ParameterCategory.VarArgDictionary) { + return; + } + } + paramTypeStrings.push(paramString); }); + if (type.details.paramSpec) { + paramTypeStrings.push( + `**${printType(type.details.paramSpec, printTypeFlags, returnTypeCallback, recursionTypes)}` + ); + } + const returnType = returnTypeCallback(type); const returnTypeString = recursionTypes.length < maxTypeRecursionCount ? printType( returnType, - printTypeFlags | PrintTypeFlags.ParenthesizeUnion, + printTypeFlags | PrintTypeFlags.ParenthesizeUnion | PrintTypeFlags.ParenthesizeCallable, returnTypeCallback, recursionTypes ) diff --git a/packages/pyright-internal/src/analyzer/typeStubWriter.ts b/packages/pyright-internal/src/analyzer/typeStubWriter.ts index 0d700a23aee1..580234740144 100644 --- a/packages/pyright-internal/src/analyzer/typeStubWriter.ts +++ b/packages/pyright-internal/src/analyzer/typeStubWriter.ts @@ -20,6 +20,7 @@ import { IfNode, ImportFromNode, ImportNode, + MemberAccessNode, ModuleNameNode, NameNode, ParameterCategory, @@ -33,6 +34,7 @@ import { WhileNode, WithNode, } from '../parser/parseNodes'; +import { OperatorType } from '../parser/tokenizerTypes'; import * as AnalyzerNodeInfo from './analyzerNodeInfo'; import * as ParseTreeUtils from './parseTreeUtils'; import { ParseTreeWalker } from './parseTreeWalker'; @@ -41,7 +43,7 @@ import { SourceFile } from './sourceFile'; import { Symbol } from './symbol'; import * as SymbolNameUtils from './symbolNameUtils'; import { TypeEvaluator } from './typeEvaluatorTypes'; -import { isFunction, isNever, isUnknown, removeUnknownFromUnion } from './types'; +import { ClassType, isFunction, isInstantiableClass, isNever, isUnknown, removeUnknownFromUnion } from './types'; class TrackedImport { constructor(public importName: string) {} @@ -101,6 +103,16 
@@ class ImportSymbolWalker extends ParseTreeWalker { return true; } + override visitMemberAccess(node: MemberAccessNode): boolean { + const baseExpression = this._getRecursiveModuleAccessExpression(node.leftExpression); + + if (baseExpression) { + this._accessedImportedSymbols.set(`${baseExpression}.${node.memberName.value}`, true); + } + + return true; + } + override visitString(node: StringNode) { if (this._treatStringsAsSymbols) { this._accessedImportedSymbols.set(node.value, true); @@ -108,6 +120,23 @@ class ImportSymbolWalker extends ParseTreeWalker { return true; } + + private _getRecursiveModuleAccessExpression(node: ExpressionNode): string | undefined { + if (node.nodeType === ParseNodeType.Name) { + return node.value; + } + + if (node.nodeType === ParseNodeType.MemberAccess) { + const baseExpression = this._getRecursiveModuleAccessExpression(node.leftExpression); + if (!baseExpression) { + return undefined; + } + + return `${baseExpression}.${node.memberName.value}`; + } + + return undefined; + } } export class TypeStubWriter extends ParseTreeWalker { @@ -327,8 +356,17 @@ export class TypeStubWriter extends ParseTreeWalker { let line = ''; if (node.leftExpression.nodeType === ParseNodeType.Name) { - // Strip out "__all__" assignments. + // Handle "__all__" as a special case. 
if (node.leftExpression.value === '__all__') { + if (this._functionNestCount === 0 && this._ifNestCount === 0) { + this._emittedSuite = true; + + line = this._printExpression(node.leftExpression); + line += ' = '; + line += this._printExpression(node.rightExpression); + this._emitLine(line); + } + return false; } @@ -337,11 +375,22 @@ export class TypeStubWriter extends ParseTreeWalker { if (node.typeAnnotationComment) { line += ': ' + this._printExpression(node.typeAnnotationComment, /* treatStringsAsSymbols */ true); } - } - const valueType = this._evaluator.getType(node.leftExpression); - if (valueType?.typeAliasInfo) { - isTypeAlias = true; + const valueType = this._evaluator.getType(node.leftExpression); + if (valueType?.typeAliasInfo) { + isTypeAlias = true; + } else if (node.rightExpression.nodeType === ParseNodeType.Call) { + // Special-case TypeVar, TypeVarTuple, ParamSpec and NewType calls. Treat + // them like type aliases. + const callBaseType = this._evaluator.getType(node.rightExpression.leftExpression); + if ( + callBaseType && + isInstantiableClass(callBaseType) && + ClassType.isBuiltIn(callBaseType, ['TypeVar', 'TypeVarTuple', 'ParamSpec', 'NewType']) + ) { + isTypeAlias = true; + } + } } } else if (node.leftExpression.nodeType === ParseNodeType.TypeAnnotation) { const valueExpr = node.leftExpression.valueExpression; @@ -373,6 +422,18 @@ export class TypeStubWriter extends ParseTreeWalker { } override visitAugmentedAssignment(node: AugmentedAssignmentNode) { + if (node.leftExpression.nodeType === ParseNodeType.Name) { + // Handle "__all__ +=" as a special case. 
+ if (node.leftExpression.value === '__all__' && node.operator === OperatorType.AddEqual) { + if (this._functionNestCount === 0 && this._ifNestCount === 0) { + let line = this._printExpression(node.leftExpression); + line += ' += '; + line += this._printExpression(node.rightExpression); + this._emitLine(line); + } + } + } + return false; } diff --git a/packages/pyright-internal/src/analyzer/typeUtils.ts b/packages/pyright-internal/src/analyzer/typeUtils.ts index afa41508572d..9140ac595ecf 100644 --- a/packages/pyright-internal/src/analyzer/typeUtils.ts +++ b/packages/pyright-internal/src/analyzer/typeUtils.ts @@ -11,12 +11,14 @@ import { assert } from '../common/debug'; import { ParameterCategory } from '../parser/parseNodes'; import { DeclarationType } from './declaration'; import { Symbol, SymbolFlags, SymbolTable } from './symbol'; +import { isDunderName } from './symbolNameUtils'; import { isTypedDictMemberAccessedThroughIndex } from './symbolUtils'; import { AnyType, ClassType, combineTypes, findSubtype, + FunctionParameter, FunctionType, FunctionTypeFlags, isAny, @@ -26,7 +28,7 @@ import { isFunction, isInstantiableClass, isNever, - isNone, + isNoneInstance, isOverloadedFunction, isParamSpec, isTypeSame, @@ -34,6 +36,7 @@ import { isUnbound, isUnion, isUnknown, + isUnpackedClass, isVariadicTypeVar, maxTypeRecursionCount, ModuleType, @@ -42,8 +45,8 @@ import { OverloadedFunctionType, ParamSpecEntry, ParamSpecValue, - removeFromUnion, SpecializedFunctionTypes, + TupleTypeArgument, Type, TypeBase, TypeCategory, @@ -66,8 +69,15 @@ export interface ClassMember { // True if instance member, false if class member isInstanceMember: boolean; + // True if explicitly declared as "ClassVar" + isClassVar: boolean; + // True if member has declared type, false if inferred isTypeDeclared: boolean; + + // True if member lookup skipped an undeclared (inferred) type + // in a subclass before finding a declared type in a base class + skippedUndeclaredType: boolean; } export 
const enum ClassMemberLookupFlags { @@ -94,6 +104,9 @@ export const enum ClassMemberLookupFlags { // an inferred type associated with it. If this flag is set, // the search looks only for symbols with declared types. DeclaredTypesOnly = 1 << 4, + + // Skip the 'type' base class in particular. + SkipTypeBaseClass = 1 << 5, } export const enum ClassIteratorFlags { @@ -111,6 +124,9 @@ export const enum ClassIteratorFlags { // Skip the 'object' base class in particular. SkipObjectBaseClass = 1 << 2, + + // Skip the 'type' base class in particular. + SkipTypeBaseClass = 1 << 3, } export const enum CanAssignFlags { @@ -135,8 +151,10 @@ export const enum CanAssignFlags { SkipSolveTypeVars = 1 << 3, // If the dest is not Any but the src is Any, treat it - // as incompatible. - DisallowAssignFromAny = 1 << 4, + // as incompatible. Also, treat all source TypeVars as their + // concrete counterparts. This option is used for validating + // whether overload signatures overlap. + OverloadOverlapCheck = 1 << 4, // For function types, skip the return type check. SkipFunctionReturnTypeCheck = 1 << 5, @@ -147,20 +165,234 @@ export const enum CanAssignFlags { // In most cases, literals are stripped when assigning to a // type variable. This overrides the standard behavior. RetainLiteralsForTypeVar = 1 << 7, -} -interface TypeVarTransformer { - transformTypeVar: (typeVar: TypeVarType) => Type; - transformVariadicTypeVar: (paramSpec: TypeVarType) => Type[] | undefined; - transformParamSpec: (paramSpec: TypeVarType) => ParamSpecValue | undefined; - transformUnion?: (type: UnionType) => Type; -} + // When validating the type of a self or cls parameter, allow + // a type mismatch. This is used in overload consistency validation + // because overloads can provide explicit type annotations for self + // or cls. + SkipSelfClsTypeCheck = 1 << 8, + + // If an assignment is made to a TypeVar that is out of scope, + // do not generate an error. 
This is used for populating the + // typeVarMap when handling contravariant parameters in a callable. + IgnoreTypeVarScope = 1 << 9, + + // We're initially populating the TypeVarMap with an expected type, + // so TypeVars should match the specified type exactly rather than + // employing narrowing or widening, and don't strip literals. + PopulatingExpectedType = 1 << 10, + + // We're comparing type compatibility of two distinct recursive types. + // This has the potential of recursing infinitely. This flag allows us + // to detect the recursion after the first level of checking. + SkipRecursiveTypeCheck = 1 << 11, +} + +export enum ParameterSource { + PositionOnly, + PositionOrKeyword, + KeywordOnly, +} + +export interface VirtualParameterDetails { + param: FunctionParameter; + type: Type; + index: number; + source: ParameterSource; +} + +export interface ParameterListDetails { + // Virtual parameter list that refers to original parameters + params: VirtualParameterDetails[]; + + // Counts of virtual parameters + positionOnlyParamCount: number; + positionParamCount: number; + + // Indexes into virtual parameter list + kwargsIndex?: number; + argsIndex?: number; + firstKeywordOnlyIndex?: number; + firstPositionOrKeywordIndex: number; + + // Other information + hasUnpackedVariadicTypeVar: boolean; +} + +// Examines the input parameters within a function signature and creates a +// "virtual list" of parameters, stripping out any markers and expanding +// any *args with unpacked tuples. 
+export function getParameterListDetails(type: FunctionType): ParameterListDetails { + const result: ParameterListDetails = { + firstPositionOrKeywordIndex: 0, + positionParamCount: 0, + positionOnlyParamCount: 0, + params: [], + hasUnpackedVariadicTypeVar: false, + }; + + let positionOnlyIndex = type.details.parameters.findIndex( + (p) => p.category === ParameterCategory.Simple && !p.name + ); + + // Handle the old (pre Python 3.8) way of specifying positional-only + // parameters by naming them with "__". + if (positionOnlyIndex < 0) { + for (let i = 0; i < type.details.parameters.length; i++) { + const p = type.details.parameters[i]; + if (p.category !== ParameterCategory.Simple) { + break; + } + + if (!p.name) { + break; + } + + if (isDunderName(p.name) || !p.name.startsWith('__')) { + break; + } + + positionOnlyIndex = i + 1; + } + } + + if (positionOnlyIndex >= 0) { + result.firstPositionOrKeywordIndex = positionOnlyIndex; + } + + for (let i = 0; i < positionOnlyIndex; i++) { + if (type.details.parameters[i].hasDefault) { + break; + } + + result.positionOnlyParamCount++; + result.positionParamCount++; + } + + let sawKeywordOnlySeparator = false; + + const addVirtualParameter = (param: FunctionParameter, index: number, typeOverride?: Type) => { + if (param.name) { + let source: ParameterSource; + if (param.category === ParameterCategory.VarArgList) { + source = ParameterSource.PositionOnly; + } else if (sawKeywordOnlySeparator) { + source = ParameterSource.KeywordOnly; + } else if (positionOnlyIndex >= 0 && index < positionOnlyIndex) { + source = ParameterSource.PositionOnly; + } else { + source = ParameterSource.PositionOrKeyword; + } + + result.params.push({ + param, + index, + type: typeOverride ?? FunctionType.getEffectiveParameterType(type, index), + source, + }); + } + }; + + type.details.parameters.forEach((param, index) => { + if (param.category === ParameterCategory.VarArgList) { + // If this is an unpacked tuple, expand the entries. 
+ if (param.name && isUnpackedClass(param.type) && param.type.tupleTypeArguments) { + param.type.tupleTypeArguments.forEach((tupleArg, index) => { + const category = + isVariadicTypeVar(tupleArg.type) || tupleArg.isUnbounded + ? ParameterCategory.VarArgList + : ParameterCategory.Simple; + + if (category === ParameterCategory.VarArgList) { + result.argsIndex = result.params.length; + } + + if (isVariadicTypeVar(param.type)) { + result.hasUnpackedVariadicTypeVar = true; + } + + addVirtualParameter( + { + category, + name: `${param.name}[${index.toString()}]`, + type: tupleArg.type, + hasDeclaredType: true, + }, + index, + tupleArg.type + ); + }); + } else { + if (param.name && result.argsIndex === undefined) { + result.argsIndex = result.params.length; + + if (isVariadicTypeVar(param.type)) { + result.hasUnpackedVariadicTypeVar = true; + } + } + + // Normally, a VarArgList parameter (either named or as an unnamed separator) + // would signify the start of keyword-only parameters. However, we can construct + // callable signatures that defy this rule by using Callable and TypeVarTuples + // or unpacked tuples. + if (!sawKeywordOnlySeparator && (positionOnlyIndex < 0 || index >= positionOnlyIndex)) { + result.firstKeywordOnlyIndex = result.params.length; + if (param.name) { + result.firstKeywordOnlyIndex++; + } + sawKeywordOnlySeparator = true; + } + + addVirtualParameter(param, index); + } + } else if (param.category === ParameterCategory.VarArgDictionary) { + sawKeywordOnlySeparator = true; + + // Is this an unpacked TypedDict? If so, expand the entries. 
+ if (isClassInstance(param.type) && isUnpackedClass(param.type) && param.type.details.typedDictEntries) { + if (result.firstKeywordOnlyIndex === undefined) { + result.firstKeywordOnlyIndex = result.params.length; + } + + param.type.details.typedDictEntries.forEach((entry, name) => { + addVirtualParameter( + { + category: ParameterCategory.Simple, + name, + type: entry.valueType, + hasDeclaredType: true, + hasDefault: !entry.isRequired, + }, + index, + entry.valueType + ); + }); + } else if (param.name) { + if (result.kwargsIndex === undefined) { + result.kwargsIndex = result.params.length; + } + + if (result.firstKeywordOnlyIndex === undefined) { + result.firstKeywordOnlyIndex = result.params.length; + } + + addVirtualParameter(param, index); + } + } else if (param.category === ParameterCategory.Simple) { + if (param.name && !sawKeywordOnlySeparator) { + result.positionParamCount++; + } -let synthesizedTypeVarIndexForExpectedType = 1; + addVirtualParameter(param, index); + } + }); + + return result; +} export function isOptionalType(type: Type): boolean { if (isUnion(type)) { - return findSubtype(type, (subtype) => isNone(subtype)) !== undefined; + return findSubtype(type, (subtype) => isNoneInstance(subtype)) !== undefined; } return false; @@ -187,12 +419,22 @@ export function mapSubtypes(type: Type, callback: (type: Type) => Type | undefin } }); - return typeChanged ? combineTypes(newSubtypes) : type; + if (!typeChanged) { + return type; + } + + const newType = combineTypes(newSubtypes); + + // Do our best to retain type aliases. + if (newType.category === TypeCategory.Union) { + UnionType.addTypeAliasSource(newType, type); + } + return newType; } const transformedSubtype = callback(type); if (!transformedSubtype) { - return NeverType.create(); + return NeverType.createNever(); } return transformedSubtype; } @@ -411,111 +653,6 @@ export function transformPossibleRecursiveTypeAlias(type: Type | undefined): Typ return type; } -// None is always falsy. 
All other types are generally truthy -// unless they are objects that support the __bool__ or __len__ -// methods. -export function canBeFalsy(type: Type, recursionLevel = 0): boolean { - if (recursionLevel > maxTypeRecursionCount) { - return true; - } - - switch (type.category) { - case TypeCategory.Unbound: - case TypeCategory.Unknown: - case TypeCategory.Any: - case TypeCategory.Never: - case TypeCategory.None: { - return true; - } - - case TypeCategory.Union: { - return findSubtype(type, (subtype) => canBeFalsy(subtype, recursionLevel + 1)) !== undefined; - } - - case TypeCategory.Function: - case TypeCategory.OverloadedFunction: - case TypeCategory.Module: - case TypeCategory.TypeVar: { - return false; - } - - case TypeCategory.Class: { - if (TypeBase.isInstantiable(type)) { - return false; - } - - // Handle tuples specially. - if (isTupleClass(type) && type.tupleTypeArguments) { - return isOpenEndedTupleClass(type) || type.tupleTypeArguments.length === 0; - } - - // Check for Literal[False] and Literal[True]. 
- if (ClassType.isBuiltIn(type, 'bool') && type.literalValue !== undefined) { - return type.literalValue === false; - } - - const lenMethod = lookUpObjectMember(type, '__len__'); - if (lenMethod) { - return true; - } - - const boolMethod = lookUpObjectMember(type, '__bool__'); - if (boolMethod) { - return true; - } - - return false; - } - } -} - -export function canBeTruthy(type: Type, recursionLevel = 0): boolean { - if (recursionLevel > maxTypeRecursionCount) { - return true; - } - - switch (type.category) { - case TypeCategory.Unknown: - case TypeCategory.Function: - case TypeCategory.OverloadedFunction: - case TypeCategory.Module: - case TypeCategory.TypeVar: - case TypeCategory.Never: - case TypeCategory.Any: { - return true; - } - - case TypeCategory.Union: { - return findSubtype(type, (subtype) => canBeTruthy(subtype, recursionLevel + 1)) !== undefined; - } - - case TypeCategory.Unbound: - case TypeCategory.None: { - return false; - } - - case TypeCategory.Class: { - if (TypeBase.isInstantiable(type)) { - return true; - } - - // Check for Tuple[()] (an empty tuple). - if (isTupleClass(type)) { - if (type.tupleTypeArguments && type.tupleTypeArguments!.length === 0) { - return false; - } - } - - // Check for Literal[False], Literal[0], Literal[""]. 
- if (type.literalValue === false || type.literalValue === 0 || type.literalValue === '') { - return false; - } - - return true; - } - } -} - export function getTypeVarScopeId(type: Type): TypeVarScopeId | undefined { if (isClass(type)) { return type.details.typeVarScopeId; @@ -580,28 +717,89 @@ export function isLiteralTypeOrUnion(type: Type): boolean { return false; } -export function containsLiteralType(type: Type): boolean { +export function containsLiteralType(type: Type, includeTypeArgs = false, recursionCount = 0): boolean { + if (recursionCount > maxTypeRecursionCount) { + return false; + } + recursionCount++; + if (isClassInstance(type) && isLiteralType(type)) { return true; } + if (includeTypeArgs && isClass(type)) { + const typeArgs = type.tupleTypeArguments?.map((t) => t.type) || type.typeArguments; + if (typeArgs) { + return typeArgs.some((typeArg) => containsLiteralType(typeArg, includeTypeArgs, recursionCount)); + } + } + if (isUnion(type)) { - return type.subtypes.some((subtype) => isClassInstance(subtype) && isLiteralType(subtype)); + return type.subtypes.some((subtype) => containsLiteralType(subtype, includeTypeArgs, recursionCount)); + } + + if (isOverloadedFunction(type)) { + return type.overloads.some((overload) => containsLiteralType(overload, includeTypeArgs, recursionCount)); + } + + if (isFunction(type)) { + const returnType = FunctionType.getSpecializedReturnType(type); + if (returnType && containsLiteralType(returnType, includeTypeArgs, recursionCount)) { + return true; + } + + for (let i = 0; i < type.details.parameters.length; i++) { + const paramType = FunctionType.getEffectiveParameterType(type, i); + if (containsLiteralType(paramType, includeTypeArgs, recursionCount)) { + return true; + } + } } return false; } -export function isEllipsisType(type: Type): boolean { - return isAny(type) && type.isEllipsis; +// If all of the subtypes are literals with the same built-in class (e.g. 
+// all 'int' or all 'str'), this function returns the name of that type. If +// some of the subtypes are not literals or the literal classes don't match, +// it returns undefined. +export function getLiteralTypeClassName(type: Type): string | undefined { + if (isClassInstance(type)) { + if (type.literalValue !== undefined && ClassType.isBuiltIn(type)) { + return type.details.name; + } + return undefined; + } + + if (isUnion(type)) { + let className: string | undefined; + let foundMismatch = false; + + doForEachSubtype(type, (subtype) => { + const subtypeLiteralTypeName = getLiteralTypeClassName(subtype); + if (!subtypeLiteralTypeName) { + foundMismatch = true; + } else if (!className) { + className = subtypeLiteralTypeName; + } + }); + + return foundMismatch ? undefined : className; + } + + return undefined; } -export function isNoReturnType(type: Type): boolean { - return isClassInstance(type) && ClassType.isBuiltIn(type, 'NoReturn'); +export function getUnionSubtypeCount(type: Type): number { + if (isUnion(type)) { + return type.subtypes.length; + } + + return 1; } -export function removeNoReturnFromUnion(type: Type): Type { - return removeFromUnion(type, (subtype) => isNoReturnType(subtype)); +export function isEllipsisType(type: Type): boolean { + return isAny(type) && type.isEllipsis; } export function isProperty(type: Type) { @@ -615,26 +813,39 @@ export function isTupleClass(type: ClassType) { // Indicates whether the type is a tuple class of // the form tuple[x, ...] where the number of elements // in the tuple is unknown. -export function isOpenEndedTupleClass(type: ClassType) { - return ( - type.tupleTypeArguments && type.tupleTypeArguments.length === 2 && isEllipsisType(type.tupleTypeArguments[1]) - ); +export function isUnboundedTupleClass(type: ClassType) { + return type.tupleTypeArguments && type.tupleTypeArguments.some((t) => t.isUnbounded); } // Partially specializes a type within the context of a specified -// (presumably specialized) class. 
-export function partiallySpecializeType(type: Type, contextClassType: ClassType): Type { +// (presumably specialized) class. Optionally specializes the `Self` +// type variables, replacing them with selfClass. +export function partiallySpecializeType(type: Type, contextClassType: ClassType, selfClass?: ClassType): Type { // If the context class is not specialized (or doesn't need specialization), // then there's no need to do any more work. - if (ClassType.isGeneric(contextClassType)) { + if (ClassType.isUnspecialized(contextClassType)) { return type; } // Partially specialize the type using the specialized class type vars. - const typeVarMap = buildTypeVarMapFromSpecializedClass(contextClassType); + const typeVarMap = buildTypeVarMapFromSpecializedClass(contextClassType, /* makeConcrete */ undefined); + + if (selfClass) { + populateTypeVarMapForSelfType(typeVarMap, contextClassType, selfClass); + } + return applySolvedTypeVars(type, typeVarMap); } +export function populateTypeVarMapForSelfType( + typeVarMap: TypeVarMap, + contextClassType: ClassType, + selfClass: ClassType +) { + const synthesizedSelfTypeVar = synthesizeTypeVarForSelfCls(contextClassType, /* isClsParam */ false); + typeVarMap.setTypeVarType(synthesizedSelfTypeVar, convertToInstance(selfClass)); +} + // Specializes a (potentially generic) type by substituting // type variables from a type var map. export function applySolvedTypeVars( @@ -649,72 +860,19 @@ export function applySolvedTypeVars( return type; } - return _transformTypeVars(type, { - transformTypeVar: (typeVar: TypeVarType) => { - // If the type variable is unrelated to the scopes we're solving, - // don't transform that type variable. 
- if (typeVar.scopeId && typeVarMap.hasSolveForScope(typeVar.scopeId)) { - let replacement = typeVarMap.getTypeVarType(typeVar, useNarrowBoundOnly); - if (replacement) { - if (TypeBase.isInstantiable(typeVar)) { - replacement = convertToInstantiable(replacement); - } - return replacement; - } - - // If this typeVar is in scope for what we're solving but the type - // var map doesn't contain any entry for it, replace with Unknown. - if (unknownIfNotFound) { - return UnknownType.create(); - } - } - - return typeVar; - }, - transformUnion: (type: UnionType) => { - // If a union contains unsolved TypeVars within scope, eliminate them - // unless this results in an empty union. This elimination is needed - // in cases where TypeVars can go unmatched due to unions in parameter - // annotations, like this: - // def test(x: Union[str, T]) -> Union[str, T] - if (eliminateUnsolvedInUnions) { - const updatedUnion = mapSubtypes(type, (subtype) => { - if ( - isTypeVar(subtype) && - subtype.scopeId !== undefined && - typeVarMap.hasSolveForScope(subtype.scopeId) - ) { - return undefined; - } - return subtype; - }); - - return isNever(updatedUnion) ? type : updatedUnion; - } - - return type; - }, - transformVariadicTypeVar: (typeVar: TypeVarType) => { - if (!typeVar.scopeId || !typeVarMap.hasSolveForScope(typeVar.scopeId)) { - return undefined; - } - - return typeVarMap.getVariadicTypeVar(typeVar); - }, - transformParamSpec: (paramSpec: TypeVarType) => { - if (!paramSpec.scopeId || !typeVarMap.hasSolveForScope(paramSpec.scopeId)) { - return undefined; - } - - return typeVarMap.getParamSpec(paramSpec); - }, - }); + const transformer = new ApplySolvedTypeVarsTransformer( + typeVarMap, + unknownIfNotFound, + useNarrowBoundOnly, + eliminateUnsolvedInUnions + ); + return transformer.apply(type); } // During bidirectional type inference for constructors, an "executed type" // is used to prepopulate the type var map. 
This is problematic when the // expected type uses TypeVars that are not part of the context of the -// class we are constructor. We'll replace these type variables with dummy +// class we are constructing. We'll replace these type variables with dummy // type variables that are scoped to the appropriate context. export function transformExpectedTypeForConstructor( expectedType: Type, @@ -723,32 +881,6 @@ export function transformExpectedTypeForConstructor( ): Type | undefined { const isTypeVarLive = (typeVar: TypeVarType) => liveTypeVarScopes.some((scopeId) => typeVar.scopeId === scopeId); - const createDummyTypeVar = (prevTypeVar: TypeVarType) => { - // If we previously synthesized this dummy type var, just return it. - if (prevTypeVar.details.isSynthesized && prevTypeVar.details.name.startsWith(dummyTypeVarPrefix)) { - return prevTypeVar; - } - - const isInstance = TypeBase.isInstance(prevTypeVar); - let newTypeVar = TypeVarType.createInstance(`__expected_type_${synthesizedTypeVarIndexForExpectedType}`); - newTypeVar.details.isSynthesized = true; - newTypeVar.scopeId = dummyScopeId; - newTypeVar.nameWithScope = TypeVarType.makeNameWithScope(newTypeVar.details.name, dummyScopeId); - if (!isInstance) { - newTypeVar = convertToInstantiable(newTypeVar) as TypeVarType; - } - - // If the original TypeVar was bound or constrained, make the replacement as well. - newTypeVar.details.boundType = prevTypeVar.details.boundType; - newTypeVar.details.constraints = prevTypeVar.details.constraints; - - // Also copy the variance. - newTypeVar.details.variance = prevTypeVar.details.variance; - - synthesizedTypeVarIndexForExpectedType++; - return newTypeVar; - }; - // Handle "naked TypeVars" (i.e. the expectedType is a TypeVar itself) // specially. Return undefined to indicate that it's an out-of-scope // TypeVar. 
@@ -760,39 +892,59 @@ export function transformExpectedTypeForConstructor( return undefined; } - const dummyScopeId = '__expected_type_scope_id'; - const dummyTypeVarPrefix = '__expected_type_'; - typeVarMap.addSolveForScope(dummyScopeId); - - return _transformTypeVars(expectedType, { - transformTypeVar: (typeVar: TypeVarType) => { - // If the type variable is unrelated to the scopes we're solving, - // don't transform that type variable. - if (isTypeVarLive(typeVar)) { - return typeVar; - } - - return createDummyTypeVar(typeVar); - }, - transformVariadicTypeVar: (typeVar: TypeVarType) => { - return undefined; - }, - transformParamSpec: (paramSpec: TypeVarType) => { - return undefined; - }, - }); + const transformer = new ExpectedConstructorTypeTransformer(typeVarMap, liveTypeVarScopes); + return transformer.apply(expectedType); } -export function lookUpObjectMember( - objectType: Type, - memberName: string, - flags = ClassMemberLookupFlags.Default -): ClassMember | undefined { - if (isClassInstance(objectType)) { - return lookUpClassMember(objectType, memberName, flags); +// Given a protocol class, this function returns a set of all the +// symbols (indexed by symbol name) that are part of that protocol +// and its protocol parent classes. If a same-named symbol appears +// in a parent and a child, the child overrides the parent. 
+export function getProtocolSymbols(classType: ClassType) { + const symbolMap = new Map(); + + if (ClassType.isProtocolClass(classType)) { + getProtocolSymbolsRecursive(classType, symbolMap); } - return undefined; + return symbolMap; +} + +function getProtocolSymbolsRecursive(classType: ClassType, symbolMap: Map, recursionCount = 0) { + if (recursionCount > maxTypeRecursionCount) { + return; + } + + classType.details.baseClasses.forEach((baseClass) => { + if (isClass(baseClass) && ClassType.isProtocolClass(baseClass)) { + getProtocolSymbolsRecursive(baseClass, symbolMap, recursionCount + 1); + } + }); + + classType.details.fields.forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch()) { + symbolMap.set(name, { + symbol, + classType, + isInstanceMember: symbol.isInstanceMember(), + isClassVar: symbol.isClassVar(), + isTypeDeclared: symbol.hasTypedDeclarations(), + skippedUndeclaredType: false, + }); + } + }); +} + +export function lookUpObjectMember( + objectType: Type, + memberName: string, + flags = ClassMemberLookupFlags.Default +): ClassMember | undefined { + if (isClassInstance(objectType)) { + return lookUpClassMember(objectType, memberName, flags); + } + + return undefined; } // Looks up a member in a class using the multiple-inheritance rules @@ -817,6 +969,7 @@ export function lookUpClassMember( // (self) -> Iterator[str]. 
export function* getClassMemberIterator(classType: Type, memberName: string, flags = ClassMemberLookupFlags.Default) { const declaredTypesOnly = (flags & ClassMemberLookupFlags.DeclaredTypesOnly) !== 0; + let skippedUndeclaredType = false; if (isClass(classType)) { let classFlags = ClassIteratorFlags.Default; @@ -829,6 +982,9 @@ export function* getClassMemberIterator(classType: Type, memberName: string, fla if (flags & ClassMemberLookupFlags.SkipObjectBaseClass) { classFlags = classFlags | ClassIteratorFlags.SkipObjectBaseClass; } + if (flags & ClassMemberLookupFlags.SkipTypeBaseClass) { + classFlags = classFlags | ClassIteratorFlags.SkipTypeBaseClass; + } const classItr = getClassIterator(classType, classFlags); @@ -840,8 +996,10 @@ export function* getClassMemberIterator(classType: Type, memberName: string, fla const cm: ClassMember = { symbol: Symbol.createWithType(SymbolFlags.None, UnknownType.create()), isInstanceMember: false, + isClassVar: true, classType: UnknownType.create(), isTypeDeclared: false, + skippedUndeclaredType: false, }; yield cm; } @@ -863,10 +1021,14 @@ export function* getClassMemberIterator(classType: Type, memberName: string, fla const cm: ClassMember = { symbol, isInstanceMember: true, + isClassVar: symbol.isClassVar(), classType: specializedMroClass, isTypeDeclared: hasDeclaredType, + skippedUndeclaredType, }; yield cm; + } else { + skippedUndeclaredType = true; } } } @@ -893,10 +1055,14 @@ export function* getClassMemberIterator(classType: Type, memberName: string, fla const cm: ClassMember = { symbol, isInstanceMember, + isClassVar: symbol.isClassVar(), classType: specializedMroClass, isTypeDeclared: hasDeclaredType, + skippedUndeclaredType, }; yield cm; + } else { + skippedUndeclaredType = true; } } } @@ -906,8 +1072,10 @@ export function* getClassMemberIterator(classType: Type, memberName: string, fla const cm: ClassMember = { symbol: Symbol.createWithType(SymbolFlags.None, UnknownType.create()), isInstanceMember: false, + 
isClassVar: true, classType: UnknownType.create(), isTypeDeclared: false, + skippedUndeclaredType: false, }; yield cm; } @@ -933,7 +1101,16 @@ export function* getClassIterator(classType: Type, flags = ClassIteratorFlags.De if (flags & ClassIteratorFlags.SkipObjectBaseClass) { if (isInstantiableClass(specializedMroClass)) { if (ClassType.isBuiltIn(specializedMroClass, 'object')) { - continue; + break; + } + } + } + + // Should we ignore members on the 'type' base class? + if (flags & ClassIteratorFlags.SkipTypeBaseClass) { + if (isInstantiableClass(specializedMroClass)) { + if (ClassType.isBuiltIn(specializedMroClass, 'type')) { + break; } } } @@ -949,6 +1126,37 @@ export function* getClassIterator(classType: Type, flags = ClassIteratorFlags.De return undefined; } +export function getClassFieldsRecursive(classType: ClassType): Map { + const memberMap = new Map(); + + // Evaluate the types of members from the end of the MRO to the beginning. + for (let i = classType.details.mro.length - 1; i >= 0; i--) { + const mroClass = partiallySpecializeType(classType.details.mro[i], classType); + + // If this ancestor class is unknown, throw away all symbols + // found so far because they could be overridden by the unknown class. + if (!isClass(mroClass)) { + memberMap.clear(); + continue; + } + + mroClass.details.fields.forEach((symbol, name) => { + if (!symbol.isIgnoredForProtocolMatch() && symbol.hasTypedDeclarations()) { + memberMap.set(name, { + classType: mroClass, + symbol, + isInstanceMember: symbol.isInstanceMember(), + isClassVar: symbol.isClassVar(), + isTypeDeclared: true, + skippedUndeclaredType: false, + }); + } + }); + } + + return memberMap; +} + // Combines two lists of type var types, maintaining the combined order // but removing any duplicates. 
export function addTypeVarsToListIfUnique(list1: TypeVarType[], list2: TypeVarType[]) { @@ -968,12 +1176,13 @@ export function getTypeVarArgumentsRecursive(type: Type, recursionCount = 0): Ty if (recursionCount > maxTypeRecursionCount) { return []; } + recursionCount++; const getTypeVarsFromClass = (classType: ClassType) => { const combinedList: TypeVarType[] = []; if (classType.typeArguments) { classType.typeArguments.forEach((typeArg) => { - addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(typeArg, recursionCount + 1)); + addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(typeArg, recursionCount)); }); } @@ -984,7 +1193,7 @@ export function getTypeVarArgumentsRecursive(type: Type, recursionCount = 0): Ty const combinedList: TypeVarType[] = []; type.typeAliasInfo?.typeArguments.forEach((typeArg) => { - addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(typeArg, recursionCount + 1)); + addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(typeArg, recursionCount)); }); return combinedList; @@ -1011,7 +1220,7 @@ export function getTypeVarArgumentsRecursive(type: Type, recursionCount = 0): Ty if (isUnion(type)) { const combinedList: TypeVarType[] = []; doForEachSubtype(type, (subtype) => { - addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(subtype, recursionCount + 1)); + addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(subtype, recursionCount)); }); return combinedList; } @@ -1022,7 +1231,7 @@ export function getTypeVarArgumentsRecursive(type: Type, recursionCount = 0): Ty for (let i = 0; i < type.details.parameters.length; i++) { addTypeVarsToListIfUnique( combinedList, - getTypeVarArgumentsRecursive(FunctionType.getEffectiveParameterType(type, i), recursionCount + 1) + getTypeVarArgumentsRecursive(FunctionType.getEffectiveParameterType(type, i), recursionCount) ); } @@ -1032,7 +1241,7 @@ export function getTypeVarArgumentsRecursive(type: Type, 
recursionCount = 0): Ty const returnType = FunctionType.getSpecializedReturnType(type); if (returnType) { - addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(returnType, recursionCount + 1)); + addTypeVarsToListIfUnique(combinedList, getTypeVarArgumentsRecursive(returnType, recursionCount)); } return combinedList; @@ -1041,23 +1250,12 @@ export function getTypeVarArgumentsRecursive(type: Type, recursionCount = 0): Ty return []; } -// If the class is generic, the type is cloned, and its own -// type parameters are used as type arguments. This is useful -// for typing "self" or "cls" within a class's implementation. -export function selfSpecializeClassType(type: ClassType, includeSubclasses = false): ClassType { - if (!ClassType.isGeneric(type) && !includeSubclasses) { - return type; - } - - const typeArgs = ClassType.getTypeParameters(type); - return ClassType.cloneForSpecialization(type, typeArgs, /* isTypeArgumentExplicit */ false, includeSubclasses); -} - // Creates a specialized version of the class, filling in any unspecified // type arguments with Unknown. 
export function specializeClassType(type: ClassType): ClassType { const typeVarMap = new TypeVarMap(getTypeVarScopeId(type)); const typeParams = ClassType.getTypeParameters(type); + typeParams.forEach((typeParam) => { typeVarMap.setTypeVarType(typeParam, UnknownType.create()); }); @@ -1071,6 +1269,7 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar if (recursionCount > maxTypeRecursionCount) { return; } + recursionCount++; if (typeVarMap.isLocked()) { return; @@ -1079,19 +1278,19 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar switch (destType.category) { case TypeCategory.Union: doForEachSubtype(destType, (subtype) => { - setTypeArgumentsRecursive(subtype, srcType, typeVarMap, recursionCount + 1); + setTypeArgumentsRecursive(subtype, srcType, typeVarMap, recursionCount); }); break; case TypeCategory.Class: if (destType.typeArguments) { destType.typeArguments.forEach((typeArg) => { - setTypeArgumentsRecursive(typeArg, srcType, typeVarMap, recursionCount + 1); + setTypeArgumentsRecursive(typeArg, srcType, typeVarMap, recursionCount); }); } if (destType.tupleTypeArguments) { destType.tupleTypeArguments.forEach((typeArg) => { - setTypeArgumentsRecursive(typeArg, srcType, typeVarMap, recursionCount + 1); + setTypeArgumentsRecursive(typeArg.type, srcType, typeVarMap, recursionCount); }); } break; @@ -1099,34 +1298,29 @@ export function setTypeArgumentsRecursive(destType: Type, srcType: Type, typeVar case TypeCategory.Function: if (destType.specializedTypes) { destType.specializedTypes.parameterTypes.forEach((paramType) => { - setTypeArgumentsRecursive(paramType, srcType, typeVarMap, recursionCount + 1); + setTypeArgumentsRecursive(paramType, srcType, typeVarMap, recursionCount); }); if (destType.specializedTypes.returnType) { setTypeArgumentsRecursive( destType.specializedTypes.returnType, srcType, typeVarMap, - recursionCount + 1 + recursionCount ); } } else { 
destType.details.parameters.forEach((param) => { - setTypeArgumentsRecursive(param.type, srcType, typeVarMap, recursionCount + 1); + setTypeArgumentsRecursive(param.type, srcType, typeVarMap, recursionCount); }); if (destType.details.declaredReturnType) { - setTypeArgumentsRecursive( - destType.details.declaredReturnType, - srcType, - typeVarMap, - recursionCount + 1 - ); + setTypeArgumentsRecursive(destType.details.declaredReturnType, srcType, typeVarMap, recursionCount); } } break; case TypeCategory.OverloadedFunction: destType.overloads.forEach((subtype) => { - setTypeArgumentsRecursive(subtype, srcType, typeVarMap, recursionCount + 1); + setTypeArgumentsRecursive(subtype, srcType, typeVarMap, recursionCount); }); break; @@ -1176,19 +1370,31 @@ export function buildTypeVarMap( typeArgType = typeArgs[index]; if (isFunction(typeArgType) && FunctionType.isParamSpecValue(typeArgType)) { const paramSpecEntries: ParamSpecEntry[] = []; - typeArgType.details.parameters.forEach((param) => { + const typeArgFunctionType = typeArgType; + typeArgType.details.parameters.forEach((param, paramIndex) => { paramSpecEntries.push({ category: param.category, name: param.name, hasDefault: !!param.hasDefault, - type: param.type, + isNameSynthesized: param.isNameSynthesized, + type: FunctionType.getEffectiveParameterType(typeArgFunctionType, paramIndex), }); }); typeVarMap.setParamSpec(typeParam, { - concrete: { parameters: paramSpecEntries, flags: typeArgType.details.flags }, + parameters: paramSpecEntries, + typeVarScopeId: typeArgType.details.typeVarScopeId, + flags: typeArgType.details.flags, + docString: typeArgType.details.docString, + paramSpec: typeArgType.details.paramSpec, }); } else if (isParamSpec(typeArgType)) { - typeVarMap.setParamSpec(typeParam, { paramSpec: typeArgType }); + typeVarMap.setParamSpec(typeParam, { + flags: FunctionTypeFlags.None, + parameters: [], + typeVarScopeId: undefined, + docString: undefined, + paramSpec: typeArgType, + }); } } } else { @@ -1198,7 
+1404,7 @@ export function buildTypeVarMap( typeArgType = typeArgs[index]; } - typeVarMap.setTypeVarType(typeParam, typeArgType, /* wideBound */ undefined, /* retainLiteral */ true); + typeVarMap.setTypeVarType(typeParam, typeArgType, typeArgType, /* retainLiteral */ true); } } }); @@ -1244,130 +1450,90 @@ export function derivesFromClassRecursive(classType: ClassType, baseClassToFind: return false; } -// Filters a type such that that no part of it is definitely -// falsy. For example, if a type is a union of None -// and an "int", this method would strip off the "None" -// and return only the "int". -export function removeFalsinessFromType(type: Type): Type { - return mapSubtypes(type, (subtype) => { - if (isClassInstance(subtype)) { - if (subtype.literalValue !== undefined) { - // If the object is already definitely truthy, it's fine to - // include, otherwise it should be removed. - return subtype.literalValue ? subtype : undefined; - } - - // If the object is a bool, make it "true", since - // "false" is a falsy value. - if (ClassType.isBuiltIn(subtype, 'bool')) { - return ClassType.cloneWithLiteral(subtype, /* value */ true); - } - } - - // If it's possible for the type to be truthy, include it. - if (canBeTruthy(subtype)) { - return subtype; - } - - return undefined; - }); -} - -// Filters a type such that that no part of it is definitely -// truthy. For example, if a type is a union of None -// and a custom class "Foo" that has no __len__ or __nonzero__ -// method, this method would strip off the "Foo" -// and return only the "None". -export function removeTruthinessFromType(type: Type): Type { - return mapSubtypes(type, (subtype) => { - if (isClassInstance(subtype)) { - if (subtype.literalValue !== undefined) { - // If the object is already definitely falsy, it's fine to - // include, otherwise it should be removed. - return !subtype.literalValue ? subtype : undefined; - } - - // If the object is a bool, make it "false", since - // "true" is a truthy value. 
- if (ClassType.isBuiltIn(subtype, 'bool')) { - return ClassType.cloneWithLiteral(subtype, /* value */ false); - } - } - - // If it's possible for the type to be falsy, include it. - if (canBeFalsy(subtype)) { - return subtype; - } - - return undefined; - }); -} - -export function synthesizeTypeVarForSelfCls(classType: ClassType, isClsParam: boolean) { - const selfType = TypeVarType.createInstance(`__type_of_${isClsParam ? 'cls' : 'self'}_${classType.details.name}`); +export function synthesizeTypeVarForSelfCls(classType: ClassType, isClsParam: boolean): TypeVarType { + const selfType = TypeVarType.createInstance(`__type_of_self__`); const scopeId = getTypeVarScopeId(classType) ?? ''; selfType.details.isSynthesized = true; - selfType.details.isSynthesizedSelfCls = true; + selfType.details.isSynthesizedSelf = true; selfType.nameWithScope = TypeVarType.makeNameWithScope(selfType.details.name, scopeId); selfType.scopeId = scopeId; - // The self/cls parameter is allowed to skip the abstract class test - // because the caller is possibly passing in a non-abstract subclass. - selfType.details.boundType = ClassType.cloneAsInstance( - selfSpecializeClassType(classType, /* includeSubclasses */ true) + const boundType = ClassType.cloneForSpecialization( + classType, + ClassType.getTypeParameters(classType), + /* isTypeArgumentExplicit */ false, + /* includeSubclasses */ true ); - return isClsParam ? convertToInstantiable(selfType) : selfType; + selfType.details.boundType = ClassType.cloneAsInstance(boundType); + + return isClsParam ? TypeVarType.cloneAsInstantiable(selfType) : selfType; } -// Returns the declared yield type if provided, or undefined otherwise. -export function getDeclaredGeneratorYieldType(functionType: FunctionType): Type | undefined { +// Returns the declared "return" type (the type returned from a return statement) +// if it was declared, or undefined otherwise. 
+export function getDeclaredGeneratorReturnType(functionType: FunctionType): Type | undefined {
     const returnType = FunctionType.getSpecializedReturnType(functionType);
     if (returnType) {
         const generatorTypeArgs = getGeneratorTypeArgs(returnType);

-        if (generatorTypeArgs && generatorTypeArgs.length >= 1) {
-            return generatorTypeArgs[0];
+        if (generatorTypeArgs) {
+            // The return type is the third type arg.
+            return generatorTypeArgs.length >= 3 ? generatorTypeArgs[2] : UnknownType.create();
         }
     }

     return undefined;
 }

-// Returns the declared "send" type (the type returned from the yield
-// statement) if it was declared, or undefined otherwise.
-export function getDeclaredGeneratorSendType(functionType: FunctionType): Type | undefined {
-    const returnType = FunctionType.getSpecializedReturnType(functionType);
-    if (returnType) {
-        const generatorTypeArgs = getGeneratorTypeArgs(returnType);
+// If the declared return type is a Generator, Iterable, Iterator or the async
+// counterparts, returns the yield type. If the type is invalid for a generator,
+// returns undefined.
+export function getGeneratorYieldType(declaredReturnType: Type, isAsync: boolean): Type | undefined {
+    let isLegalGeneratorType = true;

-        if (generatorTypeArgs && generatorTypeArgs.length >= 2) {
-            // The send type is the second type arg.
-            return generatorTypeArgs[1];
+    const yieldType = mapSubtypes(declaredReturnType, (subtype) => {
+        if (isAnyOrUnknown(subtype)) {
+            return subtype;
         }

-        return UnknownType.create();
-    }
+        if (isClassInstance(subtype)) {
+            const expectedClasses = [
+                ['AsyncIterable', 'Iterable'],
+                ['AsyncIterator', 'Iterator'],
+                ['AsyncGenerator', 'Generator'],
+                ['', 'AwaitableGenerator'],
+            ];

-    return undefined;
+            if (expectedClasses.some((classes) => ClassType.isBuiltIn(subtype, isAsync ? classes[0] : classes[1]))) {
+                return subtype.typeArguments && subtype.typeArguments.length >= 1
+                    ? 
subtype.typeArguments[0] + : UnknownType.create(); + } + } + + isLegalGeneratorType = false; + return undefined; + }); + + return isLegalGeneratorType ? yieldType : undefined; } -// Returns the declared "return" type (the type returned from a return statement) -// if it was declared, or undefined otherwise. -export function getDeclaredGeneratorReturnType(functionType: FunctionType): Type | undefined { - const returnType = FunctionType.getSpecializedReturnType(functionType); - if (returnType) { - const generatorTypeArgs = getGeneratorTypeArgs(returnType); +export function isEffectivelyInstantiable(type: Type): boolean { + if (TypeBase.isInstantiable(type)) { + return true; + } - if (generatorTypeArgs && generatorTypeArgs.length >= 3) { - // The send type is the third type arg. - return generatorTypeArgs[2]; - } + // Handle the special case of 'type', which is instantiable. + if (isClassInstance(type) && ClassType.isBuiltIn(type, 'type')) { + return true; + } - return UnknownType.create(); + if (isUnion(type)) { + return type.subtypes.every((subtype) => isEffectivelyInstantiable(subtype)); } - return undefined; + return false; } export function convertToInstance(type: Type): Type { @@ -1480,8 +1646,16 @@ export function getMembersForClass(classType: ClassType, symbolTable: SymbolTabl mroClass.details.fields.forEach((symbol, name) => { if (symbol.isClassMember() || (includeInstanceVars && symbol.isInstanceMember())) { if (!isClassTypedDict || !isTypedDictMemberAccessedThroughIndex(symbol)) { - if (!symbolTable.get(name)) { - symbolTable.set(name, symbol); + if (!symbol.isInitVar()) { + const existingSymbol = symbolTable.get(name); + + if (!existingSymbol) { + symbolTable.set(name, symbol); + } else if (!existingSymbol.hasTypedDeclarations() && symbol.hasTypedDeclarations()) { + // If the existing symbol is unannotated but a parent class + // has an annotation for the symbol, use the parent type instead. 
+ symbolTable.set(name, symbol); + } } } } @@ -1496,7 +1670,13 @@ export function getMembersForClass(classType: ClassType, symbolTable: SymbolTabl for (const mroClass of metaclass.details.mro) { if (isInstantiableClass(mroClass)) { mroClass.details.fields.forEach((symbol, name) => { - if (!symbolTable.get(name)) { + const existingSymbol = symbolTable.get(name); + + if (!existingSymbol) { + symbolTable.set(name, symbol); + } else if (!existingSymbol.hasTypedDeclarations() && symbol.hasTypedDeclarations()) { + // If the existing symbol is unannotated but a parent class + // has an annotation for the symbol, use the parent type instead. symbolTable.set(name, symbol); } }); @@ -1542,17 +1722,29 @@ export function isPartlyUnknown(type: Type, allowUnknownTypeArgsForClasses = fal if (recursionCount > maxTypeRecursionCount) { return false; } + recursionCount++; if (isUnknown(type)) { return true; } + // If this is a generic type alias, see if any of its type arguments + // are either unspecified or are partially known. + if (type.typeAliasInfo?.typeArguments) { + if ( + type.typeAliasInfo.typeArguments.some((typeArg) => + isPartlyUnknown(typeArg, allowUnknownTypeArgsForClasses, recursionCount) + ) + ) { + return true; + } + } + // See if a union contains an unknown type. 
if (isUnion(type)) { return ( - findSubtype(type, (subtype) => - isPartlyUnknown(subtype, allowUnknownTypeArgsForClasses, recursionCount + 1) - ) !== undefined + findSubtype(type, (subtype) => isPartlyUnknown(subtype, allowUnknownTypeArgsForClasses, recursionCount)) !== + undefined ); } @@ -1563,10 +1755,10 @@ export function isPartlyUnknown(type: Type, allowUnknownTypeArgsForClasses = fal } if (!allowUnknownTypeArgsForClasses && !ClassType.isPseudoGenericClass(type)) { - const typeArgs = type.tupleTypeArguments || type.typeArguments; + const typeArgs = type.tupleTypeArguments?.map((t) => t.type) || type.typeArguments; if (typeArgs) { for (const argType of typeArgs) { - if (isPartlyUnknown(argType, allowUnknownTypeArgsForClasses, recursionCount + 1)) { + if (isPartlyUnknown(argType, allowUnknownTypeArgsForClasses, recursionCount)) { return true; } } @@ -1579,7 +1771,7 @@ export function isPartlyUnknown(type: Type, allowUnknownTypeArgsForClasses = fal // See if a function has an unknown type. if (isOverloadedFunction(type)) { return type.overloads.some((overload) => { - return isPartlyUnknown(overload, false, recursionCount + 1); + return isPartlyUnknown(overload, false, recursionCount); }); } @@ -1588,7 +1780,7 @@ export function isPartlyUnknown(type: Type, allowUnknownTypeArgsForClasses = fal // Ignore parameters such as "*" that have no name. 
if (type.details.parameters[i].name) { const paramType = FunctionType.getEffectiveParameterType(type, i); - if (isPartlyUnknown(paramType, false, recursionCount + 1)) { + if (isPartlyUnknown(paramType, /* allowUnknownTypeArgsForClasses */ false, recursionCount)) { return true; } } @@ -1596,7 +1788,8 @@ export function isPartlyUnknown(type: Type, allowUnknownTypeArgsForClasses = fal if ( type.details.declaredReturnType && - isPartlyUnknown(type.details.declaredReturnType, false, recursionCount + 1) + !FunctionType.isParamSpecValue(type) && + isPartlyUnknown(type.details.declaredReturnType, /* allowUnknownTypeArgsForClasses */ false, recursionCount) ) { return true; } @@ -1625,7 +1818,7 @@ export function explodeGenericClass(classType: ClassType) { // If the type is a union of same-sized tuples, these are combined into // a single tuple with that size. Otherwise, returns undefined. export function combineSameSizedTuples(type: Type, tupleType: Type | undefined) { - if (!tupleType || !isInstantiableClass(tupleType)) { + if (!tupleType || !isInstantiableClass(tupleType) || isUnboundedTupleClass(tupleType)) { return undefined; } @@ -1635,7 +1828,7 @@ export function combineSameSizedTuples(type: Type, tupleType: Type | undefined) doForEachSubtype(type, (subtype) => { if (isClassInstance(subtype)) { let tupleClass: ClassType | undefined; - if (isClass(subtype) && isTupleClass(subtype) && !isOpenEndedTupleClass(subtype)) { + if (isClass(subtype) && isTupleClass(subtype) && !isUnboundedTupleClass(subtype)) { tupleClass = subtype; } @@ -1643,7 +1836,7 @@ export function combineSameSizedTuples(type: Type, tupleType: Type | undefined) // Look in the mro list to see if this subtype derives from a // tuple with a known size. This includes named tuples. 
tupleClass = subtype.details.mro.find( - (mroClass) => isClass(mroClass) && isTupleClass(mroClass) && !isOpenEndedTupleClass(mroClass) + (mroClass) => isClass(mroClass) && isTupleClass(mroClass) && !isUnboundedTupleClass(mroClass) ) as ClassType | undefined; } @@ -1651,13 +1844,13 @@ export function combineSameSizedTuples(type: Type, tupleType: Type | undefined) if (tupleEntries) { if (tupleEntries.length === tupleClass.tupleTypeArguments.length) { tupleClass.tupleTypeArguments.forEach((entry, index) => { - tupleEntries![index].push(entry); + tupleEntries![index].push(entry.type); }); } else { isValid = false; } } else { - tupleEntries = tupleClass.tupleTypeArguments.map((entry) => [entry]); + tupleEntries = tupleClass.tupleTypeArguments.map((entry) => [entry.type]); } } else { isValid = false; @@ -1674,7 +1867,9 @@ export function combineSameSizedTuples(type: Type, tupleType: Type | undefined) return convertToInstance( specializeTupleClass( tupleType, - tupleEntries.map((entry) => combineTypes(entry)) + tupleEntries.map((entry) => { + return { type: combineTypes(entry), isUnbounded: false }; + }) ) ); } @@ -1685,17 +1880,12 @@ export function combineSameSizedTuples(type: Type, tupleType: Type | undefined) // computing the effective type args. 
export function specializeTupleClass( classType: ClassType, - typeArgs: Type[], + typeArgs: TupleTypeArgument[], isTypeArgumentExplicit = true, stripLiterals = true, - isForUnpackedVariadicTypeVar = false + isUnpackedTuple = false ): ClassType { - let combinedTupleType: Type = AnyType.create(/* isEllipsis */ false); - if (typeArgs.length === 2 && isEllipsisType(typeArgs[1])) { - combinedTupleType = typeArgs[0]; - } else { - combinedTupleType = combineTypes(typeArgs); - } + let combinedTupleType = combineTypes(typeArgs.map((t) => t.type)); if (stripLiterals) { combinedTupleType = stripLiteralValue(combinedTupleType); @@ -1714,8 +1904,8 @@ export function specializeTupleClass( typeArgs ); - if (isForUnpackedVariadicTypeVar) { - clonedClassType.isTupleForUnpackedVariadicTypeVar = true; + if (isUnpackedTuple) { + clonedClassType.isUnpacked = true; } return clonedClassType; @@ -1726,14 +1916,14 @@ export function specializeTupleClass( // it removes these parameters from the function. export function removeParamSpecVariadicsFromSignature(type: FunctionType | OverloadedFunctionType) { if (isFunction(type)) { - return _removeParamSpecVariadicsFromFunction(type); + return removeParamSpecVariadicsFromFunction(type); } const newOverloads: FunctionType[] = []; let newTypeNeeded = false; for (const overload of type.overloads) { - const newOverload = _removeParamSpecVariadicsFromFunction(overload); + const newOverload = removeParamSpecVariadicsFromFunction(overload); newOverloads.push(newOverload); if (newOverload !== overload) { newTypeNeeded = true; @@ -1743,599 +1933,229 @@ export function removeParamSpecVariadicsFromSignature(type: FunctionType | Overl return newTypeNeeded ? 
OverloadedFunctionType.create(newOverloads) : type; } -function _removeParamSpecVariadicsFromFunction(type: FunctionType): FunctionType { - if (!type.details.paramSpec) { - return type; - } - +export function removeParamSpecVariadicsFromFunction(type: FunctionType): FunctionType { const paramCount = type.details.parameters.length; - if (paramCount <= 2) { + if (paramCount < 2) { return type; } + const argsParam = type.details.parameters[paramCount - 2]; + const kwargsParam = type.details.parameters[paramCount - 1]; + if ( - type.details.parameters[paramCount - 2].category !== ParameterCategory.VarArgList || - type.details.parameters[paramCount - 1].category !== ParameterCategory.VarArgDictionary + argsParam.category !== ParameterCategory.VarArgList || + kwargsParam.category !== ParameterCategory.VarArgDictionary || + !isParamSpec(argsParam.type) || + !isParamSpec(kwargsParam.type) || + !isTypeSame(argsParam.type, kwargsParam.type) ) { return type; } - return FunctionType.cloneRemoveParamSpecVariadics(type); + return FunctionType.cloneRemoveParamSpecVariadics(type, argsParam.type); } -// Recursively walks a type and calls a callback for each TypeVar, allowing -// it to be replaced with something else. -function _transformTypeVars( - type: Type, - callbacks: TypeVarTransformer, - recursionMap = new Map(), - recursionLevel = 0 -): Type { - if (recursionLevel > maxTypeRecursionCount) { - return type; +function _expandVariadicUnpackedUnion(type: Type) { + if (isClassInstance(type) && isTupleClass(type) && type.tupleTypeArguments && type.isUnpacked) { + return combineTypes(type.tupleTypeArguments.map((t) => t.type)); } - // Shortcut the operation if possible. - if (!requiresSpecialization(type)) { - return type; + return type; +} + +// If the declared return type for the function is a Generator or AsyncGenerator, +// returns the type arguments for the type. 
+export function getGeneratorTypeArgs(returnType: Type): Type[] | undefined { + if (isClassInstance(returnType)) { + if (ClassType.isBuiltIn(returnType, ['Generator', 'AsyncGenerator'])) { + return returnType.typeArguments; + } else if (ClassType.isBuiltIn(returnType, 'AwaitableGenerator')) { + // AwaitableGenerator has four type arguments, and the last 3 + // correspond to the generator. + return returnType.typeArguments?.slice(1); + } } - if (isAnyOrUnknown(type)) { - return type; + return undefined; +} + +export function requiresTypeArguments(classType: ClassType) { + if (classType.details.typeParameters.length > 0) { + // If there are type parameters, type arguments are needed. + // The exception is if type parameters have been synthesized + // for classes that have untyped constructors. + return !classType.details.typeParameters[0].details.isSynthesized; } - if (isNone(type)) { - return type; + // There are a few built-in special classes that require + // type arguments even though typeParameters is empty. + if (ClassType.isSpecialBuiltIn(classType)) { + const specialClasses = [ + 'Tuple', + 'Callable', + 'Generic', + 'Type', + 'Optional', + 'Union', + 'Literal', + 'Annotated', + 'TypeGuard', + 'StrictTypeGuard', + ]; + + if (specialClasses.some((t) => t === (classType.aliasName || classType.details.name))) { + return true; + } } - if (isTypeVar(type)) { - // Handle recursive type aliases specially. In particular, - // we need to specialize type arguments for generic recursive - // type aliases. 
- if (type.details.recursiveTypeAliasName) { - if (!type.typeAliasInfo?.typeArguments) { - return type; - } + return false; +} - let requiresUpdate = false; - const typeArgs = type.typeAliasInfo.typeArguments.map((typeArg) => { - const replacementType = _transformTypeVars(typeArg, callbacks, recursionMap, recursionLevel + 1); - if (replacementType !== typeArg) { - requiresUpdate = true; - } - return replacementType; - }); +export function requiresSpecialization( + type: Type, + ignorePseudoGeneric = false, + ignoreSelf = false, + recursionCount = 0 +): boolean { + if (recursionCount > maxTypeRecursionCount) { + return false; + } + recursionCount++; + + switch (type.category) { + case TypeCategory.Class: { + if (ClassType.isPseudoGenericClass(type) && ignorePseudoGeneric) { + return false; + } - if (requiresUpdate) { - return TypeBase.cloneForTypeAlias( - type, - type.typeAliasInfo.name, - type.typeAliasInfo.fullName, - type.typeAliasInfo.typeVarScopeId, - type.typeAliasInfo.typeParameters, - typeArgs + if (type.typeArguments) { + return ( + type.typeArguments.find((typeArg) => + requiresSpecialization(typeArg, ignorePseudoGeneric, ignoreSelf, recursionCount) + ) !== undefined ); } - return type; + return ClassType.getTypeParameters(type).length > 0; } - let replacementType: Type = type; + case TypeCategory.Function: { + if (type.details.paramSpec) { + return true; + } - // Recursively transform the results, but ensure that we don't replace the - // same type variable recursively by setting it in the recursionMap. 
- const typeVarName = TypeVarType.getNameWithScope(type); - if (!recursionMap.has(typeVarName)) { - replacementType = callbacks.transformTypeVar(type); - recursionMap.set(typeVarName, type); - replacementType = _transformTypeVars(replacementType, callbacks, recursionMap, recursionLevel + 1); + for (let i = 0; i < type.details.parameters.length; i++) { + if ( + requiresSpecialization( + FunctionType.getEffectiveParameterType(type, i), + ignorePseudoGeneric, + ignoreSelf, + recursionCount + ) + ) { + return true; + } + } - // If we're transforming a variadic type variable that was in a union, - // expand the union types. - if (isVariadicTypeVar(type) && type.isVariadicInUnion) { - replacementType = _expandVariadicUnpackedUnion(replacementType); + const declaredReturnType = + type.specializedTypes && type.specializedTypes.returnType + ? type.specializedTypes.returnType + : type.details.declaredReturnType; + if (declaredReturnType) { + if (requiresSpecialization(declaredReturnType, ignorePseudoGeneric, ignoreSelf, recursionCount)) { + return true; + } + } else if (type.inferredReturnType) { + if (requiresSpecialization(type.inferredReturnType, ignorePseudoGeneric, ignoreSelf, recursionCount)) { + return true; + } } - recursionMap.delete(typeVarName); + return false; } - return replacementType; - } - - if (isUnion(type)) { - const newUnionType = mapSubtypes(type, (subtype) => { - let transformedType = _transformTypeVars(subtype, callbacks, recursionMap, recursionLevel + 1); - - // If we're transforming a variadic type variable within a union, - // combine the individual types within the variadic type variable. 
- if (isVariadicTypeVar(subtype) && !isVariadicTypeVar(transformedType)) { - const subtypesToCombine: Type[] = []; - doForEachSubtype(transformedType, (transformedSubtype) => { - subtypesToCombine.push(_expandVariadicUnpackedUnion(transformedSubtype)); - }); - - transformedType = combineTypes(subtypesToCombine); - } - - return transformedType; - }); + case TypeCategory.OverloadedFunction: { + return ( + type.overloads.find((overload) => + requiresSpecialization(overload, ignorePseudoGeneric, ignoreSelf, recursionCount) + ) !== undefined + ); + } - if (callbacks.transformUnion && isUnion(newUnionType)) { - return callbacks.transformUnion(newUnionType); + case TypeCategory.Union: { + return ( + findSubtype(type, (subtype) => + requiresSpecialization(subtype, ignorePseudoGeneric, ignoreSelf, recursionCount) + ) !== undefined + ); } - return newUnionType; - } + case TypeCategory.TypeVar: { + // Most TypeVar types need to be specialized. + if (!type.details.recursiveTypeAliasName) { + if (type.details.isSynthesizedSelf && ignoreSelf) { + return false; + } - if (isClassInstance(type)) { - const classType = _transformTypeVarsInClassType( - ClassType.cloneAsInstantiable(type), - callbacks, - recursionMap, - recursionLevel + 1 - ); + return true; + } - return ClassType.cloneAsInstance(classType); + // If this is a recursive type alias, it may need to be specialized + // if it has generic type arguments. 
+ if (type.typeAliasInfo?.typeArguments) { + return type.typeAliasInfo.typeArguments.some((typeArg) => + requiresSpecialization(typeArg, ignorePseudoGeneric, ignoreSelf, recursionCount) + ); + } + } } - if (isInstantiableClass(type)) { - return _transformTypeVarsInClassType(type, callbacks, recursionMap, recursionLevel + 1); - } + return false; +} - if (isFunction(type)) { - return _transformTypeVarsInFunctionType(type, callbacks, recursionMap, recursionLevel + 1); - } - - if (isOverloadedFunction(type)) { - let requiresUpdate = false; - - // Specialize each of the functions in the overload. - const newOverloads: FunctionType[] = []; - type.overloads.forEach((entry) => { - const replacementType = _transformTypeVarsInFunctionType(entry, callbacks, recursionMap, recursionLevel); - newOverloads.push(replacementType); - if (replacementType !== entry) { - requiresUpdate = true; - } - }); - - // Construct a new overload with the specialized function types. - return requiresUpdate ? OverloadedFunctionType.create(newOverloads) : type; - } - - return type; -} - -function _transformTypeVarsInClassType( - classType: ClassType, - callbacks: TypeVarTransformer, - recursionMap: Map, - recursionLevel: number -): ClassType { - // Handle the common case where the class has no type parameters. - if (ClassType.getTypeParameters(classType).length === 0 && !ClassType.isSpecialBuiltIn(classType)) { - return classType; - } - - let newTypeArgs: Type[] = []; - let newVariadicTypeArgs: Type[] | undefined; - let specializationNeeded = false; - const typeParams = ClassType.getTypeParameters(classType); - - const transformParamSpec = (paramSpec: TypeVarType) => { - const paramSpecEntries = callbacks.transformParamSpec(paramSpec); - if (paramSpecEntries) { - if (paramSpecEntries.concrete) { - // Create a function type from the param spec entries. 
- const functionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.ParamSpecValue); - - paramSpecEntries.concrete.parameters.forEach((entry) => { - FunctionType.addParameter(functionType, { - category: entry.category, - name: entry.name, - hasDefault: entry.hasDefault, - hasDeclaredType: true, - type: entry.type, - }); - }); - - return functionType; - } - - if (paramSpecEntries.paramSpec) { - return paramSpecEntries.paramSpec; - } - } - - return paramSpec; - }; - - // If type args were previously provided, specialize them. - if (classType.typeArguments) { - newTypeArgs = classType.typeArguments.map((oldTypeArgType) => { - if (isTypeVar(oldTypeArgType) && oldTypeArgType.details.isParamSpec) { - return transformParamSpec(oldTypeArgType); - } - - let newTypeArgType = _transformTypeVars(oldTypeArgType, callbacks, recursionMap, recursionLevel + 1); - if (newTypeArgType !== oldTypeArgType) { - specializationNeeded = true; - - // If this was a variadic type variable that was part of a union - // (e.g. Union[Unpack[Vs]]), expand the subtypes into a union here. 
- if ( - isTypeVar(oldTypeArgType) && - isVariadicTypeVar(oldTypeArgType) && - oldTypeArgType.isVariadicInUnion - ) { - newTypeArgType = _expandVariadicUnpackedUnion(newTypeArgType); - } - } - return newTypeArgType; - }); - } else { - typeParams.forEach((typeParam) => { - let replacementType: Type = typeParam; - - if (typeParam.details.isParamSpec) { - replacementType = transformParamSpec(typeParam); - if (replacementType !== typeParam) { - specializationNeeded = true; - } - } else { - const typeParamName = TypeVarType.getNameWithScope(typeParam); - if (!recursionMap.has(typeParamName)) { - replacementType = callbacks.transformTypeVar(typeParam); - if (replacementType !== typeParam) { - recursionMap.set(typeParamName, typeParam); - replacementType = _transformTypeVars( - replacementType, - callbacks, - recursionMap, - recursionLevel + 1 - ); - recursionMap.delete(typeParamName); - specializationNeeded = true; - } - } - } - - newTypeArgs.push(replacementType); - }); - } +// Computes the method resolution ordering for a class whose base classes +// have already been filled in. The algorithm for computing MRO is described +// here: https://www.python.org/download/releases/2.3/mro/. It returns true +// if an MRO was possible, false otherwise. +export function computeMroLinearization(classType: ClassType): boolean { + let isMroFound = true; - if (ClassType.isTupleClass(classType)) { - if (classType.tupleTypeArguments) { - newVariadicTypeArgs = []; - classType.tupleTypeArguments.forEach((oldTypeArgType) => { - const newTypeArgType = _transformTypeVars(oldTypeArgType, callbacks, recursionMap, recursionLevel + 1); - if (newTypeArgType !== oldTypeArgType) { - specializationNeeded = true; + const filteredBaseClasses = classType.details.baseClasses.filter((baseClass, index) => { + if (isInstantiableClass(baseClass)) { + // Generic has some special-case logic (see description of __mro_entries__ + // in PEP 560) that we need to account for here. 
+ if (ClassType.isBuiltIn(baseClass, 'Generic')) { + // If the class is a Protocol, the generic is ignored for the purposes + // of computing the MRO. + if (ClassType.isProtocolClass(classType)) { + return false; } + // If the class contains any specialized generic classes after + // the Generic base, the Generic base is ignored for purposes + // of computing the MRO. if ( - isVariadicTypeVar(oldTypeArgType) && - isClassInstance(newTypeArgType) && - isTupleClass(newTypeArgType) && - newTypeArgType.tupleTypeArguments + classType.details.baseClasses.some((innerBaseClass, innerIndex) => { + return ( + innerIndex > index && + isInstantiableClass(innerBaseClass) && + innerBaseClass.typeArguments && + innerBaseClass.isTypeArgumentExplicit + ); + }) ) { - newVariadicTypeArgs!.push(...newTypeArgType.tupleTypeArguments); - } else { - newVariadicTypeArgs!.push(newTypeArgType); - } - }); - } else if (typeParams.length > 0) { - newVariadicTypeArgs = callbacks.transformVariadicTypeVar(typeParams[0]); - if (newVariadicTypeArgs) { - specializationNeeded = true; - } - } - } - - // If specialization wasn't needed, don't allocate a new class. - if (!specializationNeeded) { - return classType; - } - - return ClassType.cloneForSpecialization( - classType, - newTypeArgs, - /* isTypeArgumentExplicit */ true, - /* includeSubclasses */ undefined, - newVariadicTypeArgs - ); -} - -function _transformTypeVarsInFunctionType( - sourceType: FunctionType, - callbacks: TypeVarTransformer, - recursionMap: Map, - recursionLevel: number -): FunctionType { - let functionType = sourceType; - - // Handle functions with a parameter specification in a special manner. - if (functionType.details.paramSpec) { - const paramSpec = callbacks.transformParamSpec(functionType.details.paramSpec); - if (paramSpec) { - functionType = FunctionType.cloneForParamSpec(functionType, paramSpec); - } - } - - const declaredReturnType = - functionType.specializedTypes && functionType.specializedTypes.returnType - ? 
functionType.specializedTypes.returnType - : functionType.details.declaredReturnType; - const specializedReturnType = declaredReturnType - ? _transformTypeVars(declaredReturnType, callbacks, recursionMap, recursionLevel + 1) - : undefined; - let typesRequiredSpecialization = declaredReturnType !== specializedReturnType; - - const specializedParameters: SpecializedFunctionTypes = { - parameterTypes: [], - returnType: specializedReturnType, - }; - - // Does this function end with *args: P.args, **args: P.kwargs? If so, we'll - // modify the function and replace these parameters with the signature captured - // by the ParamSpec. - if (functionType.details.parameters.length >= 2) { - const argsParam = functionType.details.parameters[functionType.details.parameters.length - 2]; - const kwargsParam = functionType.details.parameters[functionType.details.parameters.length - 1]; - const argsParamType = FunctionType.getEffectiveParameterType( - functionType, - functionType.details.parameters.length - 2 - ); - const kwargsParamType = FunctionType.getEffectiveParameterType( - functionType, - functionType.details.parameters.length - 1 - ); - - if ( - argsParam.category === ParameterCategory.VarArgList && - kwargsParam.category === ParameterCategory.VarArgDictionary && - isParamSpec(argsParamType) && - isParamSpec(kwargsParamType) && - isTypeSame(argsParamType, kwargsParamType) - ) { - const paramSpecType = callbacks.transformParamSpec(argsParamType); - if (paramSpecType) { - functionType = FunctionType.cloneForParamSpecApplication(functionType, paramSpecType); - } - } - } - - let variadicParamIndex: number | undefined; - let variadicTypesToUnpack: Type[] | undefined; - - for (let i = 0; i < functionType.details.parameters.length; i++) { - const paramType = FunctionType.getEffectiveParameterType(functionType, i); - const specializedType = _transformTypeVars(paramType, callbacks, recursionMap, recursionLevel + 1); - specializedParameters.parameterTypes.push(specializedType); - if 
( - variadicParamIndex === undefined && - isVariadicTypeVar(paramType) && - functionType.details.parameters[i].category === ParameterCategory.Simple - ) { - variadicParamIndex = i; - - if ( - isClassInstance(specializedType) && - isTupleClass(specializedType) && - specializedType.isTupleForUnpackedVariadicTypeVar - ) { - variadicTypesToUnpack = specializedType.tupleTypeArguments; - } - } - - if (paramType !== specializedType) { - typesRequiredSpecialization = true; - } - } - - if (!typesRequiredSpecialization) { - return functionType; - } - - let specializedInferredReturnType: Type | undefined; - if (functionType.inferredReturnType) { - specializedInferredReturnType = _transformTypeVars( - functionType.inferredReturnType, - callbacks, - recursionMap, - recursionLevel + 1 - ); - } - - // If there was no unpacked variadic type variable, we're done. - if (!variadicTypesToUnpack) { - return FunctionType.cloneForSpecialization(functionType, specializedParameters, specializedInferredReturnType); - } - - // Unpack the tuple and synthesize a new function in the process. - const newFunctionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.SynthesizedMethod); - specializedParameters.parameterTypes.forEach((paramType, index) => { - if (index === variadicParamIndex) { - // Unpack the tuple into individual parameters. 
- variadicTypesToUnpack!.forEach((unpackedType) => { - FunctionType.addParameter(newFunctionType, { - category: ParameterCategory.Simple, - name: `_p${newFunctionType.details.parameters.length}`, - isNameSynthesized: true, - type: unpackedType, - hasDeclaredType: true, - }); - }); - } else { - const param = { ...functionType.details.parameters[index] }; - param.type = paramType; - if (param.name && param.isNameSynthesized) { - param.name = `_p${newFunctionType.details.parameters.length}`; - } - - FunctionType.addParameter(newFunctionType, param); - } - }); - - newFunctionType.details.declaredReturnType = FunctionType.getSpecializedReturnType(functionType); - - return newFunctionType; -} - -function _expandVariadicUnpackedUnion(type: Type) { - if ( - isClassInstance(type) && - isTupleClass(type) && - type.tupleTypeArguments && - type.isTupleForUnpackedVariadicTypeVar - ) { - return combineTypes(type.tupleTypeArguments); - } - - return type; -} - -// If the declared return type for the function is a Generator or AsyncGenerator, -// returns the type arguments for the type. -export function getGeneratorTypeArgs(returnType: Type): Type[] | undefined { - if (isClassInstance(returnType)) { - if (ClassType.isBuiltIn(returnType)) { - const className = returnType.details.name; - if (className === 'Generator' || className === 'AsyncGenerator') { - return returnType.typeArguments; - } - } - } - - return undefined; -} - -export function requiresTypeArguments(classType: ClassType) { - if (classType.details.typeParameters.length > 0) { - // If there are type parameters, type arguments are needed. - // The exception is if type parameters have been synthesized - // for classes that have untyped constructors. - return !classType.details.typeParameters[0].details.isSynthesized; - } - - // There are a few built-in special classes that require - // type arguments even though typeParameters is empty. 
- if (ClassType.isSpecialBuiltIn(classType)) { - const specialClasses = [ - 'Tuple', - 'Callable', - 'Generic', - 'Type', - 'Optional', - 'Union', - 'Literal', - 'Annotated', - 'TypeGuard', - ]; - if (specialClasses.some((t) => t === (classType.aliasName || classType.details.name))) { - return true; - } - } - - return false; -} - -export function requiresSpecialization(type: Type, recursionCount = 0): boolean { - switch (type.category) { - case TypeCategory.Class: { - if (type.typeArguments) { - if (recursionCount > maxTypeRecursionCount) { return false; } - - return ( - type.typeArguments.find((typeArg) => requiresSpecialization(typeArg, recursionCount + 1)) !== - undefined - ); - } - - // If there are any type parameters, we need to specialize - // since there are no corresponding type arguments. - return ClassType.getTypeParameters(type).length > 0; - } - - case TypeCategory.Function: { - if (recursionCount > maxTypeRecursionCount) { - return false; - } - - if (type.details.paramSpec) { - return true; - } - - for (let i = 0; i < type.details.parameters.length; i++) { - if (requiresSpecialization(FunctionType.getEffectiveParameterType(type, i), recursionCount + 1)) { - return true; - } - } - - const declaredReturnType = - type.specializedTypes && type.specializedTypes.returnType - ? 
type.specializedTypes.returnType - : type.details.declaredReturnType; - if (declaredReturnType) { - if (requiresSpecialization(declaredReturnType, recursionCount + 1)) { - return true; - } - } else if (type.inferredReturnType) { - if (requiresSpecialization(type.inferredReturnType, recursionCount + 1)) { - return true; - } - } - - return false; - } - - case TypeCategory.OverloadedFunction: { - return ( - type.overloads.find((overload) => requiresSpecialization(overload, recursionCount + 1)) !== undefined - ); - } - - case TypeCategory.Union: { - return findSubtype(type, (subtype) => requiresSpecialization(subtype, recursionCount + 1)) !== undefined; - } - - case TypeCategory.TypeVar: { - // Most TypeVar types need to be specialized. - if (!type.details.recursiveTypeAliasName) { - return true; - } - - // If this is a recursive type alias, it may need to be specialized - // if it has generic type arguments. - if (type.typeAliasInfo?.typeArguments) { - return type.typeAliasInfo.typeArguments.some((typeArg) => - requiresSpecialization(typeArg, recursionCount + 1) - ); } } - } - - return false; -} -// Computes the method resolution ordering for a class whose base classes -// have already been filled in. The algorithm for computing MRO is described -// here: https://www.python.org/download/releases/2.3/mro/. It returns true -// if an MRO was possible, false otherwise. -export function computeMroLinearization(classType: ClassType): boolean { - let isMroFound = true; + return true; + }); // Construct the list of class lists that need to be merged. const classListsToMerge: Type[][] = []; - // Remove any Generic class. It appears not to participate in MRO calculations. 
- const baseClassesToInclude = classType.details.baseClasses.filter( - (baseClass) => !isInstantiableClass(baseClass) || !ClassType.isBuiltIn(baseClass, 'Generic') - ); - - baseClassesToInclude.forEach((baseClass) => { + filteredBaseClasses.forEach((baseClass, index) => { if (isInstantiableClass(baseClass)) { const typeVarMap = buildTypeVarMapFromSpecializedClass(baseClass, /* makeConcrete */ false); classListsToMerge.push( @@ -2349,7 +2169,7 @@ export function computeMroLinearization(classType: ClassType): boolean { }); classListsToMerge.push( - baseClassesToInclude.map((baseClass) => { + filteredBaseClasses.map((baseClass) => { const typeVarMap = buildTypeVarMapFromSpecializedClass(classType, /* makeConcrete */ false); return applySolvedTypeVars(baseClass, typeVarMap); }) @@ -2445,6 +2265,7 @@ function addDeclaringModuleNamesForType(type: Type, moduleList: string[], recurs if (recursionCount > maxTypeRecursionCount) { return; } + recursionCount++; const addIfUnique = (moduleName: string) => { if (moduleName && !moduleList.some((n) => n === moduleName)) { @@ -2465,14 +2286,14 @@ function addDeclaringModuleNamesForType(type: Type, moduleList: string[], recurs case TypeCategory.OverloadedFunction: { type.overloads.forEach((overload) => { - addDeclaringModuleNamesForType(overload, moduleList, recursionCount + 1); + addDeclaringModuleNamesForType(overload, moduleList, recursionCount); }); break; } case TypeCategory.Union: { doForEachSubtype(type, (subtype) => { - addDeclaringModuleNamesForType(subtype, moduleList, recursionCount + 1); + addDeclaringModuleNamesForType(subtype, moduleList, recursionCount); }); break; } @@ -2483,3 +2304,619 @@ function addDeclaringModuleNamesForType(type: Type, moduleList: string[], recurs } } } + +export function convertParamSpecValueToType(paramSpecEntry: ParamSpecValue): Type { + let hasParameters = paramSpecEntry.parameters.length > 0; + + if (paramSpecEntry.parameters.length === 1) { + // If the ParamSpec has a position-only 
separator as its only parameter, + // treat it as though there are no parameters. + const onlyParam = paramSpecEntry.parameters[0]; + if (onlyParam.category === ParameterCategory.Simple && !onlyParam.name) { + hasParameters = false; + } + } + + if (hasParameters || !paramSpecEntry.paramSpec) { + // Create a function type from the param spec entries. + const functionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.ParamSpecValue); + + paramSpecEntry.parameters.forEach((entry) => { + FunctionType.addParameter(functionType, { + category: entry.category, + name: entry.name, + hasDefault: entry.hasDefault, + isNameSynthesized: entry.isNameSynthesized, + hasDeclaredType: true, + type: entry.type, + }); + }); + + functionType.details.paramSpec = paramSpecEntry.paramSpec; + functionType.details.docString = paramSpecEntry.docString; + + return functionType; + } + + return paramSpecEntry.paramSpec; +} + +// Recursively walks a type and calls a callback for each TypeVar, allowing +// it to be replaced with something else. +class TypeVarTransformer { + private _isTransformingTypeArg = false; + + apply(type: Type, recursionSet = new Set(), recursionCount = 0): Type { + if (recursionCount > maxTypeRecursionCount) { + return type; + } + recursionCount++; + + // Shortcut the operation if possible. + if (!requiresSpecialization(type)) { + return type; + } + + if (isAnyOrUnknown(type)) { + return type; + } + + if (isNoneInstance(type)) { + return type; + } + + if (isTypeVar(type)) { + // Handle recursive type aliases specially. In particular, + // we need to specialize type arguments for generic recursive + // type aliases. 
+ if (type.details.recursiveTypeAliasName) { + if (!type.typeAliasInfo?.typeArguments) { + return type; + } + + let requiresUpdate = false; + const typeArgs = type.typeAliasInfo.typeArguments.map((typeArg) => { + const replacementType = this.apply(typeArg, recursionSet, recursionCount); + if (replacementType !== typeArg) { + requiresUpdate = true; + } + return replacementType; + }); + + if (requiresUpdate) { + return TypeBase.cloneForTypeAlias( + type, + type.typeAliasInfo.name, + type.typeAliasInfo.fullName, + type.typeAliasInfo.typeVarScopeId, + type.typeAliasInfo.typeParameters, + typeArgs + ); + } + + return type; + } + + let replacementType: Type = type; + + // Recursively transform the results, but ensure that we don't replace the + // same type variable recursively by setting it in the recursionSet. + const typeVarName = TypeVarType.getNameWithScope(type); + if (!recursionSet.has(typeVarName)) { + replacementType = this.transformTypeVar(type); + + if (!this._isTransformingTypeArg) { + recursionSet.add(typeVarName); + replacementType = this.apply(replacementType, recursionSet, recursionCount); + recursionSet.delete(typeVarName); + } + + // If we're transforming a variadic type variable that was in a union, + // expand the union types. + if (isVariadicTypeVar(type) && type.isVariadicInUnion) { + replacementType = _expandVariadicUnpackedUnion(replacementType); + } + } + + return replacementType; + } + + if (isUnion(type)) { + const newUnionType = mapSubtypes(type, (subtype) => { + let transformedType = this.apply(subtype, recursionSet, recursionCount); + + // If we're transforming a variadic type variable within a union, + // combine the individual types within the variadic type variable. 
+ if (isVariadicTypeVar(subtype) && !isVariadicTypeVar(transformedType)) { + const subtypesToCombine: Type[] = []; + doForEachSubtype(transformedType, (transformedSubtype) => { + subtypesToCombine.push(_expandVariadicUnpackedUnion(transformedSubtype)); + }); + + transformedType = combineTypes(subtypesToCombine); + } + + return transformedType; + }); + + if (this.transformUnion && isUnion(newUnionType)) { + return this.transformUnion(newUnionType); + } + + return newUnionType; + } + + if (isClass(type)) { + return this._transformTypeVarsInClassType(type, recursionSet, recursionCount); + } + + if (isFunction(type)) { + return this._transformTypeVarsInFunctionType(type, recursionSet, recursionCount); + } + + if (isOverloadedFunction(type)) { + let requiresUpdate = false; + + // Specialize each of the functions in the overload. + const newOverloads: FunctionType[] = []; + type.overloads.forEach((entry) => { + const replacementType = this._transformTypeVarsInFunctionType(entry, recursionSet, recursionCount); + newOverloads.push(replacementType); + if (replacementType !== entry) { + requiresUpdate = true; + } + }); + + // Construct a new overload with the specialized function types. + return requiresUpdate ? OverloadedFunctionType.create(newOverloads) : type; + } + + return type; + } + + transformTypeVar(typeVar: TypeVarType): Type { + return typeVar; + } + + transformVariadicTypeVar(paramSpec: TypeVarType): TupleTypeArgument[] | undefined { + return undefined; + } + + transformParamSpec(paramSpec: TypeVarType): ParamSpecValue | undefined { + return undefined; + } + + transformUnion(type: UnionType): Type { + return type; + } + + private _transformTypeVarsInClassType( + classType: ClassType, + recursionSet: Set, + recursionCount: number + ): ClassType { + // Handle the common case where the class has no type parameters. 
+ if (ClassType.getTypeParameters(classType).length === 0 && !ClassType.isSpecialBuiltIn(classType)) { + return classType; + } + + let newTypeArgs: Type[] = []; + let newVariadicTypeArgs: TupleTypeArgument[] | undefined; + let specializationNeeded = false; + const typeParams = ClassType.getTypeParameters(classType); + + const transformParamSpec = (paramSpec: TypeVarType) => { + const paramSpecValue = this.transformParamSpec(paramSpec); + if (paramSpecValue) { + specializationNeeded = true; + return convertParamSpecValueToType(paramSpecValue); + } else { + return paramSpec; + } + }; + + const wasTransformingTypeArg = this._isTransformingTypeArg; + this._isTransformingTypeArg = true; + + // If type args were previously provided, specialize them. + if (classType.typeArguments) { + newTypeArgs = classType.typeArguments.map((oldTypeArgType) => { + if (isTypeVar(oldTypeArgType) && oldTypeArgType.details.isParamSpec) { + return transformParamSpec(oldTypeArgType); + } + + let newTypeArgType = this.apply(oldTypeArgType, recursionSet, recursionCount); + if (newTypeArgType !== oldTypeArgType) { + specializationNeeded = true; + + // If this was a variadic type variable that was part of a union + // (e.g. Union[Unpack[Vs]]), expand the subtypes into a union here. 
+ if ( + isTypeVar(oldTypeArgType) && + isVariadicTypeVar(oldTypeArgType) && + oldTypeArgType.isVariadicInUnion + ) { + newTypeArgType = _expandVariadicUnpackedUnion(newTypeArgType); + } + } + return newTypeArgType; + }); + } else { + typeParams.forEach((typeParam) => { + let replacementType: Type = typeParam; + + if (typeParam.details.isParamSpec) { + replacementType = transformParamSpec(typeParam); + if (replacementType !== typeParam) { + specializationNeeded = true; + } + } else { + const typeParamName = TypeVarType.getNameWithScope(typeParam); + if (!recursionSet.has(typeParamName)) { + replacementType = this.transformTypeVar(typeParam); + + if (replacementType !== typeParam) { + if (!this._isTransformingTypeArg) { + recursionSet.add(typeParamName); + replacementType = this.apply(replacementType, recursionSet, recursionCount); + recursionSet.delete(typeParamName); + } + + specializationNeeded = true; + } + } + } + + newTypeArgs.push(replacementType); + }); + } + + if (ClassType.isTupleClass(classType)) { + if (classType.tupleTypeArguments) { + newVariadicTypeArgs = []; + classType.tupleTypeArguments.forEach((oldTypeArgType) => { + const newTypeArgType = this.apply(oldTypeArgType.type, recursionSet, recursionCount); + + if (newTypeArgType !== oldTypeArgType.type) { + specializationNeeded = true; + } + + if ( + isVariadicTypeVar(oldTypeArgType.type) && + isClassInstance(newTypeArgType) && + isTupleClass(newTypeArgType) && + newTypeArgType.tupleTypeArguments + ) { + newVariadicTypeArgs!.push(...newTypeArgType.tupleTypeArguments); + } else { + newVariadicTypeArgs!.push({ type: newTypeArgType, isUnbounded: oldTypeArgType.isUnbounded }); + } + }); + } else if (typeParams.length > 0) { + newVariadicTypeArgs = this.transformVariadicTypeVar(typeParams[0]); + if (newVariadicTypeArgs) { + specializationNeeded = true; + } + } + } + + this._isTransformingTypeArg = wasTransformingTypeArg; + + // If specialization wasn't needed, don't allocate a new class. 
+ if (!specializationNeeded) { + return classType; + } + + return ClassType.cloneForSpecialization( + classType, + newTypeArgs, + /* isTypeArgumentExplicit */ true, + /* includeSubclasses */ undefined, + newVariadicTypeArgs + ); + } + + private _transformTypeVarsInFunctionType( + sourceType: FunctionType, + recursionSet: Set, + recursionCount: number + ): FunctionType { + let functionType = sourceType; + + // Handle functions with a parameter specification in a special manner. + if (functionType.details.paramSpec) { + const paramSpec = this.transformParamSpec(functionType.details.paramSpec); + if (paramSpec) { + functionType = FunctionType.cloneForParamSpec(functionType, paramSpec); + } + } + + const declaredReturnType = FunctionType.getSpecializedReturnType(functionType); + const specializedReturnType = declaredReturnType + ? this.apply(declaredReturnType, recursionSet, recursionCount) + : undefined; + let typesRequiredSpecialization = declaredReturnType !== specializedReturnType; + + const specializedParameters: SpecializedFunctionTypes = { + parameterTypes: [], + returnType: specializedReturnType, + }; + + // Does this function end with *args: P.args, **args: P.kwargs? If so, we'll + // modify the function and replace these parameters with the signature captured + // by the ParamSpec. 
+ if (functionType.details.parameters.length >= 2) { + const argsParam = functionType.details.parameters[functionType.details.parameters.length - 2]; + const kwargsParam = functionType.details.parameters[functionType.details.parameters.length - 1]; + const argsParamType = FunctionType.getEffectiveParameterType( + functionType, + functionType.details.parameters.length - 2 + ); + const kwargsParamType = FunctionType.getEffectiveParameterType( + functionType, + functionType.details.parameters.length - 1 + ); + + if ( + argsParam.category === ParameterCategory.VarArgList && + kwargsParam.category === ParameterCategory.VarArgDictionary && + isParamSpec(argsParamType) && + isParamSpec(kwargsParamType) && + isTypeSame(argsParamType, kwargsParamType) + ) { + const paramSpecType = this.transformParamSpec(argsParamType); + if (paramSpecType) { + functionType = FunctionType.cloneForParamSpecApplication(functionType, paramSpecType); + } + } + } + + let variadicParamIndex: number | undefined; + let variadicTypesToUnpack: TupleTypeArgument[] | undefined; + + for (let i = 0; i < functionType.details.parameters.length; i++) { + const paramType = FunctionType.getEffectiveParameterType(functionType, i); + const specializedType = this.apply(paramType, recursionSet, recursionCount); + specializedParameters.parameterTypes.push(specializedType); + if ( + variadicParamIndex === undefined && + isVariadicTypeVar(paramType) && + functionType.details.parameters[i].category === ParameterCategory.VarArgList + ) { + variadicParamIndex = i; + + if (isClassInstance(specializedType) && isTupleClass(specializedType) && specializedType.isUnpacked) { + variadicTypesToUnpack = specializedType.tupleTypeArguments; + } + } + + if (paramType !== specializedType) { + typesRequiredSpecialization = true; + } + } + + if (!typesRequiredSpecialization) { + return functionType; + } + + let specializedInferredReturnType: Type | undefined; + if (functionType.inferredReturnType) { + specializedInferredReturnType = 
this.apply(functionType.inferredReturnType, recursionSet, recursionCount); + } + + // If there was no unpacked variadic type variable, we're done. + if (!variadicTypesToUnpack) { + return FunctionType.cloneForSpecialization( + functionType, + specializedParameters, + specializedInferredReturnType + ); + } + + // Unpack the tuple and synthesize a new function in the process. + const newFunctionType = FunctionType.createInstance('', '', '', FunctionTypeFlags.SynthesizedMethod); + let insertKeywordOnlySeparator = false; + let swallowPositionOnlySeparator = false; + + specializedParameters.parameterTypes.forEach((paramType, index) => { + if (index === variadicParamIndex) { + let sawUnboundedEntry = false; + + // Unpack the tuple into individual parameters. + variadicTypesToUnpack!.forEach((unpackedType) => { + FunctionType.addParameter(newFunctionType, { + category: unpackedType.isUnbounded ? ParameterCategory.VarArgList : ParameterCategory.Simple, + name: `__p${newFunctionType.details.parameters.length}`, + isNameSynthesized: true, + type: unpackedType.type, + hasDeclaredType: true, + }); + + if (unpackedType.isUnbounded) { + sawUnboundedEntry = true; + } + }); + + if (sawUnboundedEntry) { + swallowPositionOnlySeparator = true; + } else { + insertKeywordOnlySeparator = true; + } + } else { + const param = { ...functionType.details.parameters[index] }; + + if (param.category === ParameterCategory.VarArgList && !param.name) { + insertKeywordOnlySeparator = false; + } else if (param.category === ParameterCategory.VarArgDictionary) { + insertKeywordOnlySeparator = false; + } + + // Insert a keyword-only separator parameter if we previously + // unpacked a variadic TypeVar. 
+ if (param.category === ParameterCategory.Simple && param.name && insertKeywordOnlySeparator) { + FunctionType.addParameter(newFunctionType, { + category: ParameterCategory.VarArgList, + type: UnknownType.create(), + }); + insertKeywordOnlySeparator = false; + } + + param.type = paramType; + if (param.name && param.isNameSynthesized) { + param.name = `__p${newFunctionType.details.parameters.length}`; + } + + if (param.category !== ParameterCategory.Simple || param.name || !swallowPositionOnlySeparator) { + FunctionType.addParameter(newFunctionType, param); + } + } + }); + + newFunctionType.details.declaredReturnType = specializedParameters.returnType; + + return newFunctionType; + } +} + +// Specializes a (potentially generic) type by substituting +// type variables from a type var map. +class ApplySolvedTypeVarsTransformer extends TypeVarTransformer { + constructor( + private _typeVarMap: TypeVarMap, + private _unknownIfNotFound = false, + private _useNarrowBoundOnly = false, + private _eliminateUnsolvedInUnions = false + ) { + super(); + } + + override transformTypeVar(typeVar: TypeVarType) { + // If the type variable is unrelated to the scopes we're solving, + // don't transform that type variable. + if (typeVar.scopeId && this._typeVarMap.hasSolveForScope(typeVar.scopeId)) { + let replacement = this._typeVarMap.getTypeVarType(typeVar, this._useNarrowBoundOnly); + + // If there was no narrow bound but there is a wide bound that + // contains literals, we'll use the wide bound even if "useNarrowBoundOnly" + // is specified. 
+ if (!replacement && this._useNarrowBoundOnly) { + const wideType = this._typeVarMap.getTypeVarType(typeVar); + if (wideType) { + if (containsLiteralType(wideType, /* includeTypeArgs */ true)) { + replacement = wideType; + } + } + } + + if (replacement) { + if (TypeBase.isInstantiable(typeVar)) { + replacement = convertToInstantiable(replacement); + } + return replacement; + } + + // If this typeVar is in scope for what we're solving but the type + // var map doesn't contain any entry for it, replace with Unknown. + if (this._unknownIfNotFound) { + return UnknownType.create(); + } + } + + return typeVar; + } + + override transformUnion(type: UnionType) { + // If a union contains unsolved TypeVars within scope, eliminate them + // unless this results in an empty union. This elimination is needed + // in cases where TypeVars can go unmatched due to unions in parameter + // annotations, like this: + // def test(x: Union[str, T]) -> Union[str, T] + if (this._eliminateUnsolvedInUnions) { + const updatedUnion = mapSubtypes(type, (subtype) => { + if ( + isTypeVar(subtype) && + subtype.scopeId !== undefined && + this._typeVarMap.hasSolveForScope(subtype.scopeId) + ) { + return undefined; + } + return subtype; + }); + + return isNever(updatedUnion) ? 
type : updatedUnion; + } + + return type; + } + + override transformVariadicTypeVar(typeVar: TypeVarType) { + if (!typeVar.scopeId || !this._typeVarMap.hasSolveForScope(typeVar.scopeId)) { + return undefined; + } + + return this._typeVarMap.getVariadicTypeVar(typeVar); + } + + override transformParamSpec(paramSpec: TypeVarType) { + if (!paramSpec.scopeId || !this._typeVarMap.hasSolveForScope(paramSpec.scopeId)) { + return undefined; + } + + return this._typeVarMap.getParamSpec(paramSpec); + } +} + +class ExpectedConstructorTypeTransformer extends TypeVarTransformer { + static synthesizedTypeVarIndexForExpectedType = 1; + + dummyScopeId = '__expected_type_scope_id'; + dummyTypeVarPrefix = '__expected_type_'; + + constructor(private _typeVarMap: TypeVarMap, private _liveTypeVarScopes: TypeVarScopeId[]) { + super(); + + this._typeVarMap.addSolveForScope(this.dummyScopeId); + } + + private _isTypeVarLive(typeVar: TypeVarType) { + return this._liveTypeVarScopes.some((scopeId) => typeVar.scopeId === scopeId); + } + + private _createDummyTypeVar(prevTypeVar: TypeVarType) { + // If we previously synthesized this dummy type var, just return it. + if (prevTypeVar.details.isSynthesized && prevTypeVar.details.name.startsWith(this.dummyTypeVarPrefix)) { + return prevTypeVar; + } + + const isInstance = TypeBase.isInstance(prevTypeVar); + let newTypeVar = TypeVarType.createInstance( + `__expected_type_${ExpectedConstructorTypeTransformer.synthesizedTypeVarIndexForExpectedType}` + ); + newTypeVar.details.isSynthesized = true; + newTypeVar.scopeId = this.dummyScopeId; + newTypeVar.nameWithScope = TypeVarType.makeNameWithScope(newTypeVar.details.name, this.dummyScopeId); + if (!isInstance) { + newTypeVar = convertToInstantiable(newTypeVar) as TypeVarType; + } + + // If the original TypeVar was bound or constrained, make the replacement as well. 
+ newTypeVar.details.boundType = prevTypeVar.details.boundType; + newTypeVar.details.constraints = prevTypeVar.details.constraints; + + // Also copy the variance. + newTypeVar.details.variance = prevTypeVar.details.variance; + + ExpectedConstructorTypeTransformer.synthesizedTypeVarIndexForExpectedType++; + return newTypeVar; + } + + override transformTypeVar(typeVar: TypeVarType) { + // If the type variable is unrelated to the scopes we're solving, + // don't transform that type variable. + if (this._isTypeVarLive(typeVar)) { + return typeVar; + } + + return this._createDummyTypeVar(typeVar); + } +} diff --git a/packages/pyright-internal/src/analyzer/typeVarMap.ts b/packages/pyright-internal/src/analyzer/typeVarMap.ts index 987053a676f1..aa0d7f34fa9d 100644 --- a/packages/pyright-internal/src/analyzer/typeVarMap.ts +++ b/packages/pyright-internal/src/analyzer/typeVarMap.ts @@ -11,9 +11,11 @@ import { assert } from '../common/debug'; import { + AnyType, ClassType, maxTypeRecursionCount, ParamSpecValue, + TupleTypeArgument, Type, TypeCategory, TypeVarScopeId, @@ -40,7 +42,7 @@ export interface ParamSpecMapEntry { export interface VariadicTypeVarMapEntry { typeVar: TypeVarType; - types: Type[]; + types: TupleTypeArgument[]; } export class TypeVarMap { @@ -127,7 +129,7 @@ export class TypeVarMap { } // Provides a "score" - a value that values completeness (number - // of type variables that are assigned) and completeness. + // of type variables that are assigned) and simplicity. getScore() { let score = 0; @@ -136,11 +138,11 @@ export class TypeVarMap { // Add 1 to the score for each type variable defined. score += 1; - // Add a fractional amount based on the complexity of the definition. + // Add a fractional amount based on the simplicity of the definition. // The more complex, the lower the score. In the spirit of Occam's // Razor, we always want to favor simple answers. 
const typeVarType = this.getTypeVarType(value.typeVar)!; - score += this._getComplexityScoreForType(typeVarType); + score += 1.0 - this._getComplexityScoreForType(typeVarType); }); score += this._paramSpecMap.size; @@ -172,11 +174,11 @@ export class TypeVarMap { this._typeVarMap.set(key, { typeVar: reference, narrowBound, wideBound, retainLiteral }); } - getVariadicTypeVar(reference: TypeVarType): Type[] | undefined { + getVariadicTypeVar(reference: TypeVarType): TupleTypeArgument[] | undefined { return this._variadicTypeVarMap?.get(this._getKey(reference))?.types; } - setVariadicTypeVar(reference: TypeVarType, types: Type[]) { + setVariadicTypeVar(reference: TypeVarType, types: TupleTypeArgument[]) { assert(!this._isLocked); const key = this._getKey(reference); @@ -254,50 +256,46 @@ export class TypeVarMap { // Returns a "score" for a type that captures the relative complexity // of the type. Scores should all be between 0 and 1 where 0 means - // very complex and 1 means simple. This is a heuristic, so there's + // very simple and 1 means complex. This is a heuristic, so there's // often no objectively correct answer. private _getComplexityScoreForType(type: Type, recursionCount = 0): number { if (recursionCount > maxTypeRecursionCount) { - return 0; + return 1; } + recursionCount++; switch (type.category) { + case TypeCategory.Unknown: + case TypeCategory.Any: + case TypeCategory.None: case TypeCategory.Function: - case TypeCategory.OverloadedFunction: { - // For now, return a constant for functions. We may want - // to make this heuristic in the future. + case TypeCategory.OverloadedFunction: + case TypeCategory.TypeVar: { return 0.5; } - case TypeCategory.TypeVar: { - // A bare TypeVar is less desirable (and therefore considered - // more complex) than a concrete type. 
- return 1; - } + case TypeCategory.Unbound: + case TypeCategory.Never: + return 1.0; case TypeCategory.Union: { - let minScore = 1; + let maxScore = 0; // If this union has a very large number of subtypes, don't bother // accurately computing the score. Assume a fixed value. if (type.subtypes.length < 16) { doForEachSubtype(type, (subtype) => { - const subtypeScore = this._getComplexityScoreForType(subtype, recursionCount + 1); - if (subtypeScore < minScore) { - minScore = subtypeScore; - } + const subtypeScore = this._getComplexityScoreForType(subtype, recursionCount); + maxScore = Math.max(maxScore, subtypeScore); }); } - // Assume that a union is more complex than a non-union, - // and return half of the minimum score of the subtypes. - return minScore / 2; + // Assume that a union is more complex than a non-union. + return 0.75 + maxScore / 4; } case TypeCategory.Class: { - // Score a class as 0.5 plus half of the average complexity - // score of its type arguments. - return this._getComplexityScoreForClass(type, recursionCount + 1); + return this._getComplexityScoreForClass(type, recursionCount); } } @@ -310,22 +308,23 @@ export class TypeVarMap { let typeArgCount = 0; if (classType.tupleTypeArguments) { - classType.tupleTypeArguments.forEach((type) => { - typeArgScoreSum += this._getComplexityScoreForType(type, recursionCount + 1); + classType.tupleTypeArguments.forEach((typeArg) => { + typeArgScoreSum += this._getComplexityScoreForType(typeArg.type, recursionCount); typeArgCount++; }); } else if (classType.typeArguments) { classType.typeArguments.forEach((type) => { - typeArgScoreSum += this._getComplexityScoreForType(type, recursionCount + 1); + typeArgScoreSum += this._getComplexityScoreForType(type, recursionCount); + typeArgCount++; + }); + } else if (classType.details.typeParameters) { + classType.details.typeParameters.forEach((type) => { + typeArgScoreSum += this._getComplexityScoreForType(AnyType.create(), recursionCount); typeArgCount++; }); } - 
let score = 0.5; - if (typeArgCount > 0) { - score += (typeArgScoreSum / typeArgCount) * 0.5; - } - - return score; + const averageTypeArgComplexity = typeArgCount > 0 ? typeArgScoreSum / typeArgCount : 0; + return 0.5 + averageTypeArgComplexity * 0.25; } } diff --git a/packages/pyright-internal/src/analyzer/typedDicts.ts b/packages/pyright-internal/src/analyzer/typedDicts.ts index 936619cf4f14..6606e59d6b26 100644 --- a/packages/pyright-internal/src/analyzer/typedDicts.ts +++ b/packages/pyright-internal/src/analyzer/typedDicts.ts @@ -50,6 +50,7 @@ import { OverloadedFunctionType, Type, TypedDictEntry, + TypeVarScopeType, TypeVarType, UnknownType, } from './types'; @@ -143,7 +144,11 @@ export function createTypedDictType( entryMap.set(entryName, true); // Cache the annotation type. - evaluator.getTypeForExpressionExpectingType(entry.valueExpression, /* allowFinal */ true); + const annotatedType = evaluator.getTypeForExpressionExpectingType( + entry.valueExpression, + /* allowFinal */ true, + /* allowRequired */ true + ); const newSymbol = new Symbol(SymbolFlags.InstanceMember); const declaration: VariableDeclaration = { @@ -151,17 +156,25 @@ export function createTypedDictType( node: entry.keyExpression, path: fileInfo.filePath, typeAnnotationNode: entry.valueExpression, + isRequired: annotatedType.isRequired, + isNotRequired: annotatedType.isNotRequired, + isRuntimeTypeExpression: true, range: convertOffsetsToRange( entry.keyExpression.start, TextRange.getEnd(entry.keyExpression), fileInfo.lines ), moduleName: fileInfo.moduleName, + isInExceptSuite: false, }; newSymbol.addDeclaration(declaration); classFields.set(entryName, newSymbol); }); + + // Set the type in the type cache for the dict node so it doesn't + // get evaluated again. 
+ evaluator.setTypeForNode(entryDict); } else if (entriesArg.name) { for (let i = 1; i < argList.length; i++) { const entry = argList[i]; @@ -179,7 +192,11 @@ export function createTypedDictType( // Evaluate the type with specific evaluation flags. The // type will be cached for later. - evaluator.getTypeForExpressionExpectingType(entry.valueExpression, /* allowFinal */ true); + const annotatedType = evaluator.getTypeForExpressionExpectingType( + entry.valueExpression, + /* allowFinal */ true, + /* allowRequired */ true + ); const newSymbol = new Symbol(SymbolFlags.InstanceMember); const fileInfo = AnalyzerNodeInfo.getFileInfo(errorNode); @@ -188,12 +205,16 @@ export function createTypedDictType( node: entry.name, path: fileInfo.filePath, typeAnnotationNode: entry.valueExpression, + isRequired: annotatedType.isRequired, + isNotRequired: annotatedType.isNotRequired, + isRuntimeTypeExpression: true, range: convertOffsetsToRange( entry.name.start, TextRange.getEnd(entry.valueExpression), fileInfo.lines ), moduleName: fileInfo.moduleName, + isInExceptSuite: false, }; newSymbol.addDeclaration(declaration); @@ -297,12 +318,24 @@ export function synthesizeTypedDictClassMethods( type: ClassType.cloneAsInstance(classType), hasDeclaredType: true, }; - const typeVarScopeId = evaluator.getScopeIdForNode(node); - let defaultTypeVar = TypeVarType.createInstance(`__${classType.details.name}_default`); - defaultTypeVar.details.isSynthesized = true; - defaultTypeVar = TypeVarType.cloneForScopeId(defaultTypeVar, typeVarScopeId, classType.details.name); + const createDefaultTypeVar = (func: FunctionType) => { + let defaultTypeVar = TypeVarType.createInstance(`__${func.details.name}_default`); + defaultTypeVar.details.isSynthesized = true; + defaultTypeVar = TypeVarType.cloneForScopeId( + defaultTypeVar, + func.details.typeVarScopeId!, + classType.details.name, + TypeVarScopeType.Function + ); + return defaultTypeVar; + }; - const createGetMethod = (keyType: Type, valueType: Type, 
includeDefault: boolean) => { + const createGetMethod = ( + keyType: Type, + valueType: Type, + includeDefault: boolean, + defaultTypeMatchesField = false + ) => { const getOverload = FunctionType.createInstance( 'get', '', @@ -310,6 +343,7 @@ export function synthesizeTypedDictClassMethods( FunctionTypeFlags.SynthesizedMethod | FunctionTypeFlags.Overloaded ); FunctionType.addParameter(getOverload, selfParam); + getOverload.details.typeVarScopeId = evaluator.getScopeIdForNode(node); FunctionType.addParameter(getOverload, { category: ParameterCategory.Simple, name: 'k', @@ -317,14 +351,16 @@ export function synthesizeTypedDictClassMethods( hasDeclaredType: true, }); if (includeDefault) { + const defaultTypeVar = createDefaultTypeVar(getOverload); FunctionType.addParameter(getOverload, { category: ParameterCategory.Simple, name: 'default', - type: valueType, + type: defaultTypeMatchesField ? valueType : defaultTypeVar, hasDeclaredType: true, - hasDefault: true, }); - getOverload.details.declaredReturnType = valueType; + getOverload.details.declaredReturnType = defaultTypeMatchesField + ? 
valueType + : combineTypes([valueType, defaultTypeVar]); } else { getOverload.details.declaredReturnType = combineTypes([valueType, NoneType.createInstance()]); } @@ -357,6 +393,8 @@ export function synthesizeTypedDictClassMethods( ); FunctionType.addParameter(popOverload2, selfParam); FunctionType.addParameter(popOverload2, keyParam); + popOverload2.details.typeVarScopeId = evaluator.getScopeIdForNode(node); + const defaultTypeVar = createDefaultTypeVar(popOverload2); FunctionType.addParameter(popOverload2, { category: ParameterCategory.Simple, name: 'default', @@ -365,11 +403,10 @@ export function synthesizeTypedDictClassMethods( hasDefault: true, }); popOverload2.details.declaredReturnType = combineTypes([valueType, defaultTypeVar]); - popOverload2.details.typeVarScopeId = typeVarScopeId; return [popOverload1, popOverload2]; }; - const createSetDefaultMethod = (keyType: Type, valueType: Type, isEntryRequired = false) => { + const createSetDefaultMethod = (keyType: Type, valueType: Type) => { const setDefaultOverload = FunctionType.createInstance( 'setdefault', '', @@ -387,13 +424,9 @@ export function synthesizeTypedDictClassMethods( category: ParameterCategory.Simple, name: 'default', hasDeclaredType: true, - type: isEntryRequired ? AnyType.create() : defaultTypeVar, - hasDefault: true, + type: valueType, }); - setDefaultOverload.details.declaredReturnType = isEntryRequired - ? 
valueType - : combineTypes([valueType, defaultTypeVar]); - setDefaultOverload.details.typeVarScopeId = typeVarScopeId; + setDefaultOverload.details.declaredReturnType = valueType; return setDefaultOverload; }; @@ -422,34 +455,49 @@ export function synthesizeTypedDictClassMethods( entries.forEach((entry, name) => { const nameLiteralType = ClassType.cloneAsInstance(ClassType.cloneWithLiteral(strClass, name)); - if (!entry.isRequired) { - getOverloads.push(createGetMethod(nameLiteralType, entry.valueType, /* includeDefault */ false)); - } - getOverloads.push(createGetMethod(nameLiteralType, entry.valueType, /* includeDefault */ true)); + getOverloads.push(createGetMethod(nameLiteralType, entry.valueType, /* includeDefault */ false)); + getOverloads.push( + createGetMethod( + nameLiteralType, + entry.valueType, + /* includeDefault */ true, + /* defaultTypeMatchesField */ true + ) + ); + getOverloads.push( + createGetMethod( + nameLiteralType, + entry.valueType, + /* includeDefault */ true, + /* defaultTypeMatchesField */ false + ) + ); popOverloads.push(...createPopMethods(nameLiteralType, entry.valueType)); - setDefaultOverloads.push(createSetDefaultMethod(nameLiteralType, entry.valueType, entry.isRequired)); + setDefaultOverloads.push(createSetDefaultMethod(nameLiteralType, entry.valueType)); }); - // Provide a final overload that handles the general case where the key is - // a str but the literal value isn't known. + // Provide a final `get` overload that handles the general case where + // the key is a str but the literal value isn't known. 
const strType = ClassType.cloneAsInstance(strClass); getOverloads.push(createGetMethod(strType, AnyType.create(), /* includeDefault */ false)); getOverloads.push(createGetMethod(strType, AnyType.create(), /* includeDefault */ true)); - popOverloads.push(...createPopMethods(strType, AnyType.create())); - setDefaultOverloads.push(createSetDefaultMethod(strType, AnyType.create())); symbolTable.set( 'get', Symbol.createWithType(SymbolFlags.ClassMember, OverloadedFunctionType.create(getOverloads)) ); - symbolTable.set( - 'pop', - Symbol.createWithType(SymbolFlags.ClassMember, OverloadedFunctionType.create(popOverloads)) - ); - symbolTable.set( - 'setdefault', - Symbol.createWithType(SymbolFlags.ClassMember, OverloadedFunctionType.create(setDefaultOverloads)) - ); + if (popOverloads.length > 0) { + symbolTable.set( + 'pop', + Symbol.createWithType(SymbolFlags.ClassMember, OverloadedFunctionType.create(popOverloads)) + ); + } + if (setDefaultOverloads.length > 0) { + symbolTable.set( + 'setdefault', + Symbol.createWithType(SymbolFlags.ClassMember, OverloadedFunctionType.create(setDefaultOverloads)) + ); + } symbolTable.set('__delitem__', Symbol.createWithType(SymbolFlags.ClassMember, createDelItemMethod(strType))); } } @@ -490,10 +538,11 @@ function getTypedDictMembersForClassRecursive( if (recursionCount > maxTypeRecursionCount) { return; } + recursionCount++; classType.details.baseClasses.forEach((baseClassType) => { if (isInstantiableClass(baseClassType) && ClassType.isTypedDictClass(baseClassType)) { - getTypedDictMembersForClassRecursive(evaluator, baseClassType, keyMap, recursionCount + 1); + getTypedDictMembersForClassRecursive(evaluator, baseClassType, keyMap, recursionCount); } }); @@ -550,7 +599,7 @@ export function canAssignTypedDict( evaluator: TypeEvaluator, destType: ClassType, srcType: ClassType, - diag: DiagnosticAddendum, + diag: DiagnosticAddendum | undefined, recursionCount = 0 ) { let typesAreConsistent = true; @@ -560,36 +609,51 @@ export function 
canAssignTypedDict( destEntries.forEach((destEntry, name) => { const srcEntry = srcEntries.get(name); if (!srcEntry) { - diag.addMessage( - Localizer.DiagnosticAddendum.typedDictFieldMissing().format({ - name, - type: evaluator.printType(srcType), - }) - ); - typesAreConsistent = false; - } else { - if (destEntry.isRequired && !srcEntry.isRequired) { + if (diag) { diag.addMessage( - Localizer.DiagnosticAddendum.typedDictFieldRequired().format({ + Localizer.DiagnosticAddendum.typedDictFieldMissing().format({ name, - type: evaluator.printType(destType), + type: evaluator.printType(srcType), }) ); + } + typesAreConsistent = false; + } else { + if (destEntry.isRequired && !srcEntry.isRequired) { + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typedDictFieldRequired().format({ + name, + type: evaluator.printType(destType), + }) + ); + } typesAreConsistent = false; } else if (!destEntry.isRequired && srcEntry.isRequired) { - diag.addMessage( - Localizer.DiagnosticAddendum.typedDictFieldNotRequired().format({ - name, - type: evaluator.printType(destType), - }) - ); + if (diag) { + diag.addMessage( + Localizer.DiagnosticAddendum.typedDictFieldNotRequired().format({ + name, + type: evaluator.printType(destType), + }) + ); + } typesAreConsistent = false; } if ( - !isTypeSame(destEntry.valueType, srcEntry.valueType, /* ignorePseudoGeneric */ true, recursionCount + 1) + !evaluator.canAssignType( + destEntry.valueType, + srcEntry.valueType, + /* diag */ undefined, + /* typeVarMap */ undefined, + /* flags */ undefined, + recursionCount + ) ) { - diag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); + if (diag) { + diag.addMessage(Localizer.DiagnosticAddendum.memberTypeMismatch().format({ name })); + } typesAreConsistent = false; } } @@ -600,18 +664,23 @@ export function canAssignTypedDict( // Determines whether the specified keys and values can be assigned to // a typed dictionary class. 
The caller should have already validated -// that the class is indeed a typed dict. -export function canAssignToTypedDict( +// that the class is indeed a typed dict. If the types are compatible, +// the typed dict class or a narrowed form of the class is returned. +// Narrowing is possible when not-required keys are provided. If the +// types are not compatible, the function returns undefined. +export function assignToTypedDict( evaluator: TypeEvaluator, classType: ClassType, keyTypes: Type[], valueTypes: Type[], - diagAddendum: DiagnosticAddendum -): boolean { + diagAddendum?: DiagnosticAddendum +): ClassType | undefined { + assert(isClassInstance(classType)); assert(ClassType.isTypedDictClass(classType)); assert(keyTypes.length === valueTypes.length); let isMatch = true; + const narrowedEntries = new Map(); const symbolMap = getTypedDictMembersForClass(evaluator, classType); @@ -625,47 +694,67 @@ export function canAssignToTypedDict( if (!symbolEntry) { // The provided key name doesn't exist. isMatch = false; - diagAddendum.addMessage( - Localizer.DiagnosticAddendum.typedDictFieldUndefined().format({ - name: keyType.literalValue as string, - type: evaluator.printType(ClassType.cloneAsInstance(classType)), - }) - ); - } else { - // Can we assign the value to the declared type? - const assignDiag = new DiagnosticAddendum(); - if (!evaluator.canAssignType(symbolEntry.valueType, valueTypes[index], assignDiag)) { + if (diagAddendum) { diagAddendum.addMessage( - Localizer.DiagnosticAddendum.typedDictFieldTypeMismatch().format({ + Localizer.DiagnosticAddendum.typedDictFieldUndefined().format({ name: keyType.literalValue as string, - type: evaluator.printType(valueTypes[index]), + type: evaluator.printType(ClassType.cloneAsInstance(classType)), }) ); + } + } else { + // Can we assign the value to the declared type? 
+ if (!evaluator.canAssignType(symbolEntry.valueType, valueTypes[index])) { + if (diagAddendum) { + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.typedDictFieldTypeMismatch().format({ + name: keyType.literalValue as string, + type: evaluator.printType(valueTypes[index]), + }) + ); + } isMatch = false; } + + if (!symbolEntry.isRequired) { + narrowedEntries.set(keyValue, { + valueType: valueTypes[index], + isRequired: false, + isProvided: true, + }); + } + symbolEntry.isProvided = true; } } }); if (!isMatch) { - return false; + return undefined; } // See if any required keys are missing. symbolMap.forEach((entry, name) => { if (entry.isRequired && !entry.isProvided) { - diagAddendum.addMessage( - Localizer.DiagnosticAddendum.typedDictFieldRequired().format({ - name, - type: evaluator.printType(ClassType.cloneAsInstance(classType)), - }) - ); + if (diagAddendum) { + diagAddendum.addMessage( + Localizer.DiagnosticAddendum.typedDictFieldRequired().format({ + name, + type: evaluator.printType(classType), + }) + ); + } isMatch = false; } }); - return isMatch; + if (!isMatch) { + return undefined; + } + + return narrowedEntries.size === 0 + ? 
classType + : ClassType.cloneForNarrowedTypedDictEntries(classType, narrowedEntries); } export function getTypeFromIndexedTypedDict( @@ -684,7 +773,7 @@ export function getTypeFromIndexedTypedDict( return undefined; } - const entries = getTypedDictMembersForClass(evaluator, baseType, /* allowNarrowed */ true); + const entries = getTypedDictMembersForClass(evaluator, baseType, /* allowNarrowed */ usage.method === 'get'); const indexTypeResult = evaluator.getTypeOfExpression(node.items[0].valueExpression); const indexType = indexTypeResult.type; @@ -716,7 +805,7 @@ export function getTypeFromIndexedTypedDict( allDiagsInvolveNotRequiredKeys = false; return UnknownType.create(); } else if (!(entry.isRequired || entry.isProvided) && usage.method === 'get') { - if (!ParseTreeUtils.isWithinTryBlock(node)) { + if (!ParseTreeUtils.isWithinTryBlock(node, /* treatWithAsTryBlock */ true)) { diag.addMessage( Localizer.DiagnosticAddendum.keyNotRequired().format({ name: entryName, @@ -727,7 +816,9 @@ export function getTypeFromIndexedTypedDict( } if (usage.method === 'set') { - evaluator.canAssignType(entry.valueType, usage.setType || AnyType.create(), diag); + if (!evaluator.canAssignType(entry.valueType, usage.setType || AnyType.create(), diag)) { + allDiagsInvolveNotRequiredKeys = false; + } } else if (usage.method === 'del' && entry.isRequired) { diag.addMessage( Localizer.DiagnosticAddendum.keyRequiredDeleted().format({ diff --git a/packages/pyright-internal/src/analyzer/types.ts b/packages/pyright-internal/src/analyzer/types.ts index 3abae2d011e8..da3045616c13 100644 --- a/packages/pyright-internal/src/analyzer/types.ts +++ b/packages/pyright-internal/src/analyzer/types.ts @@ -66,8 +66,8 @@ export const enum TypeFlags { // (PEP 593) annotation. Annotated = 1 << 2, - // This type is a non-callable special type like "Union". - NonCallable = 1 << 3, + // This type is a special form like "UnionType". 
+ SpecialForm = 1 << 3, } export type UnionableType = @@ -90,10 +90,10 @@ export class EnumLiteral { constructor(public className: string, public itemName: string, public itemType: Type) {} } -export type LiteralValue = number | boolean | string | EnumLiteral; +export type LiteralValue = number | bigint | boolean | string | EnumLiteral; export type TypeSourceId = number; -export const maxTypeRecursionCount = 16; +export const maxTypeRecursionCount = 14; export type InheritanceChain = (ClassType | UnknownType)[]; @@ -114,6 +114,12 @@ interface TypeBase { // Used only for conditional (constrained) types condition?: TypeCondition[] | undefined; + + // This type is inferred within a py.typed source file and could be + // inferred differently by other type checkers. We don't model this + // with a TypeFlags because we don't want an ambiguous and unambiguous + // type to be seen as distinct when comparing types. + isAmbiguous?: boolean; } export namespace TypeBase { @@ -129,12 +135,20 @@ export namespace TypeBase { return (type.flags & TypeFlags.Annotated) !== 0; } - export function isNonCallable(type: TypeBase) { - return (type.flags & TypeFlags.NonCallable) !== 0; + export function isSpecialForm(type: TypeBase) { + return (type.flags & TypeFlags.SpecialForm) !== 0; + } + + export function setSpecialForm(type: TypeBase) { + return (type.flags |= TypeFlags.SpecialForm); } - export function setNonCallable(type: TypeBase) { - return (type.flags |= TypeFlags.NonCallable); + export function isAmbiguous(type: TypeBase) { + return !!type.isAmbiguous; + } + + export function cloneType(type: T): T { + return { ...type }; } export function cloneForTypeAlias( @@ -145,7 +159,7 @@ export namespace TypeBase { typeParams?: TypeVarType[], typeArgs?: Type[] ): Type { - const typeClone = { ...type }; + const typeClone = cloneType(type); typeClone.typeAliasInfo = { name, @@ -159,22 +173,32 @@ export namespace TypeBase { } export function cloneForAnnotated(type: Type) { - const typeClone = 
{ ...type }; + const typeClone = cloneType(type); typeClone.flags |= TypeFlags.Annotated; return typeClone; } - export function cloneForCondition(type: Type, condition: TypeCondition[] | undefined) { + export function cloneForCondition(type: T, condition: TypeCondition[] | undefined): T { // Handle the common case where there are no conditions. In this case, // cloning isn't necessary. if (type.condition === undefined && condition === undefined) { return type; } - const typeClone = { ...type }; + const typeClone = cloneType(type); typeClone.condition = condition; return typeClone; } + + export function cloneForAmbiguousType(type: Type) { + if (type.isAmbiguous) { + return type; + } + + const typeClone = cloneType(type); + typeClone.isAmbiguous = true; + return typeClone; + } } export interface UnboundType extends TypeBase { @@ -244,8 +268,18 @@ export namespace ModuleType { // will be overwritten by the module. let symbol = moduleType.fields.get(name); - if (!symbol && moduleType.loaderFields) { - symbol = moduleType.loaderFields.get(name); + if (moduleType.loaderFields) { + if (!symbol) { + symbol = moduleType.loaderFields.get(name); + } else if (symbol.isExternallyHidden()) { + // If the symbol is hidden when accessed via the module but is + // also accessible through a loader field, use the latter so it + // isn't flagged as an error. + const loaderSymbol = moduleType.loaderFields.get(name); + if (loaderSymbol && !loaderSymbol.isExternallyHidden()) { + symbol = loaderSymbol; + } + } } return symbol; } @@ -253,6 +287,7 @@ export namespace ModuleType { export interface DataClassEntry { name: string; + classType: ClassType; isClassVar: boolean; isKeywordOnly: boolean; alias?: string | undefined; @@ -359,12 +394,23 @@ export const enum ClassTypeFlags { // Class is declared within a type stub file. DefinedInStub = 1 << 23, + + // Class does not allow writing or deleting its instance variables + // through a member access. Used with named tuples. 
+ ReadOnlyInstanceVariables = 1 << 24, + + // For dataclasses, should __slots__ be generated? + GenerateDataClassSlots = 1 << 25, + + // For dataclasses, should __hash__ be generated? + SynthesizeDataClassUnsafeHash = 1 << 26, } export interface DataClassBehaviors { keywordOnlyParams: boolean; generateEq: boolean; generateOrder: boolean; + transformDescriptorTypes: boolean; fieldDescriptorNames: string[]; } @@ -389,8 +435,17 @@ interface ClassDetails { inheritedSlotsNames?: string[]; localSlotsNames?: string[]; - // Transforms to apply if this class is used as a metaclass. - metaclassDataClassTransform?: DataClassBehaviors | undefined; + // Transforms to apply if this class is used as a metaclass + // or a base class. + classDataClassTransform?: DataClassBehaviors | undefined; +} + +export interface TupleTypeArgument { + type: Type; + + // Does the type argument represent a single value or + // an "unbounded" (zero or more) arguments? + isUnbounded: boolean; } export interface ClassType extends TypeBase { @@ -408,6 +463,7 @@ export interface ClassType extends TypeBase { // that indicates how a type should be narrowed. This field will // be used only in a bool class. typeGuardType?: Type | undefined; + isStrictTypeGuard?: boolean; // If a generic container class (like a list or dict) is known // to contain no elements, its type arguments may be "Unknown". @@ -416,15 +472,16 @@ export interface ClassType extends TypeBase { isEmptyContainer?: boolean | undefined; // For tuples, the class definition calls for a single type parameter but - // the spec allows the programmer to provide variadic type arguments. - // To make these compatible, we need to derive a single typeArgument value - // based on the variadic arguments. - tupleTypeArguments?: Type[] | undefined; + // the spec allows the programmer to provide an arbitrary number of + // type arguments. 
This field holds the individual type arguments + // while the "typeArguments" field holds the derived non-variadic + // type argument, which is the union of the tuple type arguments. + tupleTypeArguments?: TupleTypeArgument[] | undefined; // We sometimes package multiple types into a tuple internally - // for matching against a variadic type variable. We need to be - // able to distinguish this case from normal tuples. - isTupleForUnpackedVariadicTypeVar?: boolean | undefined; + // for matching against a variadic type variable or another unpacked + // tuple. We need to be able to distinguish this case from normal tuples. + isUnpacked?: boolean | undefined; // If type arguments are present, were they explicit (i.e. // provided explicitly in the code)? @@ -449,6 +506,11 @@ export interface ClassType extends TypeBase { // that are not required have been confirmed to be present // through the use of a guard expression. typedDictNarrowedEntries?: Map | undefined; + + // Indicates whether the class is an asymmetric descriptor + // or property - one where the __get__ and __set__ types differ. + // If undefined, it hasn't been tested yet for asymmetry. 
+ isAsymmetricDescriptor?: boolean; } export namespace ClassType { @@ -486,24 +548,24 @@ export namespace ClassType { return newClass; } - export function cloneAsInstance(classType: ClassType) { + export function cloneAsInstance(classType: ClassType): ClassType { if (TypeBase.isInstance(classType)) { return classType; } - const objectType = { ...classType }; - objectType.flags &= ~(TypeFlags.Instantiable | TypeFlags.NonCallable); + const objectType = TypeBase.cloneType(classType); + objectType.flags &= ~(TypeFlags.Instantiable | TypeFlags.SpecialForm); objectType.flags |= TypeFlags.Instance; objectType.includeSubclasses = true; return objectType; } - export function cloneAsInstantiable(objectType: ClassType) { + export function cloneAsInstantiable(objectType: ClassType): ClassType { if (TypeBase.isInstantiable(objectType)) { return objectType; } - const classType = { ...objectType }; + const classType = TypeBase.cloneType(objectType); classType.flags &= ~TypeFlags.Instance; classType.flags |= TypeFlags.Instantiable; return classType; @@ -514,14 +576,14 @@ export namespace ClassType { typeArguments: Type[] | undefined, isTypeArgumentExplicit: boolean, includeSubclasses = false, - tupleTypeArguments?: Type[], + tupleTypeArguments?: TupleTypeArgument[], isEmptyContainer?: boolean ): ClassType { - const newClassType = { ...classType }; + const newClassType = TypeBase.cloneType(classType); // Never should never appear as a type argument, so replace it with newClassType.typeArguments = typeArguments - ? typeArguments.map((t) => (isNever(t) ? UnknownType.create() : t)) + ? typeArguments.map((t) => (isNever(t) && !t.isNoReturn ? UnknownType.create() : t)) : undefined; newClassType.isTypeArgumentExplicit = isTypeArgumentExplicit; @@ -529,7 +591,9 @@ export namespace ClassType { newClassType.includeSubclasses = true; } newClassType.tupleTypeArguments = tupleTypeArguments - ? tupleTypeArguments.map((t) => (isNever(t) ? UnknownType.create() : t)) + ? 
tupleTypeArguments.map((t) => + isNever(t.type) ? { type: UnknownType.create(), isUnbounded: t.isUnbounded } : t + ) : undefined; if (isEmptyContainer !== undefined) { @@ -540,13 +604,13 @@ export namespace ClassType { } export function cloneWithLiteral(classType: ClassType, value: LiteralValue | undefined): ClassType { - const newClassType = { ...classType }; + const newClassType = TypeBase.cloneType(classType); newClassType.literalValue = value; return newClassType; } export function cloneForTypingAlias(classType: ClassType, aliasName: string): ClassType { - const newClassType = { ...classType }; + const newClassType = TypeBase.cloneType(classType); newClassType.aliasName = aliasName; return newClassType; } @@ -554,26 +618,46 @@ export namespace ClassType { export function cloneForNarrowedTypedDictEntries( classType: ClassType, narrowedEntries?: Map - ) { - const newClassType = { ...classType }; + ): ClassType { + const newClassType = TypeBase.cloneType(classType); newClassType.typedDictNarrowedEntries = narrowedEntries; return newClassType; } export function cloneWithNewTypeParameters(classType: ClassType, typeParams: TypeVarType[]): ClassType { - const newClassType = { ...classType }; + const newClassType = TypeBase.cloneType(classType); newClassType.details = { ...newClassType.details }; newClassType.details.typeParameters = typeParams; return newClassType; } - export function cloneForTypeGuard(classType: ClassType, typeGuardType: Type): ClassType { - const newClassType = { ...classType }; + export function cloneForTypeGuard( + classType: ClassType, + typeGuardType: Type, + isStrictTypeGuard: boolean + ): ClassType { + const newClassType = TypeBase.cloneType(classType); newClassType.typeGuardType = typeGuardType; + newClassType.isStrictTypeGuard = isStrictTypeGuard; return newClassType; } - export function isLiteralValueSame(type1: ClassType, type2: ClassType) { + export function cloneForSymbolTableUpdate(classType: ClassType): ClassType { + const newClassType 
= TypeBase.cloneType(classType); + newClassType.details = { ...newClassType.details }; + newClassType.details.fields = new Map(newClassType.details.fields); + newClassType.details.mro = [...newClassType.details.mro]; + newClassType.details.mro[0] = cloneAsInstantiable(newClassType); + return newClassType; + } + + export function cloneForUnpacked(classType: ClassType, isUnpacked = true): ClassType { + const newClassType = TypeBase.cloneType(classType); + newClassType.isUnpacked = isUnpacked; + return newClassType; + } + + export function isLiteralValueSame(type1: ClassType, type2: ClassType): boolean { if (type1.literalValue === undefined) { return type2.literalValue === undefined; } else if (type2.literalValue === undefined) { @@ -590,9 +674,8 @@ export namespace ClassType { return type1.literalValue === type2.literalValue; } - // Specifies whether the class type is generic (unspecialized) - // or specialized. - export function isGeneric(classType: ClassType) { + // Is the class generic but not specialized? + export function isUnspecialized(classType: ClassType) { return classType.details.typeParameters.length > 0 && classType.typeArguments === undefined; } @@ -608,13 +691,17 @@ export namespace ClassType { return true; } - export function isBuiltIn(classType: ClassType, className?: string) { + export function isBuiltIn(classType: ClassType, className?: string | string[]) { if (!(classType.details.flags & ClassTypeFlags.BuiltInClass)) { return false; } if (className !== undefined) { - return classType.details.name === className || classType.aliasName === className; + const classArray = Array.isArray(className) ? 
className : [className]; + return ( + classArray.some((name) => name === classType.details.name) || + classArray.some((name) => name === classType.aliasName) + ); } return true; @@ -648,6 +735,14 @@ export namespace ClassType { return !!(classType.details.flags & ClassTypeFlags.DataClassKeywordOnlyParams); } + export function isGeneratedDataClassSlots(classType: ClassType) { + return !!(classType.details.flags & ClassTypeFlags.GenerateDataClassSlots); + } + + export function isSynthesizeDataClassUnsafeHash(classType: ClassType) { + return !!(classType.details.flags & ClassTypeFlags.SynthesizeDataClassUnsafeHash); + } + export function isTypedDictClass(classType: ClassType) { return !!(classType.details.flags & ClassTypeFlags.TypedDictClass); } @@ -708,6 +803,10 @@ export namespace ClassType { return !!(classType.details.flags & ClassTypeFlags.TupleClass); } + export function isReadOnlyInstanceVariables(classType: ClassType) { + return !!(classType.details.flags & ClassTypeFlags.ReadOnlyInstanceVariables); + } + export function getTypeParameters(classType: ClassType) { return classType.details.typeParameters; } @@ -721,6 +820,7 @@ export namespace ClassType { if (recursionCount > maxTypeRecursionCount) { return true; } + recursionCount++; // If the class details match, it's definitely the same class. 
if (classType.details === type2.details) { @@ -764,7 +864,8 @@ export namespace ClassType { class1Details.baseClasses[i], class2Details.baseClasses[i], /* ignorePseudoGeneric */ true, - recursionCount + 1 + /* ignoreTypeFlags */ undefined, + recursionCount ) ) { return false; @@ -779,7 +880,8 @@ export namespace ClassType { class1Details.declaredMetaclass, class2Details.declaredMetaclass, /* ignorePseudoGeneric */ true, - recursionCount + 1 + /* ignoreTypeFlags */ undefined, + recursionCount ) ) { return false; @@ -792,7 +894,8 @@ export namespace ClassType { class1Details.typeParameters[i], class2Details.typeParameters[i], /* ignorePseudoGeneric */ true, - recursionCount + 1 + /* ignoreTypeFlags */ undefined, + recursionCount ) ) { return false; @@ -850,17 +953,20 @@ export namespace ClassType { } } -export interface FunctionParameter { +export interface ParamSpecEntry { category: ParameterCategory; name?: string | undefined; - isNameSynthesized?: boolean | undefined; - isTypeInferred?: boolean | undefined; + isNameSynthesized?: boolean; hasDefault?: boolean | undefined; + type: Type; +} + +export interface FunctionParameter extends ParamSpecEntry { + isTypeInferred?: boolean | undefined; defaultValueExpression?: ExpressionNode | undefined; defaultType?: Type | undefined; hasDeclaredType?: boolean | undefined; typeAnnotation?: ExpressionNode | undefined; - type: Type; } export const enum FunctionTypeFlags { @@ -918,9 +1024,10 @@ export const enum FunctionTypeFlags { // Function has one or more parameters that are missing type annotations UnannotatedParams = 1 << 14, - // Any collection of parameters will match this function. This is used - // for Callable[..., x]. - SkipParamCompatibilityCheck = 1 << 15, + // The *args and **kwargs parameters do not need to be present for this + // function to be compatible. This is used for Callable[..., x] and + // ... type arguments to ParamSpec and Concatenate. 
+ SkipArgsKwargsCompatibilityCheck = 1 << 15, // This function represents the value bound to a ParamSpec, so its return // type is not meaningful. @@ -975,26 +1082,19 @@ export interface FunctionType extends TypeBase { // the class or object to which the function was bound. boundToType?: ClassType | undefined; + // The flags for the function prior to binding + preBoundFlags?: FunctionTypeFlags; + // The type var scope for the class that the function was bound to boundTypeVarScopeId?: TypeVarScopeId | undefined; } -export interface ParamSpecEntry { - category: ParameterCategory; - name?: string | undefined; - hasDefault: boolean; - type: Type; -} - export interface ParamSpecValue { - concrete?: { - flags: FunctionTypeFlags; - parameters: ParamSpecEntry[]; - }; - - // If the param spec is assigned to another param spec, - // this will contain that type, and concrete will be undefined. - paramSpec?: TypeVarType | undefined; + flags: FunctionTypeFlags; + parameters: ParamSpecEntry[]; + typeVarScopeId: TypeVarScopeId | undefined; + docString: string | undefined; + paramSpec: TypeVarType | undefined; } export namespace FunctionType { @@ -1059,6 +1159,8 @@ export namespace FunctionType { ); newFunction.details = { ...type.details }; + newFunction.boundToType = boundToType; + newFunction.preBoundFlags = newFunction.details.flags; if (stripFirstParam) { if ( @@ -1075,8 +1177,6 @@ export namespace FunctionType { stripFirstParam = false; } - newFunction.boundToType = boundToType; - // If we strip off the first parameter, this is no longer an // instance method or class method. 
newFunction.details.flags &= ~(FunctionTypeFlags.ConstructorMethod | FunctionTypeFlags.ClassMethod); @@ -1102,18 +1202,20 @@ export namespace FunctionType { return newFunction; } - export function cloneAsInstance(type: FunctionType) { + export function cloneAsInstance(type: FunctionType): FunctionType { assert(TypeBase.isInstantiable(type)); - const newInstance: FunctionType = { ...type }; - newInstance.flags &= ~(TypeFlags.Instantiable | TypeFlags.NonCallable); + + const newInstance: FunctionType = TypeBase.cloneType(type); + newInstance.flags &= ~(TypeFlags.Instantiable | TypeFlags.SpecialForm); newInstance.flags |= TypeFlags.Instance; return newInstance; } - export function cloneAsInstantiable(type: FunctionType) { + export function cloneAsInstantiable(type: FunctionType): FunctionType { assert(TypeBase.isInstance(type)); - const newInstance: FunctionType = { ...type }; - newInstance.flags &= ~(TypeFlags.Instance | TypeFlags.NonCallable); + + const newInstance: FunctionType = TypeBase.cloneType(type); + newInstance.flags &= ~(TypeFlags.Instance | TypeFlags.SpecialForm); newInstance.flags |= TypeFlags.Instantiable; return newInstance; } @@ -1147,7 +1249,7 @@ export namespace FunctionType { } // Creates a new function based on the parameters of another function. - export function cloneForParamSpec(type: FunctionType, paramTypes: ParamSpecValue | undefined) { + export function cloneForParamSpec(type: FunctionType, paramSpecValue: ParamSpecValue | undefined): FunctionType { const newFunction = create( type.details.name, type.details.fullName, @@ -1166,81 +1268,106 @@ export namespace FunctionType { // since we're replacing it. 
delete newFunction.details.paramSpec; - if (paramTypes) { - if (paramTypes.concrete) { - newFunction.details.parameters = [ - ...type.details.parameters, - ...paramTypes.concrete.parameters.map((specEntry) => { - return { - category: specEntry.category, - name: specEntry.name, - hasDefault: specEntry.hasDefault, - isNameSynthesized: false, - hasDeclaredType: true, - type: specEntry.type, - }; - }), - ]; - - newFunction.details.flags = - (paramTypes.concrete.flags & - (FunctionTypeFlags.ClassMethod | - FunctionTypeFlags.StaticMethod | - FunctionTypeFlags.ConstructorMethod)) | - FunctionTypeFlags.SynthesizedMethod; - - // Update the specialized parameter types as well. - if (newFunction.specializedTypes) { - paramTypes.concrete.parameters.forEach((paramInfo) => { - newFunction.specializedTypes!.parameterTypes.push(paramInfo.type); - }); - } - } else if (paramTypes.paramSpec) { - newFunction.details.paramSpec = paramTypes.paramSpec; + if (paramSpecValue) { + newFunction.details.parameters = [ + ...type.details.parameters, + ...paramSpecValue.parameters.map((specEntry) => { + return { + category: specEntry.category, + name: specEntry.name, + hasDefault: specEntry.hasDefault, + isNameSynthesized: specEntry.isNameSynthesized, + hasDeclaredType: true, + type: specEntry.type, + }; + }), + ]; + + if (!newFunction.details.docString) { + newFunction.details.docString = paramSpecValue.docString; + } + + newFunction.details.flags = + (paramSpecValue.flags & + (FunctionTypeFlags.ClassMethod | + FunctionTypeFlags.StaticMethod | + FunctionTypeFlags.ConstructorMethod | + FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck | + FunctionTypeFlags.ParamSpecValue)) | + FunctionTypeFlags.SynthesizedMethod; + + if (FunctionType.isParamSpecValue(type)) { + newFunction.details.flags |= FunctionTypeFlags.ParamSpecValue; + } + + // Update the specialized parameter types as well. 
+ if (newFunction.specializedTypes) { + paramSpecValue.parameters.forEach((paramInfo) => { + newFunction.specializedTypes!.parameterTypes.push(paramInfo.type); + }); } + + newFunction.details.paramSpec = paramSpecValue.paramSpec; } return newFunction; } - export function cloneForParamSpecApplication(type: FunctionType, paramTypes: ParamSpecValue) { - const newFunction = create( - type.details.name, - type.details.fullName, - type.details.moduleName, - type.details.flags, - type.flags, - type.details.docString - ); + export function cloneWithNewFlags(type: FunctionType, flags: FunctionTypeFlags): FunctionType { + const newFunction = TypeBase.cloneType(type); // Make a shallow clone of the details. newFunction.details = { ...type.details }; - if (paramTypes.concrete) { - // Remove the last two parameters, which are the *args and **kwargs. - newFunction.details.parameters = newFunction.details.parameters.slice( - 0, - newFunction.details.parameters.length - 2 - ); + newFunction.details.flags = flags; - paramTypes.concrete.parameters.forEach((specEntry) => { - newFunction.details.parameters.push({ - category: specEntry.category, - name: specEntry.name, - hasDefault: specEntry.hasDefault, - isNameSynthesized: false, - hasDeclaredType: true, - type: specEntry.type, - }); + return newFunction; + } + + export function cloneForParamSpecApplication(type: FunctionType, paramSpecValue: ParamSpecValue): FunctionType { + const newFunction = TypeBase.cloneType(type); + + // Make a shallow clone of the details. + newFunction.details = { ...type.details }; + + // Remove the last two parameters, which are the *args and **kwargs. + newFunction.details.parameters = newFunction.details.parameters.slice( + 0, + newFunction.details.parameters.length - 2 + ); + + // If there is a position-only separator in the captured param spec signature, + // remove the position-only separator in the existing signature. Otherwise, + // we'll end up with redundant position-only separators. 
+ if (paramSpecValue.parameters.some((entry) => entry.category === ParameterCategory.Simple && !entry.name)) { + if (newFunction.details.parameters.length > 0) { + const lastParam = newFunction.details.parameters[newFunction.details.parameters.length - 1]; + if (lastParam.category === ParameterCategory.Simple && !lastParam.name) { + newFunction.details.parameters.pop(); + } + } + } + + paramSpecValue.parameters.forEach((specEntry) => { + newFunction.details.parameters.push({ + category: specEntry.category, + name: specEntry.name, + hasDefault: specEntry.hasDefault, + isNameSynthesized: specEntry.isNameSynthesized, + hasDeclaredType: true, + type: specEntry.type, }); - } else if (paramTypes.paramSpec) { - newFunction.details.paramSpec = paramTypes.paramSpec; + }); + + newFunction.details.paramSpec = paramSpecValue.paramSpec; + if (!newFunction.details.docString) { + newFunction.details.docString = paramSpecValue.docString; } return newFunction; } - export function cloneRemoveParamSpecVariadics(type: FunctionType) { + export function cloneRemoveParamSpecVariadics(type: FunctionType, paramSpec: TypeVarType): FunctionType { const newFunction = create( type.details.name, type.details.fullName, @@ -1259,6 +1386,18 @@ export namespace FunctionType { newFunction.details.parameters.length - 2 ); + if (type.specializedTypes) { + newFunction.specializedTypes = { ...type.specializedTypes }; + newFunction.specializedTypes.parameterTypes = newFunction.specializedTypes.parameterTypes.slice( + 0, + newFunction.specializedTypes.parameterTypes.length - 2 + ); + } + + if (!newFunction.details.paramSpec) { + newFunction.details.paramSpec = paramSpec; + } + return newFunction; } @@ -1277,6 +1416,35 @@ export namespace FunctionType { }); } + // Indicates whether the input signature consists of (*args: Any, **kwargs: Any). 
+ export function hasDefaultParameters(functionType: FunctionType): boolean { + let sawArgs = false; + let sawKwargs = false; + + for (let i = 0; i < functionType.details.parameters.length; i++) { + const param = functionType.details.parameters[i]; + + // Ignore nameless separator parameters. + if (!param.name) { + continue; + } + + if (param.category === ParameterCategory.Simple) { + return false; + } else if (param.category === ParameterCategory.VarArgList) { + sawArgs = true; + } else if (param.category === ParameterCategory.VarArgDictionary) { + sawKwargs = true; + } + + if (!isAnyOrUnknown(FunctionType.getEffectiveParameterType(functionType, i))) { + return false; + } + } + + return sawArgs && sawKwargs; + } + export function isInstanceMethod(type: FunctionType): boolean { return ( (type.details.flags & @@ -1347,8 +1515,8 @@ export namespace FunctionType { return (type.details.flags & FunctionTypeFlags.UnannotatedParams) !== 0; } - export function shouldSkipParamCompatibilityCheck(type: FunctionType) { - return (type.details.flags & FunctionTypeFlags.SkipParamCompatibilityCheck) !== 0; + export function shouldSkipArgsKwargsCompatibilityCheck(type: FunctionType) { + return (type.details.flags & FunctionTypeFlags.SkipArgsKwargsCompatibilityCheck) !== 0; } export function isParamSpecValue(type: FunctionType) { @@ -1356,9 +1524,9 @@ export namespace FunctionType { } export function getEffectiveParameterType(type: FunctionType, index: number): Type { - assert(index < type.details.parameters.length); - if (type.specializedTypes) { - assert(index < type.specializedTypes.parameterTypes.length); + assert(index < type.details.parameters.length, 'Parameter types array overflow'); + + if (type.specializedTypes && index < type.specializedTypes.parameterTypes.length) { return type.specializedTypes.parameterTypes[index]; } @@ -1382,7 +1550,7 @@ export interface OverloadedFunctionType extends TypeBase { } export namespace OverloadedFunctionType { - export function 
create(overloads: FunctionType[] = []) { + export function create(overloads: FunctionType[]) { const newType: OverloadedFunctionType = { category: TypeCategory.OverloadedFunction, overloads, @@ -1422,17 +1590,29 @@ export namespace NoneType { export interface NeverType extends TypeBase { category: TypeCategory.Never; + isNoReturn: boolean; } export namespace NeverType { const _neverInstance: NeverType = { category: TypeCategory.Never, flags: TypeFlags.Instance | TypeFlags.Instantiable, + isNoReturn: false, }; - export function create() { + const _noReturnInstance: NeverType = { + category: TypeCategory.Never, + flags: TypeFlags.Instance | TypeFlags.Instantiable, + isNoReturn: true, + }; + + export function createNever() { return _neverInstance; } + + export function createNoReturn() { + return _noReturnInstance; + } } export interface AnyType extends TypeBase { @@ -1446,6 +1626,7 @@ export namespace AnyType { isEllipsis: false, flags: TypeFlags.Instance | TypeFlags.Instantiable, }; + const _ellipsisInstance: AnyType = { category: TypeCategory.Any, isEllipsis: true, @@ -1461,6 +1642,7 @@ export namespace AnyType { export interface TypeCondition { typeVarName: string; constraintIndex: number; + isConstrainedTypeVar: boolean; } export namespace TypeCondition { @@ -1558,7 +1740,8 @@ export interface UnionType extends TypeBase { category: TypeCategory.Union; subtypes: UnionableType[]; literalStrMap?: Map | undefined; - literalIntMap?: Map | undefined; + literalIntMap?: Map | undefined; + typeAliasSources?: Set; } export namespace UnionType { @@ -1593,9 +1776,9 @@ export namespace UnionType { newType.condition === undefined ) { if (unionType.literalIntMap === undefined) { - unionType.literalIntMap = new Map(); + unionType.literalIntMap = new Map(); } - unionType.literalIntMap.set(newType.literalValue as number, newType); + unionType.literalIntMap.set(newType.literalValue as number | bigint, newType); } unionType.flags &= newType.flags; @@ -1617,16 +1800,38 @@ export 
namespace UnionType { subtype.literalValue !== undefined && unionType.literalIntMap !== undefined ) { - return unionType.literalIntMap.has(subtype.literalValue as number); + return unionType.literalIntMap.has(subtype.literalValue as number | bigint); } } return ( unionType.subtypes.find((t) => - isTypeSame(t, subtype, /* ignorePseudoGeneric */ false, recursionCount + 1) + isTypeSame( + t, + subtype, + /* ignorePseudoGeneric */ undefined, + /* ignoreTypeFlags */ undefined, + recursionCount + ) ) !== undefined ); } + + export function addTypeAliasSource(unionType: UnionType, typeAliasSource: Type) { + if (typeAliasSource.category === TypeCategory.Union) { + const sourcesToAdd = typeAliasSource.typeAliasInfo ? [typeAliasSource] : typeAliasSource.typeAliasSources; + + if (sourcesToAdd) { + if (!unionType.typeAliasSources) { + unionType.typeAliasSources = new Set(); + } + + sourcesToAdd.forEach((source) => { + unionType.typeAliasSources!.add(source); + }); + } + } + } } export const enum Variance { @@ -1645,8 +1850,9 @@ export interface TypeVarDetails { // Internally created (e.g. for pseudo-generic classes) isSynthesized: boolean; - isSynthesizedSelfCls?: boolean | undefined; + isSynthesizedSelf?: boolean | undefined; synthesizedIndex?: number | undefined; + isExemptFromBoundCheck?: boolean; // Used for recursive type aliases. recursiveTypeAliasName?: string | undefined; @@ -1659,6 +1865,12 @@ export interface TypeVarDetails { export type ParamSpecAccess = 'args' | 'kwargs'; +export const enum TypeVarScopeType { + Class, + Function, + TypeAlias, +} + export interface TypeVarType extends TypeBase { category: TypeCategory.TypeVar; details: TypeVarDetails; @@ -1671,6 +1883,9 @@ export interface TypeVarType extends TypeBase { // so it should be used only for error messages. scopeName?: string | undefined; + // If the TypeVar is bound to a scope, this is the scope type. + scopeType?: TypeVarScopeType; + // String formatted as .. 
nameWithScope?: string | undefined; @@ -1694,33 +1909,41 @@ export namespace TypeVarType { return create(name, isParamSpec, TypeFlags.Instantiable); } - export function cloneAsInstance(type: TypeVarType) { + export function cloneAsInstance(type: TypeVarType): TypeVarType { assert(TypeBase.isInstantiable(type)); - const newInstance: TypeVarType = { ...type }; - newInstance.flags &= ~(TypeFlags.Instantiable | TypeFlags.NonCallable); + + const newInstance: TypeVarType = TypeBase.cloneType(type); + newInstance.flags &= ~(TypeFlags.Instantiable | TypeFlags.SpecialForm); newInstance.flags |= TypeFlags.Instance; return newInstance; } - export function cloneAsInstantiable(type: TypeVarType) { + export function cloneAsInstantiable(type: TypeVarType): TypeVarType { assert(TypeBase.isInstance(type)); - const newInstance: TypeVarType = { ...type }; - newInstance.flags &= ~(TypeFlags.Instance | TypeFlags.NonCallable); + + const newInstance: TypeVarType = TypeBase.cloneType(type); + newInstance.flags &= ~(TypeFlags.Instance | TypeFlags.SpecialForm); newInstance.flags |= TypeFlags.Instantiable; return newInstance; } - export function cloneForScopeId(type: TypeVarType, scopeId: string, scopeName: string) { - const newInstance: TypeVarType = { ...type }; + export function cloneForScopeId( + type: TypeVarType, + scopeId: string, + scopeName: string, + scopeType: TypeVarScopeType + ): TypeVarType { + const newInstance = TypeBase.cloneType(type); newInstance.nameWithScope = makeNameWithScope(type.details.name, scopeId); newInstance.scopeId = scopeId; newInstance.scopeName = scopeName; + newInstance.scopeType = scopeType; return newInstance; } export function cloneForUnpacked(type: TypeVarType, isInUnion = false) { assert(type.details.isVariadic); - const newInstance: TypeVarType = { ...type }; + const newInstance = TypeBase.cloneType(type); newInstance.isVariadicUnpacked = true; newInstance.isVariadicInUnion = isInUnion; return newInstance; @@ -1728,7 +1951,7 @@ export namespace 
TypeVarType { export function cloneForPacked(type: TypeVarType) { assert(type.details.isVariadic); - const newInstance: TypeVarType = { ...type }; + const newInstance = TypeBase.cloneType(type); newInstance.isVariadicUnpacked = false; newInstance.isVariadicInUnion = false; return newInstance; @@ -1736,7 +1959,7 @@ export namespace TypeVarType { // Creates a "simplified" version of the TypeVar with invariance // and no bound or constraints. ParamSpecs and variadics are left unmodified. - export function cloneAsInvariant(type: TypeVarType) { + export function cloneAsInvariant(type: TypeVarType): TypeVarType { if (type.details.isParamSpec || type.details.isVariadic) { return type; } @@ -1747,7 +1970,7 @@ export namespace TypeVarType { } } - const newInstance: TypeVarType = { ...type }; + const newInstance = TypeBase.cloneType(type); newInstance.details = { ...newInstance.details }; newInstance.details.variance = Variance.Invariant; newInstance.details.boundType = undefined; @@ -1755,17 +1978,25 @@ export namespace TypeVarType { return newInstance; } - export function cloneForParamSpecAccess(type: TypeVarType, access: ParamSpecAccess | undefined) { - const newInstance: TypeVarType = { ...type }; + export function cloneForParamSpecAccess(type: TypeVarType, access: ParamSpecAccess | undefined): TypeVarType { + const newInstance = TypeBase.cloneType(type); newInstance.paramSpecAccess = access; return newInstance; } + export function cloneAsSpecializedSelf(type: TypeVarType, specializedBoundType: Type): TypeVarType { + assert(type.details.isSynthesizedSelf); + const newInstance = TypeBase.cloneType(type); + newInstance.details = { ...newInstance.details }; + newInstance.details.boundType = specializedBoundType; + return newInstance; + } + export function makeNameWithScope(name: string, scopeId: string) { return `${name}.${scopeId}`; } - function create(name: string, isParamSpec: boolean, typeFlags: TypeFlags) { + function create(name: string, isParamSpec: boolean, 
typeFlags: TypeFlags): TypeVarType { const newTypeVarType: TypeVarType = { category: TypeCategory.TypeVar, details: { @@ -1803,8 +2034,12 @@ export function isNever(type: Type): type is NeverType { return type.category === TypeCategory.Never; } -export function isNone(type: Type): type is NoneType { - return type.category === TypeCategory.None; +export function isNoneInstance(type: Type): type is NoneType { + return type.category === TypeCategory.None && TypeBase.isInstance(type); +} + +export function isNoneTypeClass(type: Type): type is NoneType { + return type.category === TypeCategory.None && TypeBase.isInstantiable(type); } export function isAny(type: Type): type is AnyType { @@ -1878,6 +2113,18 @@ export function isUnpackedVariadicTypeVar(type: Type): boolean { return type.category === TypeCategory.TypeVar && type.details.isVariadic && !!type.isVariadicUnpacked; } +export function isUnpackedClass(type: Type): type is ClassType { + if (!isClass(type) || !type.isUnpacked) { + return false; + } + + return true; +} + +export function isUnpacked(type: Type): boolean { + return isUnpackedVariadicTypeVar(type) || isUnpackedClass(type); +} + export function isParamSpec(type: Type): type is TypeVarType { return type.category === TypeCategory.TypeVar && type.details.isParamSpec; } @@ -1910,7 +2157,13 @@ export function getTypeAliasInfo(type: Type) { // Determines whether two types are the same. If ignorePseudoGeneric is true, // type arguments for "pseudo-generic" classes (non-generic classes whose init // methods are not annotated and are therefore treated as generic) are ignored. 
-export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false, recursionCount = 0): boolean { +export function isTypeSame( + type1: Type, + type2: Type, + ignorePseudoGeneric = false, + ignoreTypeFlags = false, + recursionCount = 0 +): boolean { if (type1 === type2) { return true; } @@ -1919,20 +2172,21 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false return false; } - if (recursionCount > maxTypeRecursionCount) { - return true; + if (!ignoreTypeFlags && type1.flags !== type2.flags) { + return false; } - if (type1.flags !== type2.flags) { - return false; + if (recursionCount > maxTypeRecursionCount) { + return true; } + recursionCount++; switch (type1.category) { case TypeCategory.Class: { const classType2 = type2 as ClassType; // If the details are not the same it's not the same class. - if (!ClassType.isSameGenericClass(type1, classType2, recursionCount + 1)) { + if (!ClassType.isSameGenericClass(type1, classType2, recursionCount)) { return false; } @@ -1952,14 +2206,19 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false for (let i = 0; i < type1TupleTypeArgs.length; i++) { if ( !isTypeSame( - type1TupleTypeArgs[i], - type2TupleTypeArgs[i], + type1TupleTypeArgs[i].type, + type2TupleTypeArgs[i].type, ignorePseudoGeneric, - recursionCount + 1 + /* ignoreTypeFlags */ false, + recursionCount ) ) { return false; } + + if (type1TupleTypeArgs[i].isUnbounded !== type2TupleTypeArgs[i].isUnbounded) { + return false; + } } } else { const type1TypeArgs = type1.typeArguments || []; @@ -1971,7 +2230,15 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false const typeArg1 = i < type1TypeArgs.length ? type1TypeArgs[i] : AnyType.create(); const typeArg2 = i < type2TypeArgs.length ? 
type2TypeArgs[i] : AnyType.create(); - if (!isTypeSame(typeArg1, typeArg2, ignorePseudoGeneric, recursionCount + 1)) { + if ( + !isTypeSame( + typeArg1, + typeArg2, + ignorePseudoGeneric, + /* ignoreTypeFlags */ false, + recursionCount + ) + ) { return false; } } @@ -1995,6 +2262,13 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false return false; } + const positionalOnlyIndex1 = params1.findIndex( + (param) => param.category === ParameterCategory.Simple && !param.name + ); + const positionalOnlyIndex2 = params2.findIndex( + (param) => param.category === ParameterCategory.Simple && !param.name + ); + // Make sure the parameter details match. for (let i = 0; i < params1.length; i++) { const param1 = params1[i]; @@ -2004,13 +2278,30 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false return false; } - if (param1.name !== param2.name) { + const isName1Relevant = positionalOnlyIndex1 !== undefined && i >= positionalOnlyIndex1; + const isName2Relevant = positionalOnlyIndex2 !== undefined && i >= positionalOnlyIndex2; + + if (isName1Relevant !== isName2Relevant) { return false; } + if (isName1Relevant) { + if (param1.name !== param2.name) { + return false; + } + } + const param1Type = FunctionType.getEffectiveParameterType(type1, i); const param2Type = FunctionType.getEffectiveParameterType(functionType2, i); - if (!isTypeSame(param1Type, param2Type, ignorePseudoGeneric, recursionCount + 1)) { + if ( + !isTypeSame( + param1Type, + param2Type, + ignorePseudoGeneric, + /* ignoreTypeFlags */ false, + recursionCount + ) + ) { return false; } } @@ -2020,24 +2311,34 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false if (type1.specializedTypes && type1.specializedTypes.returnType) { return1Type = type1.specializedTypes.returnType; } + if (!return1Type && type1.inferredReturnType) { + return1Type = type1.inferredReturnType; + } + let return2Type = 
functionType2.details.declaredReturnType; if (functionType2.specializedTypes && functionType2.specializedTypes.returnType) { return2Type = functionType2.specializedTypes.returnType; } + if (!return2Type && functionType2.inferredReturnType) { + return2Type = functionType2.inferredReturnType; + } + if (return1Type || return2Type) { if ( !return1Type || !return2Type || - !isTypeSame(return1Type, return2Type, ignorePseudoGeneric, recursionCount + 1) + !isTypeSame( + return1Type, + return2Type, + ignorePseudoGeneric, + /* ignoreTypeFlags */ false, + recursionCount + ) ) { return false; } } - if (type1.details.declaration !== functionType2.details.declaration) { - return false; - } - return true; } @@ -2052,7 +2353,13 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false // in the same order from one analysis pass to another. for (let i = 0; i < type1.overloads.length; i++) { if ( - !isTypeSame(type1.overloads[i], functionType2.overloads[i], ignorePseudoGeneric, recursionCount + 1) + !isTypeSame( + type1.overloads[i], + functionType2.overloads[i], + ignorePseudoGeneric, + ignoreTypeFlags, + recursionCount + ) ) { return false; } @@ -2073,7 +2380,7 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false // The types do not have a particular order, so we need to // do the comparison in an order-independent manner. return ( - findSubtype(type1, (subtype) => !UnionType.containsType(unionType2, subtype, recursionCount + 1)) === + findSubtype(type1, (subtype) => !UnionType.containsType(unionType2, subtype, recursionCount)) === undefined ); } @@ -2087,17 +2394,27 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false // Handle the case where this is a generic recursive type alias. Make // sure that the type argument types match. 
- const type1TypeArgs = type1?.typeAliasInfo?.typeArguments || []; - const type2TypeArgs = type2?.typeAliasInfo?.typeArguments || []; - const typeArgCount = Math.max(type1TypeArgs.length, type2TypeArgs.length); - - for (let i = 0; i < typeArgCount; i++) { - // Assume that missing type args are "Any". - const typeArg1 = i < type1TypeArgs.length ? type1TypeArgs[i] : AnyType.create(); - const typeArg2 = i < type2TypeArgs.length ? type2TypeArgs[i] : AnyType.create(); - - if (!isTypeSame(typeArg1, typeArg2, ignorePseudoGeneric, recursionCount + 1)) { - return false; + if (type1.details.recursiveTypeParameters && type2TypeVar.details.recursiveTypeParameters) { + const type1TypeArgs = type1?.typeAliasInfo?.typeArguments || []; + const type2TypeArgs = type2?.typeAliasInfo?.typeArguments || []; + const typeArgCount = Math.max(type1TypeArgs.length, type2TypeArgs.length); + + for (let i = 0; i < typeArgCount; i++) { + // Assume that missing type args are "Any". + const typeArg1 = i < type1TypeArgs.length ? type1TypeArgs[i] : AnyType.create(); + const typeArg2 = i < type2TypeArgs.length ? 
type2TypeArgs[i] : AnyType.create(); + + if ( + !isTypeSame( + typeArg1, + typeArg2, + ignorePseudoGeneric, + /* ignoreTypeFlags */ false, + recursionCount + ) + ) { + return false; + } } } @@ -2110,7 +2427,8 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false type1.details.isParamSpec !== type2TypeVar.details.isParamSpec || type1.details.isVariadic !== type2TypeVar.details.isVariadic || type1.details.isSynthesized !== type2TypeVar.details.isSynthesized || - type1.details.variance !== type2TypeVar.details.variance + type1.details.variance !== type2TypeVar.details.variance || + type1.scopeId !== type2TypeVar.scopeId ) { return false; } @@ -2118,7 +2436,16 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false const boundType1 = type1.details.boundType; const boundType2 = type2TypeVar.details.boundType; if (boundType1) { - if (!boundType2 || !isTypeSame(boundType1, boundType2, ignorePseudoGeneric, recursionCount + 1)) { + if ( + !boundType2 || + !isTypeSame( + boundType1, + boundType2, + ignorePseudoGeneric, + /* ignoreTypeFlags */ false, + recursionCount + ) + ) { return false; } } else { @@ -2134,7 +2461,15 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false } for (let i = 0; i < constraints1.length; i++) { - if (!isTypeSame(constraints1[i], constraints2[i], ignorePseudoGeneric, recursionCount + 1)) { + if ( + !isTypeSame( + constraints1[i], + constraints2[i], + ignorePseudoGeneric, + /* ignoreTypeFlags */ false, + recursionCount + ) + ) { return false; } } @@ -2164,12 +2499,6 @@ export function isTypeSame(type1: Type, type2: Type, ignorePseudoGeneric = false return true; } -// If the type is a union, remove any "unknown" or "any" type -// from the union, returning only the known types. 
-export function removeAnyFromUnion(type: Type): Type { - return removeFromUnion(type, (t: Type) => isAnyOrUnknown(t)); -} - // If the type is a union, remove an "unknown" type from the union, // returning only the known types. export function removeUnknownFromUnion(type: Type): Type { @@ -2193,14 +2522,20 @@ export function removeUnbound(type: Type): Type { // If the type is a union, remove an "None" type from the union, // returning only the known types. export function removeNoneFromUnion(type: Type): Type { - return removeFromUnion(type, (t: Type) => isNone(t)); + return removeFromUnion(type, (t: Type) => isNoneInstance(t)); } export function removeFromUnion(type: Type, removeFilter: (type: Type) => boolean) { if (isUnion(type)) { const remainingTypes = type.subtypes.filter((t) => !removeFilter(t)); if (remainingTypes.length < type.subtypes.length) { - return combineTypes(remainingTypes); + const newType = combineTypes(remainingTypes); + + if (isUnion(newType)) { + UnionType.addTypeAliasSource(newType, type); + } + + return newType; } } @@ -2209,7 +2544,7 @@ export function removeFromUnion(type: Type, removeFilter: (type: Type) => boolea export function findSubtype(type: Type, filter: (type: UnionableType | NeverType) => boolean) { if (isUnion(type)) { - return type.subtypes.find((subtype, index) => { + return type.subtypes.find((subtype) => { return filter(subtype); }); } @@ -2222,10 +2557,16 @@ export function findSubtype(type: Type, filter: (type: UnionableType | NeverType // are combined into a UnionType. NeverTypes are filtered out. // If no types remain in the end, a NeverType is returned. export function combineTypes(subtypes: Type[], maxSubtypeCount?: number): Type { - // Filter out any "Never" types. - subtypes = subtypes.filter((subtype) => subtype.category !== TypeCategory.Never); + // Filter out any "Never" and "NoReturn" types. 
+ let sawNoReturn = false; + subtypes = subtypes.filter((subtype) => { + if (subtype.category === TypeCategory.Never && subtype.isNoReturn) { + sawNoReturn = true; + } + return subtype.category !== TypeCategory.Never; + }); if (subtypes.length === 0) { - return NeverType.create(); + return sawNoReturn ? NeverType.createNoReturn() : NeverType.createNever(); } // Handle the common case where there is only one type. @@ -2244,9 +2585,15 @@ export function combineTypes(subtypes: Type[], maxSubtypeCount?: number): Type { // Expand all union types. let expandedTypes: Type[] = []; + const typeAliasSources: UnionType[] = []; for (const subtype of subtypes) { if (isUnion(subtype)) { expandedTypes.push(...subtype.subtypes); + if (subtype.typeAliasInfo) { + typeAliasSources.push(subtype); + } else if (subtype.typeAliasSources) { + typeAliasSources.push(...subtype.typeAliasSources); + } } else { expandedTypes.push(subtype); } @@ -2282,6 +2629,13 @@ export function combineTypes(subtypes: Type[], maxSubtypeCount?: number): Type { } const newUnionType = UnionType.create(); + if (typeAliasSources.length > 0) { + newUnionType.typeAliasSources = new Set(); + typeAliasSources.forEach((source) => { + newUnionType.typeAliasSources!.add(source); + }); + } + let hitMaxSubtypeCount = false; expandedTypes.forEach((subtype, index) => { @@ -2351,7 +2705,7 @@ function _addTypeIfUnique(unionType: UnionType, typeToAdd: UnionableType) { typeToAdd.literalValue !== undefined && unionType.literalIntMap !== undefined ) { - if (!unionType.literalIntMap.has(typeToAdd.literalValue as number)) { + if (!unionType.literalIntMap.has(typeToAdd.literalValue as number | bigint)) { UnionType.addType(unionType, typeToAdd); } return; diff --git a/packages/pyright-internal/src/backgroundAnalysisBase.ts b/packages/pyright-internal/src/backgroundAnalysisBase.ts index 579debd6059f..e8b2777445b2 100644 --- a/packages/pyright-internal/src/backgroundAnalysisBase.ts +++ 
b/packages/pyright-internal/src/backgroundAnalysisBase.ts @@ -12,7 +12,7 @@ import { threadId } from './common/workersHost'; import { AnalysisCompleteCallback, AnalysisResults, analyzeProgram, nullCallback } from './analyzer/analysis'; import { ImportResolver } from './analyzer/importResolver'; -import { Indices, Program } from './analyzer/program'; +import { Indices, OpenFileOptions, Program } from './analyzer/program'; import { BackgroundThreadBase, createConfigOptionsFrom, @@ -126,21 +126,27 @@ export class BackgroundAnalysisBase { filePath: string, version: number | null, contents: TextDocumentContentChangeEvent[], - isTracked: boolean + options: OpenFileOptions ) { - this.enqueueRequest({ requestType: 'setFileOpened', data: { filePath, version, contents, isTracked } }); + this.enqueueRequest({ + requestType: 'setFileOpened', + data: { filePath, version, contents, options }, + }); } setFileClosed(filePath: string) { this.enqueueRequest({ requestType: 'setFileClosed', data: filePath }); } - markAllFilesDirty(evenIfContentsAreSame: boolean) { - this.enqueueRequest({ requestType: 'markAllFilesDirty', data: evenIfContentsAreSame }); + markAllFilesDirty(evenIfContentsAreSame: boolean, indexingNeeded: boolean) { + this.enqueueRequest({ requestType: 'markAllFilesDirty', data: { evenIfContentsAreSame, indexingNeeded } }); } - markFilesDirty(filePaths: string[], evenIfContentsAreSame: boolean) { - this.enqueueRequest({ requestType: 'markFilesDirty', data: { filePaths, evenIfContentsAreSame } }); + markFilesDirty(filePaths: string[], evenIfContentsAreSame: boolean, indexingNeeded: boolean) { + this.enqueueRequest({ + requestType: 'markFilesDirty', + data: { filePaths, evenIfContentsAreSame, indexingNeeded }, + }); } startAnalysis(indices: Indices | undefined, token: CancellationToken) { @@ -196,11 +202,17 @@ export class BackgroundAnalysisBase { this.enqueueRequest({ requestType, data: cancellationId, port: port2 }); } - startIndexing(configOptions: ConfigOptions, kind: 
HostKind, indices: Indices) { + startIndexing( + indexOptions: IndexOptions, + configOptions: ConfigOptions, + importResolver: ImportResolver, + kind: HostKind, + indices: Indices + ) { /* noop */ } - refreshIndexing(configOptions: ConfigOptions, kind: HostKind, indices?: Indices) { + refreshIndexing(configOptions: ConfigOptions, importResolver: ImportResolver, kind: HostKind, indices?: Indices) { /* noop */ } @@ -255,8 +267,8 @@ export class BackgroundAnalysisBase { port1.close(); } - invalidateAndForceReanalysis() { - this.enqueueRequest({ requestType: 'invalidateAndForceReanalysis', data: null }); + invalidateAndForceReanalysis(rebuildUserFileIndexing: boolean) { + this.enqueueRequest({ requestType: 'invalidateAndForceReanalysis', data: rebuildUserFileIndexing }); } restart() { @@ -446,8 +458,8 @@ export abstract class BackgroundAnalysisRunnerBase extends BackgroundThreadBase } case 'setFileOpened': { - const { filePath, version, contents, isTracked } = msg.data; - this.program.setFileOpened(filePath, version, contents, isTracked); + const { filePath, version, contents, options } = msg.data; + this.program.setFileOpened(filePath, version, contents, options); break; } @@ -458,13 +470,14 @@ export abstract class BackgroundAnalysisRunnerBase extends BackgroundThreadBase } case 'markAllFilesDirty': { - this.program.markAllFilesDirty(msg.data); + const { evenIfContentsAreSame, indexingNeeded } = msg.data; + this.program.markAllFilesDirty(evenIfContentsAreSame, indexingNeeded); break; } case 'markFilesDirty': { - const { filePaths, evenIfContentsAreSame } = msg.data; - this.program.markFilesDirty(filePaths, evenIfContentsAreSame); + const { filePaths, evenIfContentsAreSame, indexingNeeded } = msg.data; + this.program.markFilesDirty(filePaths, evenIfContentsAreSame, indexingNeeded); break; } @@ -474,7 +487,7 @@ export abstract class BackgroundAnalysisRunnerBase extends BackgroundThreadBase this._importResolver.invalidateCache(); // Mark all files with one or more 
errors dirty. - this.program.markAllFilesDirty(true); + this.program.markAllFilesDirty(/* evenIfContentsAreSame */ true, /* indexingNeeded */ msg.data); break; } @@ -622,3 +635,7 @@ export interface AnalysisResponse { requestType: 'log' | 'telemetry' | 'analysisResult' | 'analysisPaused' | 'indexResult' | 'analysisDone'; data: any; } + +export interface IndexOptions { + packageDepths: [string, number][]; +} diff --git a/packages/pyright-internal/src/backgroundThreadBase.ts b/packages/pyright-internal/src/backgroundThreadBase.ts index f11f51b3506b..a04bff3b10c1 100644 --- a/packages/pyright-internal/src/backgroundThreadBase.ts +++ b/packages/pyright-internal/src/backgroundThreadBase.ts @@ -136,6 +136,7 @@ export interface InitializationData { rootDirectory: string; cancellationFolderName: string | undefined; runner: string | undefined; + title?: string; } export interface RequestResponse { diff --git a/packages/pyright-internal/src/commands/commandResult.ts b/packages/pyright-internal/src/commands/commandResult.ts index 349bb73fc850..431cd08df20b 100644 --- a/packages/pyright-internal/src/commands/commandResult.ts +++ b/packages/pyright-internal/src/commands/commandResult.ts @@ -10,6 +10,7 @@ import { WorkspaceEdit } from 'vscode-languageserver-types'; export interface CommandResult { data?: any; + label: string; edits: WorkspaceEdit; } diff --git a/packages/pyright-internal/src/commands/createTypeStub.ts b/packages/pyright-internal/src/commands/createTypeStub.ts index 56e400d6a6f5..a92a6ab490c4 100644 --- a/packages/pyright-internal/src/commands/createTypeStub.ts +++ b/packages/pyright-internal/src/commands/createTypeStub.ts @@ -8,11 +8,8 @@ import { CancellationToken, ExecuteCommandParams } from 'vscode-languageserver'; -import { AnalyzerService } from '../analyzer/service'; import { OperationCanceledException } from '../common/cancellationUtils'; -import { createDeferred } from '../common/deferred'; -import { convertPathToUri } from '../common/pathUtils'; -import 
{ LanguageServerInterface, WorkspaceServiceInstance } from '../languageServerBase'; +import { LanguageServerInterface } from '../languageServerBase'; import { AnalyzerServiceExecutor } from '../languageService/analyzerServiceExecutor'; import { ServerCommand } from './commandController'; @@ -25,28 +22,20 @@ export class CreateTypeStubCommand implements ServerCommand { const importName = cmdParams.arguments[1]; const callingFile = cmdParams.arguments[2]; - const service = await this._createTypeStubService(callingFile); - - // Allocate a temporary pseudo-workspace to perform this job. - const workspace: WorkspaceServiceInstance = { - workspaceName: `Create Type Stub ${importName}`, - rootPath: workspaceRoot, - rootUri: convertPathToUri(this._ls.fs, workspaceRoot), - serviceInstance: service, - disableLanguageServices: true, - disableOrganizeImports: true, - isInitialized: createDeferred(), - }; - - const serverSettings = await this._ls.getSettings(workspace); - AnalyzerServiceExecutor.runWithOptions(this._ls.rootPath, workspace, serverSettings, importName, false); + const service = await AnalyzerServiceExecutor.cloneService( + this._ls, + await this._ls.getWorkspaceForFile(callingFile ?? workspaceRoot), + importName, + this._ls.createBackgroundAnalysis() + ); try { await service.writeTypeStubInBackground(token); service.dispose(); + const infoMessage = `Type stub was successfully created for '${importName}'.`; this._ls.window.showInformationMessage(infoMessage); - this._handlePostCreateTypeStub(); + this._ls.reanalyze(); } catch (err) { const isCancellation = OperationCanceledException.is(err); if (isCancellation) { @@ -64,24 +53,4 @@ export class CreateTypeStubCommand implements ServerCommand { } } } - - // Creates a service instance that's used for creating type - // stubs for a specified target library. 
- private async _createTypeStubService(callingFile?: string): Promise { - if (callingFile) { - // this should let us to inherit all execution env of the calling file - // if it is invoked from IDE through code action - const workspace = await this._ls.getWorkspaceForFile(callingFile); - - // new service has its own background analysis running on its own thread - // to not block main bg running background analysis - return workspace.serviceInstance.clone('Type stub', this._ls.createBackgroundAnalysis()); - } - - return new AnalyzerService('Type stub', this._ls.fs, this._ls.console); - } - - private _handlePostCreateTypeStub() { - this._ls.reanalyze(); - } } diff --git a/packages/pyright-internal/src/commands/quickActionCommand.ts b/packages/pyright-internal/src/commands/quickActionCommand.ts index 5a3be3bab190..477d8350ac46 100644 --- a/packages/pyright-internal/src/commands/quickActionCommand.ts +++ b/packages/pyright-internal/src/commands/quickActionCommand.ts @@ -8,7 +8,6 @@ import { CancellationToken, ExecuteCommandParams } from 'vscode-languageserver'; -import { convertUriToPath } from '../common/pathUtils'; import { convertTextEdits } from '../common/textEditUtils'; import { LanguageServerInterface } from '../languageServerBase'; import { ServerCommand } from './commandController'; @@ -21,7 +20,7 @@ export class QuickActionCommand implements ServerCommand { if (params.arguments && params.arguments.length >= 1) { const docUri = params.arguments[0]; const otherArgs = params.arguments.slice(1); - const filePath = convertUriToPath(this._ls.fs, docUri); + const filePath = this._ls.decodeTextDocumentUri(docUri); const workspace = await this._ls.getWorkspaceForFile(filePath); if (params.command === Commands.orderImports && workspace.disableOrganizeImports) { diff --git a/packages/pyright-internal/src/common/chokidarFileWatcherProvider.ts b/packages/pyright-internal/src/common/chokidarFileWatcherProvider.ts new file mode 100644 index 000000000000..0ba229025ba8 --- 
/dev/null +++ b/packages/pyright-internal/src/common/chokidarFileWatcherProvider.ts @@ -0,0 +1,74 @@ +/* + * chokidarFileWatcherProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Implements a FileWatcherProvider using chokidar. + */ + +import * as chokidar from 'chokidar'; + +import { ConsoleInterface } from './console'; +import { FileWatcher, FileWatcherEventHandler, FileWatcherEventType, FileWatcherProvider } from './fileSystem'; + +const _isMacintosh = process.platform === 'darwin'; +const _isLinux = process.platform === 'linux'; + +export class ChokidarFileWatcherProvider implements FileWatcherProvider { + constructor(private _console?: ConsoleInterface) {} + + createFileWatcher(paths: string[], listener: FileWatcherEventHandler): FileWatcher { + return this._createFileSystemWatcher(paths).on('all', listener); + } + + private _createFileSystemWatcher(paths: string[]): chokidar.FSWatcher { + // The following options are copied from VS Code source base. It also + // uses chokidar for its file watching. + const watcherOptions: chokidar.WatchOptions = { + ignoreInitial: true, + ignorePermissionErrors: true, + followSymlinks: true, // this is the default of chokidar and supports file events through symlinks + interval: 1000, // while not used in normal cases, if any error causes chokidar to fallback to polling, increase its intervals + binaryInterval: 1000, + disableGlobbing: true, // fix https://github.com/Microsoft/vscode/issues/4586 + awaitWriteFinish: { + // this will make sure we re-scan files once file changes are written to disk + stabilityThreshold: 1000, + pollInterval: 1000, + }, + }; + + if (_isMacintosh) { + // Explicitly disable on MacOS because it uses up large amounts of memory + // and CPU for large file hierarchies, resulting in instability and crashes. 
+ watcherOptions.usePolling = false; + } + + const excludes: string[] = ['**/__pycache__/**']; + if (_isMacintosh || _isLinux) { + if (paths.some((path) => path === '' || path === '/')) { + excludes.push('/dev/**'); + if (_isLinux) { + excludes.push('/proc/**', '/sys/**'); + } + } + } + watcherOptions.ignored = excludes; + + const watcher = chokidar.watch(paths, watcherOptions); + watcher.on('error', (_) => { + this._console?.error('Error returned from file system watcher.'); + }); + + // Detect if for some reason the native watcher library fails to load + if (_isMacintosh && !watcher.options.useFsEvents) { + this._console?.info('Watcher could not use native fsevents library. File system watcher disabled.'); + } + + return watcher; + } + + onFileChange(_1: FileWatcherEventType, _2: string): void { + // Do nothing. + } +} diff --git a/packages/pyright-internal/src/common/collectionUtils.ts b/packages/pyright-internal/src/common/collectionUtils.ts index ba4dc50c174a..80fcbd739838 100644 --- a/packages/pyright-internal/src/common/collectionUtils.ts +++ b/packages/pyright-internal/src/common/collectionUtils.ts @@ -313,3 +313,51 @@ export function getOrAdd(map: Map, key: K, newValueFactory: () => V) return newValue; } + +/** + * Remove matching item from the array in place. + * Returns the given array itself. + * @param array The array to operate on. + * @param predicate Return true for an item to delete. + */ +export function removeArrayElements(array: T[], predicate: (item: T) => boolean): T[] { + for (let i = 0; i < array.length; i++) { + if (predicate(array[i])) { + array.splice(i, 1); + + // Array is modified in place, we need to look at the same index again. 
+ i--; + } + } + + return array; +} + +export function createMapFromItems(items: T[], keyGetter: (t: T) => string) { + return items + .map((t) => keyGetter(t)) + .reduce((map, key, i) => { + map.set(key, (map.get(key) || []).concat(items[i])); + return map; + }, new Map()); +} + +export function addIfUnique(arr: T[], t: T, equalityComparer: EqualityComparer = equateValues): T[] { + if (contains(arr, t, equalityComparer)) { + return arr; + } + + arr.push(t); + return arr; +} + +export function getMapValues(m: Map, predicate: (k: K, v: V) => boolean): V[] { + const values: V[] = []; + m.forEach((v, k) => { + if (predicate(k, v)) { + values.push(v); + } + }); + + return values; +} diff --git a/packages/pyright-internal/src/common/commandLineOptions.ts b/packages/pyright-internal/src/common/commandLineOptions.ts index d363919d6da7..e08a627ef209 100644 --- a/packages/pyright-internal/src/common/commandLineOptions.ts +++ b/packages/pyright-internal/src/common/commandLineOptions.ts @@ -118,9 +118,15 @@ export class CommandLineOptions { // Use indexing. indexing?: boolean | undefined; - // Use type evaluator call tracking + // Use type evaluator call tracking. logTypeEvaluationTime = false; - // Minimum threshold for type eval logging + // Minimum threshold for type eval logging. typeEvaluationTimeThreshold = 50; + + // Run ambient analysis. + enableAmbientAnalysis = true; + + // Analyze functions and methods that have no type annotations? 
+ analyzeUnannotatedFunctions = true; } diff --git a/packages/pyright-internal/src/common/configOptions.ts b/packages/pyright-internal/src/common/configOptions.ts index f2628108b5b9..862ff08ad12a 100644 --- a/packages/pyright-internal/src/common/configOptions.ts +++ b/packages/pyright-internal/src/common/configOptions.ts @@ -44,7 +44,7 @@ export class ExecutionEnvironment { this.root = root || undefined; this.pythonVersion = defaultPythonVersion || latestStablePythonVersion; this.pythonPlatform = defaultPythonPlatform; - this.extraPaths = defaultExtraPaths || []; + this.extraPaths = [...(defaultExtraPaths ?? [])]; } // Root directory for execution - absolute or relative to the @@ -188,10 +188,16 @@ export interface DiagnosticRuleSet { // the base class symbol of the same name? reportIncompatibleVariableOverride: DiagnosticLevel; + // Report inconsistencies between __init__ and __new__ signatures. + reportInconsistentConstructor: DiagnosticLevel; + // Report function overloads that overlap in signature but have // incompatible return types. reportOverlappingOverload: DiagnosticLevel; + // Report failure to call super().__init__() in __init__ method. + reportMissingSuperCall: DiagnosticLevel; + // Report instance variables that are not initialized within // the constructor. reportUninitializedInstanceVariable: DiagnosticLevel; @@ -214,6 +220,9 @@ export interface DiagnosticRuleSet { // Report usage of unknown input or return parameters? reportUnknownMemberType: DiagnosticLevel; + // Report input parameters that are missing type annotations? + reportMissingParameterType: DiagnosticLevel; + // Report usage of generic class without explicit type arguments? reportMissingTypeArgument: DiagnosticLevel; @@ -268,6 +277,14 @@ export interface DiagnosticRuleSet { // Report cases where a call expression's return result is Coroutine // and is not used in any way. 
reportUnusedCoroutine: DiagnosticLevel; + + // Report cases where the removal of a "# type: ignore" comment would + // have no effect. + reportUnnecessaryTypeIgnoreComment: DiagnosticLevel; + + // Report cases where the a "match" statement is not exhaustive in + // covering all possible cases. + reportMatchNotExhaustive: DiagnosticLevel; } export function cloneDiagnosticRuleSet(diagSettings: DiagnosticRuleSet): DiagnosticRuleSet { @@ -275,20 +292,28 @@ export function cloneDiagnosticRuleSet(diagSettings: DiagnosticRuleSet): Diagnos return Object.assign({}, diagSettings); } -export function getBooleanDiagnosticRules() { - return [ +// Returns a list of the diagnostic rules that are configured with +// a true or false value. +export function getBooleanDiagnosticRules(includeNonOverridable = false) { + const boolRules = [ DiagnosticRule.strictListInference, DiagnosticRule.strictSetInference, DiagnosticRule.strictDictionaryInference, DiagnosticRule.strictParameterNoneValue, + ]; + if (includeNonOverridable) { // Do not include this this one because we don't // want to override it in strict mode or support // it within pyright comments. - // DiagnosticRule.enableTypeIgnoreComments - ]; + boolRules.push(DiagnosticRule.enableTypeIgnoreComments); + } + + return boolRules; } +// Returns a list of the diagnostic rules that are configured with +// a diagnostic level ('none', 'error', etc.). 
export function getDiagLevelDiagnosticRules() { return [ DiagnosticRule.reportGeneralTypeIssues, @@ -320,7 +345,9 @@ export function getDiagLevelDiagnosticRules() { DiagnosticRule.reportConstantRedefinition, DiagnosticRule.reportIncompatibleMethodOverride, DiagnosticRule.reportIncompatibleVariableOverride, + DiagnosticRule.reportInconsistentConstructor, DiagnosticRule.reportOverlappingOverload, + DiagnosticRule.reportMissingSuperCall, DiagnosticRule.reportUninitializedInstanceVariable, DiagnosticRule.reportInvalidStringEscapeSequence, DiagnosticRule.reportUnknownParameterType, @@ -328,6 +355,7 @@ export function getDiagLevelDiagnosticRules() { DiagnosticRule.reportUnknownLambdaType, DiagnosticRule.reportUnknownVariableType, DiagnosticRule.reportUnknownMemberType, + DiagnosticRule.reportMissingParameterType, DiagnosticRule.reportMissingTypeArgument, DiagnosticRule.reportInvalidTypeVarUse, DiagnosticRule.reportCallInDefaultInitializer, @@ -344,6 +372,8 @@ export function getDiagLevelDiagnosticRules() { DiagnosticRule.reportUnsupportedDunderAll, DiagnosticRule.reportUnusedCallResult, DiagnosticRule.reportUnusedCoroutine, + DiagnosticRule.reportUnnecessaryTypeIgnoreComment, + DiagnosticRule.reportMatchNotExhaustive, ]; } @@ -363,7 +393,7 @@ export function getOffDiagnosticRuleSet(): DiagnosticRuleSet { strictListInference: false, strictSetInference: false, strictDictionaryInference: false, - strictParameterNoneValue: false, + strictParameterNoneValue: true, enableTypeIgnoreComments: true, reportGeneralTypeIssues: 'none', reportPropertyTypeMismatch: 'none', @@ -394,7 +424,9 @@ export function getOffDiagnosticRuleSet(): DiagnosticRuleSet { reportConstantRedefinition: 'none', reportIncompatibleMethodOverride: 'none', reportIncompatibleVariableOverride: 'none', + reportInconsistentConstructor: 'none', reportOverlappingOverload: 'none', + reportMissingSuperCall: 'none', reportUninitializedInstanceVariable: 'none', reportInvalidStringEscapeSequence: 'none', 
reportUnknownParameterType: 'none', @@ -402,6 +434,7 @@ export function getOffDiagnosticRuleSet(): DiagnosticRuleSet { reportUnknownLambdaType: 'none', reportUnknownVariableType: 'none', reportUnknownMemberType: 'none', + reportMissingParameterType: 'none', reportMissingTypeArgument: 'none', reportInvalidTypeVarUse: 'none', reportCallInDefaultInitializer: 'none', @@ -418,6 +451,8 @@ export function getOffDiagnosticRuleSet(): DiagnosticRuleSet { reportUnsupportedDunderAll: 'none', reportUnusedCallResult: 'none', reportUnusedCoroutine: 'none', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'none', }; return diagSettings; @@ -433,10 +468,10 @@ export function getBasicDiagnosticRuleSet(): DiagnosticRuleSet { strictListInference: false, strictSetInference: false, strictDictionaryInference: false, - strictParameterNoneValue: false, + strictParameterNoneValue: true, enableTypeIgnoreComments: true, reportGeneralTypeIssues: 'error', - reportPropertyTypeMismatch: 'error', + reportPropertyTypeMismatch: 'none', reportFunctionMemberAccess: 'none', reportMissingImports: 'error', reportMissingModuleSource: 'warning', @@ -464,7 +499,9 @@ export function getBasicDiagnosticRuleSet(): DiagnosticRuleSet { reportConstantRedefinition: 'none', reportIncompatibleMethodOverride: 'none', reportIncompatibleVariableOverride: 'none', + reportInconsistentConstructor: 'none', reportOverlappingOverload: 'none', + reportMissingSuperCall: 'none', reportUninitializedInstanceVariable: 'none', reportInvalidStringEscapeSequence: 'warning', reportUnknownParameterType: 'none', @@ -472,6 +509,7 @@ export function getBasicDiagnosticRuleSet(): DiagnosticRuleSet { reportUnknownLambdaType: 'none', reportUnknownVariableType: 'none', reportUnknownMemberType: 'none', + reportMissingParameterType: 'none', reportMissingTypeArgument: 'none', reportInvalidTypeVarUse: 'warning', reportCallInDefaultInitializer: 'none', @@ -488,6 +526,8 @@ export function getBasicDiagnosticRuleSet(): 
DiagnosticRuleSet { reportUnsupportedDunderAll: 'warning', reportUnusedCallResult: 'none', reportUnusedCoroutine: 'error', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'none', }; return diagSettings; @@ -506,10 +546,10 @@ export function getStrictDiagnosticRuleSet(): DiagnosticRuleSet { strictParameterNoneValue: true, enableTypeIgnoreComments: true, // Not overridden by strict mode reportGeneralTypeIssues: 'error', - reportPropertyTypeMismatch: 'error', + reportPropertyTypeMismatch: 'none', reportFunctionMemberAccess: 'error', reportMissingImports: 'error', - reportMissingModuleSource: 'warning', + reportMissingModuleSource: 'warning', // Not overridden by strict mode reportMissingTypeStubs: 'error', reportImportCycles: 'error', reportUnusedImport: 'error', @@ -534,7 +574,9 @@ export function getStrictDiagnosticRuleSet(): DiagnosticRuleSet { reportConstantRedefinition: 'error', reportIncompatibleMethodOverride: 'error', reportIncompatibleVariableOverride: 'error', + reportInconsistentConstructor: 'error', reportOverlappingOverload: 'error', + reportMissingSuperCall: 'none', reportUninitializedInstanceVariable: 'none', reportInvalidStringEscapeSequence: 'error', reportUnknownParameterType: 'error', @@ -542,6 +584,7 @@ export function getStrictDiagnosticRuleSet(): DiagnosticRuleSet { reportUnknownLambdaType: 'error', reportUnknownVariableType: 'error', reportUnknownMemberType: 'error', + reportMissingParameterType: 'error', reportMissingTypeArgument: 'error', reportInvalidTypeVarUse: 'error', reportCallInDefaultInitializer: 'none', @@ -558,6 +601,8 @@ export function getStrictDiagnosticRuleSet(): DiagnosticRuleSet { reportUnsupportedDunderAll: 'error', reportUnusedCallResult: 'none', reportUnusedCoroutine: 'error', + reportUnnecessaryTypeIgnoreComment: 'none', + reportMatchNotExhaustive: 'error', }; return diagSettings; @@ -570,14 +615,6 @@ export class ConfigOptions { this.projectRoot = projectRoot; this.typeCheckingMode = 
typeCheckingMode; this.diagnosticRuleSet = ConfigOptions.getDiagnosticRuleSet(typeCheckingMode); - - // If type checking mode is off, allow inference for py.typed sources - // since there is little or no downside and possible upside of discovering - // more type information in this case. If type checking is enabled, using - // type inference in this case can result in false positive errors. - if (typeCheckingMode === 'off') { - this.disableInferenceForPyTypedSources = false; - } } // Absolute directory of project. All relative paths in the config @@ -640,16 +677,16 @@ export class ConfigOptions { // Minimum threshold for type eval logging typeEvaluationTimeThreshold = 50; - // Avoid using type inference for files within packages that claim - // to contain type annotations? - disableInferenceForPyTypedSources = true; - // Current type checking mode. typeCheckingMode?: string; // Was this config initialized from JSON (pyrightconfig/pyproject)? initializedFromJson = false; + // Should we skip analysis of all functions and methods that have + // no parameter ore return type annotations? + analyzeUnannotatedFunctions = true; + //--------------------------------------------------------------- // Diagnostics Rule Set @@ -689,6 +726,10 @@ export class ConfigOptions { // Run program in index generation mode. indexGenerationMode?: boolean | undefined; + // When a symbol cannot be resolved from an import, should it be + // treated as Any rather than Unknown? 
+ evaluateUnknownImportsAsAny?: boolean; + static getDiagnosticRuleSet(typeCheckingMode?: string): DiagnosticRuleSet { if (typeCheckingMode === 'strict') { return getStrictDiagnosticRuleSet(); @@ -846,427 +887,30 @@ export class ConfigOptions { this.typeCheckingMode = configTypeCheckingMode || typeCheckingMode; const defaultSettings = ConfigOptions.getDiagnosticRuleSet(this.typeCheckingMode); - if (this.typeCheckingMode === 'off') { - this.disableInferenceForPyTypedSources = false; - } - // Apply host provided overrides first and then overrides from the config file + // Start with the default values for all rules in the rule set. + this.diagnosticRuleSet = { ...defaultSettings }; + + // Apply host-provided overrides. this.applyDiagnosticOverrides(diagnosticOverrides); - this.diagnosticRuleSet = { - printUnknownAsAny: defaultSettings.printUnknownAsAny, - omitTypeArgsIfAny: defaultSettings.omitTypeArgsIfAny, - omitConditionalConstraint: defaultSettings.omitConditionalConstraint, - omitUnannotatedParamType: defaultSettings.omitUnannotatedParamType, - pep604Printing: defaultSettings.pep604Printing, - - // Use strict inference rules for list expressions? - strictListInference: this._convertBoolean( - configObj.strictListInference, - DiagnosticRule.strictListInference, - defaultSettings.strictListInference - ), - - // Use strict inference rules for set expressions? - strictSetInference: this._convertBoolean( - configObj.strictSetInference, - DiagnosticRule.strictSetInference, - defaultSettings.strictSetInference - ), - - // Use strict inference rules for dictionary expressions? - strictDictionaryInference: this._convertBoolean( - configObj.strictDictionaryInference, - DiagnosticRule.strictDictionaryInference, - defaultSettings.strictDictionaryInference - ), - - // Should a None default value imply that the parameter type - // is Optional? 
- strictParameterNoneValue: this._convertBoolean( - configObj.strictParameterNoneValue, - DiagnosticRule.strictParameterNoneValue, - defaultSettings.strictParameterNoneValue - ), - - // Should "# type: ignore" be honored? - enableTypeIgnoreComments: this._convertBoolean( - configObj.enableTypeIgnoreComments, - DiagnosticRule.enableTypeIgnoreComments, - defaultSettings.enableTypeIgnoreComments - ), - - // Read the "reportGeneralTypeIssues" entry. - reportGeneralTypeIssues: this._convertDiagnosticLevel( - configObj.reportGeneralTypeIssues, - DiagnosticRule.reportGeneralTypeIssues, - defaultSettings.reportGeneralTypeIssues - ), - - // Read the "reportPropertyTypeMismatch" entry. - reportPropertyTypeMismatch: this._convertDiagnosticLevel( - configObj.reportPropertyTypeMismatch, - DiagnosticRule.reportPropertyTypeMismatch, - defaultSettings.reportPropertyTypeMismatch - ), - - // Read the "reportFunctionMemberAccess" entry. - reportFunctionMemberAccess: this._convertDiagnosticLevel( - configObj.reportFunctionMemberAccess, - DiagnosticRule.reportFunctionMemberAccess, - defaultSettings.reportFunctionMemberAccess - ), - - // Read the "reportMissingImports" entry. - reportMissingImports: this._convertDiagnosticLevel( - configObj.reportMissingImports, - DiagnosticRule.reportMissingImports, - defaultSettings.reportMissingImports - ), - - // Read the "reportUnusedImport" entry. - reportUnusedImport: this._convertDiagnosticLevel( - configObj.reportUnusedImport, - DiagnosticRule.reportUnusedImport, - defaultSettings.reportUnusedImport - ), - - // Read the "reportUnusedClass" entry. - reportUnusedClass: this._convertDiagnosticLevel( - configObj.reportUnusedClass, - DiagnosticRule.reportUnusedClass, - defaultSettings.reportUnusedClass - ), - - // Read the "reportUnusedFunction" entry. 
- reportUnusedFunction: this._convertDiagnosticLevel( - configObj.reportUnusedFunction, - DiagnosticRule.reportUnusedFunction, - defaultSettings.reportUnusedFunction - ), - - // Read the "reportUnusedVariable" entry. - reportUnusedVariable: this._convertDiagnosticLevel( - configObj.reportUnusedVariable, - DiagnosticRule.reportUnusedVariable, - defaultSettings.reportUnusedVariable - ), - - // Read the "reportDuplicateImport" entry. - reportDuplicateImport: this._convertDiagnosticLevel( - configObj.reportDuplicateImport, - DiagnosticRule.reportDuplicateImport, - defaultSettings.reportDuplicateImport - ), - - // Read the "reportWildcardImportFromLibrary" entry. - reportWildcardImportFromLibrary: this._convertDiagnosticLevel( - configObj.reportWildcardImportFromLibrary, - DiagnosticRule.reportWildcardImportFromLibrary, - defaultSettings.reportWildcardImportFromLibrary - ), - - // Read the "reportMissingModuleSource" entry. - reportMissingModuleSource: this._convertDiagnosticLevel( - configObj.reportMissingModuleSource, - DiagnosticRule.reportMissingModuleSource, - defaultSettings.reportMissingModuleSource - ), - - // Read the "reportMissingTypeStubs" entry. - reportMissingTypeStubs: this._convertDiagnosticLevel( - configObj.reportMissingTypeStubs, - DiagnosticRule.reportMissingTypeStubs, - defaultSettings.reportMissingTypeStubs - ), - - // Read the "reportImportCycles" entry. - reportImportCycles: this._convertDiagnosticLevel( - configObj.reportImportCycles, - DiagnosticRule.reportImportCycles, - defaultSettings.reportImportCycles - ), - - // Read the "reportOptionalSubscript" entry. - reportOptionalSubscript: this._convertDiagnosticLevel( - configObj.reportOptionalSubscript, - DiagnosticRule.reportOptionalSubscript, - defaultSettings.reportOptionalSubscript - ), - - // Read the "reportOptionalMemberAccess" entry. 
- reportOptionalMemberAccess: this._convertDiagnosticLevel( - configObj.reportOptionalMemberAccess, - DiagnosticRule.reportOptionalMemberAccess, - defaultSettings.reportOptionalMemberAccess - ), - - // Read the "reportOptionalCall" entry. - reportOptionalCall: this._convertDiagnosticLevel( - configObj.reportOptionalCall, - DiagnosticRule.reportOptionalCall, - defaultSettings.reportOptionalCall - ), - - // Read the "reportOptionalIterable" entry. - reportOptionalIterable: this._convertDiagnosticLevel( - configObj.reportOptionalIterable, - DiagnosticRule.reportOptionalIterable, - defaultSettings.reportOptionalIterable - ), - - // Read the "reportOptionalContextManager" entry. - reportOptionalContextManager: this._convertDiagnosticLevel( - configObj.reportOptionalContextManager, - DiagnosticRule.reportOptionalContextManager, - defaultSettings.reportOptionalContextManager - ), - - // Read the "reportOptionalOperand" entry. - reportOptionalOperand: this._convertDiagnosticLevel( - configObj.reportOptionalOperand, - DiagnosticRule.reportOptionalOperand, - defaultSettings.reportOptionalOperand - ), - - // Read the "reportTypedDictNotRequiredAccess" entry. - reportTypedDictNotRequiredAccess: this._convertDiagnosticLevel( - configObj.reportTypedDictNotRequiredAccess, - DiagnosticRule.reportTypedDictNotRequiredAccess, - defaultSettings.reportTypedDictNotRequiredAccess - ), - - // Read the "reportUntypedFunctionDecorator" entry. - reportUntypedFunctionDecorator: this._convertDiagnosticLevel( - configObj.reportUntypedFunctionDecorator, - DiagnosticRule.reportUntypedFunctionDecorator, - defaultSettings.reportUntypedFunctionDecorator - ), - - // Read the "reportUntypedClassDecorator" entry. - reportUntypedClassDecorator: this._convertDiagnosticLevel( - configObj.reportUntypedClassDecorator, - DiagnosticRule.reportUntypedClassDecorator, - defaultSettings.reportUntypedClassDecorator - ), - - // Read the "reportUntypedBaseClass" entry. 
- reportUntypedBaseClass: this._convertDiagnosticLevel( - configObj.reportUntypedBaseClass, - DiagnosticRule.reportUntypedBaseClass, - defaultSettings.reportUntypedBaseClass - ), - - // Read the "reportUntypedNamedTuple" entry. - reportUntypedNamedTuple: this._convertDiagnosticLevel( - configObj.reportUntypedNamedTuple, - DiagnosticRule.reportUntypedNamedTuple, - defaultSettings.reportUntypedNamedTuple - ), - - // Read the "reportPrivateUsage" entry. - reportPrivateUsage: this._convertDiagnosticLevel( - configObj.reportPrivateUsage, - DiagnosticRule.reportPrivateUsage, - defaultSettings.reportPrivateUsage - ), - - // Read the "reportPrivateImportUsage" entry. - reportPrivateImportUsage: this._convertDiagnosticLevel( - configObj.reportPrivateImportUsage, - DiagnosticRule.reportPrivateImportUsage, - defaultSettings.reportPrivateImportUsage - ), - - // Read the "reportConstantRedefinition" entry. - reportConstantRedefinition: this._convertDiagnosticLevel( - configObj.reportConstantRedefinition, - DiagnosticRule.reportConstantRedefinition, - defaultSettings.reportConstantRedefinition - ), - - // Read the "reportIncompatibleMethodOverride" entry. - reportIncompatibleMethodOverride: this._convertDiagnosticLevel( - configObj.reportIncompatibleMethodOverride, - DiagnosticRule.reportIncompatibleMethodOverride, - defaultSettings.reportIncompatibleMethodOverride - ), - - // Read the "reportIncompatibleVariableOverride" entry. - reportIncompatibleVariableOverride: this._convertDiagnosticLevel( - configObj.reportIncompatibleVariableOverride, - DiagnosticRule.reportIncompatibleVariableOverride, - defaultSettings.reportIncompatibleVariableOverride - ), - - // Read the "reportOverlappingOverload" entry. - reportOverlappingOverload: this._convertDiagnosticLevel( - configObj.reportOverlappingOverload, - DiagnosticRule.reportOverlappingOverload, - defaultSettings.reportOverlappingOverload - ), - - // Read the "reportUninitializedInstanceVariable" entry. 
- reportUninitializedInstanceVariable: this._convertDiagnosticLevel( - configObj.reportUninitializedInstanceVariable, - DiagnosticRule.reportUninitializedInstanceVariable, - defaultSettings.reportUninitializedInstanceVariable - ), - - // Read the "reportInvalidStringEscapeSequence" entry. - reportInvalidStringEscapeSequence: this._convertDiagnosticLevel( - configObj.reportInvalidStringEscapeSequence, - DiagnosticRule.reportInvalidStringEscapeSequence, - defaultSettings.reportInvalidStringEscapeSequence - ), - - // Read the "reportUnknownParameterType" entry. - reportUnknownParameterType: this._convertDiagnosticLevel( - configObj.reportUnknownParameterType, - DiagnosticRule.reportUnknownParameterType, - defaultSettings.reportUnknownParameterType - ), - - // Read the "reportUnknownArgumentType" entry. - reportUnknownArgumentType: this._convertDiagnosticLevel( - configObj.reportUnknownArgumentType, - DiagnosticRule.reportUnknownArgumentType, - defaultSettings.reportUnknownArgumentType - ), - - // Read the "reportUnknownLambdaType" entry. - reportUnknownLambdaType: this._convertDiagnosticLevel( - configObj.reportUnknownLambdaType, - DiagnosticRule.reportUnknownLambdaType, - defaultSettings.reportUnknownLambdaType - ), - - // Read the "reportUnknownVariableType" entry. - reportUnknownVariableType: this._convertDiagnosticLevel( - configObj.reportUnknownVariableType, - DiagnosticRule.reportUnknownVariableType, - defaultSettings.reportUnknownVariableType - ), - - // Read the "reportUnknownMemberType" entry. - reportUnknownMemberType: this._convertDiagnosticLevel( - configObj.reportUnknownMemberType, - DiagnosticRule.reportUnknownMemberType, - defaultSettings.reportUnknownMemberType - ), - - // Read the "reportMissingTypeArgument" entry. - reportMissingTypeArgument: this._convertDiagnosticLevel( - configObj.reportMissingTypeArgument, - DiagnosticRule.reportMissingTypeArgument, - defaultSettings.reportMissingTypeArgument - ), - - // Read the "reportInvalidTypeVarUse" entry. 
- reportInvalidTypeVarUse: this._convertDiagnosticLevel( - configObj.reportInvalidTypeVarUse, - DiagnosticRule.reportInvalidTypeVarUse, - defaultSettings.reportInvalidTypeVarUse - ), - - // Read the "reportCallInDefaultInitializer" entry. - reportCallInDefaultInitializer: this._convertDiagnosticLevel( - configObj.reportCallInDefaultInitializer, - DiagnosticRule.reportCallInDefaultInitializer, - defaultSettings.reportCallInDefaultInitializer - ), - - // Read the "reportUnnecessaryIsInstance" entry. - reportUnnecessaryIsInstance: this._convertDiagnosticLevel( - configObj.reportUnnecessaryIsInstance, - DiagnosticRule.reportUnnecessaryIsInstance, - defaultSettings.reportUnnecessaryIsInstance - ), - - // Read the "reportUnnecessaryCast" entry. - reportUnnecessaryCast: this._convertDiagnosticLevel( - configObj.reportUnnecessaryCast, - DiagnosticRule.reportUnnecessaryCast, - defaultSettings.reportUnnecessaryCast - ), - - // Read the "reportUnnecessaryComparison" entry. - reportUnnecessaryComparison: this._convertDiagnosticLevel( - configObj.reportUnnecessaryComparison, - DiagnosticRule.reportUnnecessaryComparison, - defaultSettings.reportUnnecessaryComparison - ), - - // Read the "reportAssertAlwaysTrue" entry. - reportAssertAlwaysTrue: this._convertDiagnosticLevel( - configObj.reportAssertAlwaysTrue, - DiagnosticRule.reportAssertAlwaysTrue, - defaultSettings.reportAssertAlwaysTrue - ), - - // Read the "reportSelfClsParameterName" entry. - reportSelfClsParameterName: this._convertDiagnosticLevel( - configObj.reportSelfClsParameterName, - DiagnosticRule.reportSelfClsParameterName, - defaultSettings.reportSelfClsParameterName - ), - - // Read the "reportImplicitStringConcatenation" entry. - reportImplicitStringConcatenation: this._convertDiagnosticLevel( - configObj.reportImplicitStringConcatenation, - DiagnosticRule.reportImplicitStringConcatenation, - defaultSettings.reportImplicitStringConcatenation - ), - - // Read the "reportUndefinedVariable" entry. 
- reportUndefinedVariable: this._convertDiagnosticLevel( - configObj.reportUndefinedVariable, - DiagnosticRule.reportUndefinedVariable, - defaultSettings.reportUndefinedVariable - ), - - // Read the "reportUnboundVariable" entry. - reportUnboundVariable: this._convertDiagnosticLevel( - configObj.reportUnboundVariable, - DiagnosticRule.reportUnboundVariable, - defaultSettings.reportUnboundVariable - ), - - // Read the "reportInvalidStubStatement" entry. - reportInvalidStubStatement: this._convertDiagnosticLevel( - configObj.reportInvalidStubStatement, - DiagnosticRule.reportInvalidStubStatement, - defaultSettings.reportInvalidStubStatement - ), - - // Read the "reportIncompleteStub" entry. - reportIncompleteStub: this._convertDiagnosticLevel( - configObj.reportIncompleteStub, - DiagnosticRule.reportIncompleteStub, - defaultSettings.reportIncompleteStub - ), - - // Read the "reportUnsupportedDunderAll" entry. - reportUnsupportedDunderAll: this._convertDiagnosticLevel( - configObj.reportUnsupportedDunderAll, - DiagnosticRule.reportUnsupportedDunderAll, - defaultSettings.reportUnsupportedDunderAll - ), - - // Read the "reportUnusedCallResult" entry. - reportUnusedCallResult: this._convertDiagnosticLevel( - configObj.reportUnusedCallResult, - DiagnosticRule.reportUnusedCallResult, - defaultSettings.reportUnusedCallResult - ), - - // Read the "reportUnusedCoroutine" entry. - reportUnusedCoroutine: this._convertDiagnosticLevel( - configObj.reportUnusedCoroutine, - DiagnosticRule.reportUnusedCoroutine, - defaultSettings.reportUnusedCoroutine - ), - }; + // Apply overrides from the config file for the boolean rules. + getBooleanDiagnosticRules(/* includeNonOverridable */ true).forEach((ruleName) => { + (this.diagnosticRuleSet as any)[ruleName] = this._convertBoolean( + configObj[ruleName], + ruleName, + this.diagnosticRuleSet[ruleName] as boolean + ); + }); + + // Apply overrides from the config file for the diagnostic level rules. 
+ getDiagLevelDiagnosticRules().forEach((ruleName) => { + (this.diagnosticRuleSet as any)[ruleName] = this._convertDiagnosticLevel( + configObj[ruleName], + ruleName, + this.diagnosticRuleSet[ruleName] as DiagnosticLevel + ); + }); // Read the "venvPath". this.venvPath = undefined; @@ -1501,8 +1145,11 @@ export class ConfigOptions { return; } - for (const [ruleName, severity] of Object.entries(diagnosticSeverityOverrides)) { - (this.diagnosticRuleSet as any)[ruleName] = severity; + for (const ruleName of getDiagLevelDiagnosticRules()) { + const severity = diagnosticSeverityOverrides[ruleName]; + if (severity !== undefined) { + (this.diagnosticRuleSet as any)[ruleName] = severity; + } } } diff --git a/packages/pyright-internal/src/common/console.ts b/packages/pyright-internal/src/common/console.ts index 322cf64bd976..8057198de900 100644 --- a/packages/pyright-internal/src/common/console.ts +++ b/packages/pyright-internal/src/common/console.ts @@ -68,6 +68,16 @@ export class StandardConsole implements ConsoleInterface { } } +export class StandardConsoleWithLevel extends StandardConsole { + constructor(private _maxLevel: LogLevel = LogLevel.Log) { + super(); + } + + get level(): LogLevel { + return this._maxLevel; + } +} + export class StderrConsole implements ConsoleInterface { log(message: string) { console.error(message); @@ -86,6 +96,16 @@ export class StderrConsole implements ConsoleInterface { } } +export class StderrConsoleWithLevel extends StderrConsole { + constructor(private _maxLevel: LogLevel = LogLevel.Log) { + super(); + } + + get level(): LogLevel { + return this._maxLevel; + } +} + export class ConsoleWithLogLevel implements ConsoleInterface { private _levelMap: Map = new Map([ [LogLevel.Error, 0], @@ -96,7 +116,7 @@ export class ConsoleWithLogLevel implements ConsoleInterface { private _maxLevel = 2; - constructor(private _console: ConsoleInterface) {} + constructor(private _console: ConsoleInterface, private _name = '') {} get level(): LogLevel { 
switch (this._maxLevel) { @@ -123,19 +143,23 @@ export class ConsoleWithLogLevel implements ConsoleInterface { } error(message: string) { - this._log(LogLevel.Error, message); + this._log(LogLevel.Error, `${this._prefix}${message}`); } warn(message: string) { - this._log(LogLevel.Warn, message); + this._log(LogLevel.Warn, `${this._prefix}${message}`); } info(message: string) { - this._log(LogLevel.Info, message); + this._log(LogLevel.Info, `${this._prefix}${message}`); } log(message: string) { - this._log(LogLevel.Log, message); + this._log(LogLevel.Log, `${this._prefix}${message}`); + } + + private get _prefix() { + return this._name ? `(${this._name}) ` : ''; } private _log(level: LogLevel, message: string): void { diff --git a/packages/pyright-internal/src/common/deferred.ts b/packages/pyright-internal/src/common/deferred.ts index 00376a46f855..a48db2cc1c98 100644 --- a/packages/pyright-internal/src/common/deferred.ts +++ b/packages/pyright-internal/src/common/deferred.ts @@ -22,7 +22,7 @@ class DeferredImpl implements Deferred { private _rejected = false; private _promise: Promise; - constructor(private scope: any = null) { + constructor(private _scope: any = null) { this._promise = new Promise((res, rej) => { this._resolve = res; this._reject = rej; @@ -31,13 +31,13 @@ class DeferredImpl implements Deferred { public resolve(_value?: T | PromiseLike) { // eslint-disable-next-line prefer-rest-params - this._resolve.apply(this.scope ? this.scope : this, arguments as any); + this._resolve.apply(this._scope ? this._scope : this, arguments as any); this._resolved = true; } public reject(_reason?: any) { // eslint-disable-next-line prefer-rest-params - this._reject.apply(this.scope ? this.scope : this, arguments as any); + this._reject.apply(this._scope ? 
this._scope : this, arguments as any); this._rejected = true; } diff --git a/packages/pyright-internal/src/common/diagnostic.ts b/packages/pyright-internal/src/common/diagnostic.ts index 7c41ecd4c87c..7935d929c913 100644 --- a/packages/pyright-internal/src/common/diagnostic.ts +++ b/packages/pyright-internal/src/common/diagnostic.ts @@ -20,6 +20,7 @@ export const enum DiagnosticCategory { Warning, Information, UnusedCode, + Deprecated, } export function convertLevelToCategory(level: DiagnosticLevel) { diff --git a/packages/pyright-internal/src/common/diagnosticRules.ts b/packages/pyright-internal/src/common/diagnosticRules.ts index 98a56da85e67..881d8ee1dd62 100644 --- a/packages/pyright-internal/src/common/diagnosticRules.ts +++ b/packages/pyright-internal/src/common/diagnosticRules.ts @@ -46,7 +46,9 @@ export enum DiagnosticRule { reportConstantRedefinition = 'reportConstantRedefinition', reportIncompatibleMethodOverride = 'reportIncompatibleMethodOverride', reportIncompatibleVariableOverride = 'reportIncompatibleVariableOverride', + reportInconsistentConstructor = 'reportInconsistentConstructor', reportOverlappingOverload = 'reportOverlappingOverload', + reportMissingSuperCall = 'reportMissingSuperCall', reportUninitializedInstanceVariable = 'reportUninitializedInstanceVariable', reportInvalidStringEscapeSequence = 'reportInvalidStringEscapeSequence', reportUnknownParameterType = 'reportUnknownParameterType', @@ -54,6 +56,7 @@ export enum DiagnosticRule { reportUnknownLambdaType = 'reportUnknownLambdaType', reportUnknownVariableType = 'reportUnknownVariableType', reportUnknownMemberType = 'reportUnknownMemberType', + reportMissingParameterType = 'reportMissingParameterType', reportMissingTypeArgument = 'reportMissingTypeArgument', reportInvalidTypeVarUse = 'reportInvalidTypeVarUse', reportCallInDefaultInitializer = 'reportCallInDefaultInitializer', @@ -70,4 +73,6 @@ export enum DiagnosticRule { reportUnsupportedDunderAll = 'reportUnsupportedDunderAll', 
reportUnusedCallResult = 'reportUnusedCallResult', reportUnusedCoroutine = 'reportUnusedCoroutine', + reportUnnecessaryTypeIgnoreComment = 'reportUnnecessaryTypeIgnoreComment', + reportMatchNotExhaustive = 'reportMatchNotExhaustive', } diff --git a/packages/pyright-internal/src/common/diagnosticSink.ts b/packages/pyright-internal/src/common/diagnosticSink.ts index f6c41e7d4925..a7341153d7e9 100644 --- a/packages/pyright-internal/src/common/diagnosticSink.ts +++ b/packages/pyright-internal/src/common/diagnosticSink.ts @@ -58,6 +58,14 @@ export class DiagnosticSink { return this.addDiagnostic(diag); } + addDeprecated(message: string, range: Range, action?: DiagnosticAction) { + const diag = new Diagnostic(DiagnosticCategory.Deprecated, message, range); + if (action) { + diag.addAction(action); + } + return this.addDiagnostic(diag); + } + addDiagnostic(diag: Diagnostic) { // Create a unique key for the diagnostic to prevent // adding duplicates. @@ -90,6 +98,10 @@ export class DiagnosticSink { getUnusedCode() { return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.UnusedCode); } + + getDeprecated() { + return this._diagnosticList.filter((diag) => diag.category === DiagnosticCategory.Deprecated); + } } // Specialized version of DiagnosticSink that works with TextRange objects @@ -126,4 +138,12 @@ export class TextRangeDiagnosticSink extends DiagnosticSink { action ); } + + addDeprecatedWithTextRange(message: string, range: TextRange, action?: DiagnosticAction) { + return this.addDeprecated( + message, + convertOffsetsToRange(range.start, range.start + range.length, this._lines), + action + ); + } } diff --git a/packages/pyright-internal/src/common/editAction.ts b/packages/pyright-internal/src/common/editAction.ts index e323f411a50a..a655b7b5b065 100644 --- a/packages/pyright-internal/src/common/editAction.ts +++ b/packages/pyright-internal/src/common/editAction.ts @@ -17,3 +17,30 @@ export interface TextEditAction { export interface 
FileEditAction extends TextEditAction { filePath: string; } + +export interface FileEditActions { + edits: FileEditAction[]; + fileOperations: FileOperations[]; +} + +export type FileOperations = RenameFileOperation | CreateFileOperation | DeleteFileOperation; + +export interface FileOperation { + kind: 'create' | 'delete' | 'rename'; +} + +export interface RenameFileOperation extends FileOperation { + kind: 'rename'; + oldFilePath: string; + newFilePath: string; +} + +export interface CreateFileOperation extends FileOperation { + kind: 'create'; + filePath: string; +} + +export interface DeleteFileOperation extends FileOperation { + kind: 'delete'; + filePath: string; +} diff --git a/packages/pyright-internal/src/common/extensibility.ts b/packages/pyright-internal/src/common/extensibility.ts index dafef6c0b6d2..fa5abbeea9ef 100644 --- a/packages/pyright-internal/src/common/extensibility.ts +++ b/packages/pyright-internal/src/common/extensibility.ts @@ -8,7 +8,7 @@ import { CancellationToken } from 'vscode-languageserver'; -import { CompletionResults } from '../languageService/completionProvider'; +import { CompletionResultsList } from '../languageService/completionProvider'; import { ParseResults } from '../parser/parser'; export interface LanguageServiceExtension { @@ -18,7 +18,7 @@ export interface LanguageServiceExtension { export interface CompletionListExtension { // Extension updates completion list provided by the application. 
updateCompletionResults( - completionResults: CompletionResults, + completionResults: CompletionResultsList, parseResults: ParseResults, position: number, token: CancellationToken diff --git a/packages/pyright-internal/src/common/fileSystem.ts b/packages/pyright-internal/src/common/fileSystem.ts index aa8cf74c77f0..29477576d64b 100644 --- a/packages/pyright-internal/src/common/fileSystem.ts +++ b/packages/pyright-internal/src/common/fileSystem.ts @@ -119,7 +119,7 @@ export const nullFileWatcherProvider: FileWatcherProvider = { }; export class VirtualDirent implements fs.Dirent { - constructor(public name: string, public _file: boolean) {} + constructor(public name: string, private _file: boolean) {} isFile(): boolean { return this._file; diff --git a/packages/pyright-internal/src/common/fullAccessHost.ts b/packages/pyright-internal/src/common/fullAccessHost.ts index 3457829cabdf..db82a48d6ac8 100644 --- a/packages/pyright-internal/src/common/fullAccessHost.ts +++ b/packages/pyright-internal/src/common/fullAccessHost.ts @@ -16,13 +16,27 @@ import { HostKind, NoAccessHost } from './host'; import { isDirectory, normalizePath } from './pathUtils'; import { PythonVersion, versionFromMajorMinor } from './pythonVersion'; -const extractSys = [ +// preventLocalImports removes the working directory from sys.path. +// The -c flag adds it automatically, which can allow some stdlib +// modules (like json) to be overridden by other files (like json.py). 
+const removeCwdFromSysPath = [ 'import os, os.path, sys', 'normalize = lambda p: os.path.normcase(os.path.normpath(p))', 'cwd = normalize(os.getcwd())', + 'orig_sys_path = [p for p in sys.path if p != ""]', 'sys.path[:] = [p for p in sys.path if p != "" and normalize(p) != cwd]', - 'import json', - 'json.dump(dict(path=sys.path, prefix=sys.prefix), sys.stdout)', +]; + +const extractSys = [ + ...removeCwdFromSysPath, + 'import sys, json', + 'json.dump(dict(path=orig_sys_path, prefix=sys.prefix), sys.stdout)', +].join('; '); + +const extractVersion = [ + ...removeCwdFromSysPath, + 'import sys, json', + 'json.dump(dict(major=sys.version_info[0], minor=sys.version_info[1]), sys.stdout)', ].join('; '); export class LimitedAccessHost extends NoAccessHost { @@ -67,23 +81,9 @@ export class FullAccessHost extends LimitedAccessHost { override getPythonSearchPaths(pythonPath?: string, logInfo?: string[]): PythonPathResult { const importFailureInfo = logInfo ?? []; - let result: PythonPathResult | undefined; - - if (pythonPath) { - result = this._getSearchPathResultFromInterpreter(this._fs, pythonPath, importFailureInfo); - } else { - // On non-Windows platforms, always default to python3 first. We want to - // avoid this on Windows because it might invoke a script that displays - // a dialog box indicating that python can be downloaded from the app store. - if (process.platform !== 'win32') { - result = this._getSearchPathResultFromInterpreter(this._fs, 'python3', importFailureInfo); - } - - // On some platforms, 'python3' might not exist. Try 'python' instead. - if (!result) { - result = this._getSearchPathResultFromInterpreter(this._fs, 'python', importFailureInfo); - } - } + let result = this._executePythonInterpreter(pythonPath, (p) => + this._getSearchPathResultFromInterpreter(this._fs, p, importFailureInfo) + ); if (!result) { result = { @@ -104,20 +104,12 @@ export class FullAccessHost extends LimitedAccessHost { const importFailureInfo = logInfo ?? 
[]; try { - const commandLineArgs: string[] = [ - '-c', - 'import sys, json; json.dump(dict(major=sys.version_info[0], minor=sys.version_info[1]), sys.stdout)', - ]; - let execOutput: string; - - if (pythonPath) { - execOutput = child_process.execFileSync(pythonPath, commandLineArgs, { encoding: 'utf8' }); - } else { - execOutput = child_process.execFileSync('python', commandLineArgs, { encoding: 'utf8' }); - } - - const versionJson: { major: number; minor: number } = JSON.parse(execOutput); + const commandLineArgs: string[] = ['-c', extractVersion]; + const execOutput = this._executePythonInterpreter(pythonPath, (p) => + child_process.execFileSync(p, commandLineArgs, { encoding: 'utf8' }) + ); + const versionJson: { major: number; minor: number } = JSON.parse(execOutput!); const version = versionFromMajorMinor(versionJson.major, versionJson.minor); if (version === undefined) { importFailureInfo.push( @@ -133,6 +125,34 @@ export class FullAccessHost extends LimitedAccessHost { } } + private _executePythonInterpreter( + pythonPath: string | undefined, + execute: (path: string) => T | undefined + ): T | undefined { + if (pythonPath) { + return execute(pythonPath); + } else { + let result: T | undefined; + try { + // On non-Windows platforms, always default to python3 first. We want to + // avoid this on Windows because it might invoke a script that displays + // a dialog box indicating that python can be downloaded from the app store. + if (process.platform !== 'win32') { + result = execute('python3'); + } + } catch { + // Ignore failure on python3 + } + + if (result !== undefined) { + return result; + } + + // On some platforms, 'python3' might not exist. Try 'python' instead. 
+ return execute('python'); + } + } + private _getSearchPathResultFromInterpreter( fs: FileSystem, interpreter: string, diff --git a/packages/pyright-internal/src/common/pathUtils.ts b/packages/pyright-internal/src/common/pathUtils.ts index 24ba68b0b9f8..0a502070753b 100644 --- a/packages/pyright-internal/src/common/pathUtils.ts +++ b/packages/pyright-internal/src/common/pathUtils.ts @@ -679,6 +679,10 @@ export function getWildcardRoot(rootPath: string, fileSpec: string): string { pathComponents[0] = stripTrailingDirectorySeparator(pathComponents[0]); } + if (pathComponents.length === 1 && !pathComponents[0]) { + return path.sep; + } + let wildcardRoot = ''; let firstComponent = true; @@ -949,3 +953,22 @@ export function getLibraryPathWithoutExtension(libraryFilePath: string) { return filePathWithoutExtension; } + +export function getDirectoryChangeKind( + fs: FileSystem, + oldDirectory: string, + newDirectory: string +): 'Same' | 'Renamed' | 'Moved' { + if (fs.realCasePath(oldDirectory) === fs.realCasePath(newDirectory)) { + return 'Same'; + } + + const relativePaths = getRelativePathComponentsFromDirectory(oldDirectory, newDirectory, (f) => fs.realCasePath(f)); + + // 3 means only last folder name has changed. + if (relativePaths.length === 3 && relativePaths[1] === '..' 
&& relativePaths[2] !== '..') { + return 'Renamed'; + } + + return 'Moved'; +} diff --git a/packages/pyright-internal/src/common/pythonVersion.ts b/packages/pyright-internal/src/common/pythonVersion.ts index 304cecf36ffc..910608ae56a0 100644 --- a/packages/pyright-internal/src/common/pythonVersion.ts +++ b/packages/pyright-internal/src/common/pythonVersion.ts @@ -26,8 +26,7 @@ export enum PythonVersion { V3_11 = 0x030b, } -export const latestStablePythonVersion = PythonVersion.V3_9; -export const latestPythonVersion = PythonVersion.V3_9; +export const latestStablePythonVersion = PythonVersion.V3_10; export function versionToString(version: PythonVersion): string { const majorVersion = (version >> 8) & 0xff; diff --git a/packages/pyright-internal/src/common/realFileSystem.ts b/packages/pyright-internal/src/common/realFileSystem.ts index e54cf7bfc9b2..32f14d934a5c 100644 --- a/packages/pyright-internal/src/common/realFileSystem.ts +++ b/packages/pyright-internal/src/common/realFileSystem.ts @@ -101,6 +101,8 @@ function hasZipMagic(fs: FakeFS, p: PortablePath): boolean { } } +/* eslint-disable @typescript-eslint/naming-convention */ + // Patch fslib's ZipOpenFS to also consider .egg files to be .zip files. // // For now, override findZip (even though it's private), with the intent @@ -175,6 +177,8 @@ class EggZipOpenFS extends ZipOpenFS { } } +/* eslint-enable @typescript-eslint/naming-convention */ + class YarnFS extends PosixFS { private readonly _eggZipOpenFS: EggZipOpenFS; diff --git a/packages/pyright-internal/src/common/stringUtils.ts b/packages/pyright-internal/src/common/stringUtils.ts index 27d0f68ad4b1..0c0a4b6e280a 100644 --- a/packages/pyright-internal/src/common/stringUtils.ts +++ b/packages/pyright-internal/src/common/stringUtils.ts @@ -154,3 +154,8 @@ export function getCharacterCount(value: string, ch: string) { } return result; } + +export function getLastDottedString(text: string) { + const index = text.lastIndexOf('.'); + return index > 0 ? 
text.substring(index + 1) : text; +} diff --git a/packages/pyright-internal/src/common/textRange.ts b/packages/pyright-internal/src/common/textRange.ts index e0e148763ad2..c54bce87692d 100644 --- a/packages/pyright-internal/src/common/textRange.ts +++ b/packages/pyright-internal/src/common/textRange.ts @@ -49,6 +49,10 @@ export namespace TextRange { return position >= range.start && position <= getEnd(range); } + export function overlapsRange(range: TextRange, other: TextRange): boolean { + return overlaps(range, other.start) || overlaps(other, range.start); + } + export function extend(range: TextRange, extension: TextRange | TextRange[] | undefined) { if (extension) { if (Array.isArray(extension)) { @@ -87,11 +91,15 @@ export interface Position { character: number; } -namespace Position { +export namespace Position { export function is(value: any): value is Position { const candidate = value as Position; return candidate && candidate.line !== void 0 && candidate.character !== void 0; } + + export function print(value: Position): string { + return `(${value.line}:${value.character})`; + } } export interface Range { @@ -99,11 +107,15 @@ export interface Range { end: Position; } -namespace Range { +export namespace Range { export function is(value: any): value is Range { const candidate = value as Range; return candidate && candidate.start !== void 0 && candidate.end !== void 0; } + + export function print(value: Range): string { + return `${Position.print(value.start)}-${Position.print(value.end)}`; + } } // Represents a range within a particular document. 
@@ -158,8 +170,12 @@ export function doesRangeContain(range: Range, positionOrRange: Position | Range return doesRangeContain(range, positionOrRange.start) && doesRangeContain(range, positionOrRange.end); } +export function positionsAreEqual(a: Position, b: Position) { + return comparePositions(a, b) === 0; +} + export function rangesAreEqual(a: Range, b: Range) { - return comparePositions(a.start, b.start) === 0 && comparePositions(a.end, b.end) === 0; + return positionsAreEqual(a.start, b.start) && positionsAreEqual(a.end, b.end); } export function getEmptyRange(): Range { @@ -176,3 +192,34 @@ export function isEmptyPosition(pos: Position) { export function isEmptyRange(range: Range) { return isEmptyPosition(range.start) && isEmptyPosition(range.end); } + +export function extendRange(range: Range, extension: Range | Range[] | undefined) { + if (extension) { + if (Array.isArray(extension)) { + extension.forEach((r) => { + extendRange(range, r); + }); + } else { + if (comparePositions(extension.start, range.start) < 0) { + range.start = extension.start; + } + + if (comparePositions(extension.end, range.end) > 0) { + range.end = extension.end; + } + } + } +} + +export function combineRange(ranges: Range[]): Range | undefined { + if (ranges.length === 0) { + return undefined; + } + + const combinedRange = ranges[0]; + for (let i = 1; i < ranges.length; i++) { + extendRange(combinedRange, ranges[i]); + } + + return combinedRange; +} diff --git a/packages/pyright-internal/src/common/uriParser.ts b/packages/pyright-internal/src/common/uriParser.ts new file mode 100644 index 000000000000..e2e700a3f128 --- /dev/null +++ b/packages/pyright-internal/src/common/uriParser.ts @@ -0,0 +1,26 @@ +/* + * uriParser.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * URI utility functions. 
+ */ + +import { Position } from 'vscode-languageserver'; +import { TextDocumentIdentifier } from 'vscode-languageserver-protocol'; + +import { FileSystem } from './fileSystem'; +import { convertUriToPath } from './pathUtils'; + +export class UriParser { + constructor(private _fs: FileSystem) {} + + public decodeTextDocumentPosition(textDocument: TextDocumentIdentifier, position: Position) { + const filePath = convertUriToPath(this._fs, textDocument.uri); + return { filePath, position }; + } + + public decodeTextDocumentUri(uriString: string) { + return convertUriToPath(this._fs, uriString); + } +} diff --git a/packages/pyright-internal/src/common/workspaceEditUtils.ts b/packages/pyright-internal/src/common/workspaceEditUtils.ts index a8a6f47be6d4..e8838631c063 100644 --- a/packages/pyright-internal/src/common/workspaceEditUtils.ts +++ b/packages/pyright-internal/src/common/workspaceEditUtils.ts @@ -6,10 +6,11 @@ * Convert Pyright's FileEditActions to LanguageServer's WorkspaceEdits. */ -import { WorkspaceEdit } from 'vscode-languageserver'; +import { ChangeAnnotation, TextDocumentEdit, WorkspaceEdit } from 'vscode-languageserver'; import { FileEditAction } from '../common/editAction'; import { convertPathToUri } from '../common/pathUtils'; +import { createMapFromItems } from './collectionUtils'; import { FileSystem } from './fileSystem'; export function convertWorkspaceEdits(fs: FileSystem, edits: FileEditAction[]) { @@ -25,3 +26,32 @@ export function convertWorkspaceEdits(fs: FileSystem, edits: FileEditAction[]) { return workspaceEdits; } + +export function convertWorkspaceDocumentEdits( + fs: FileSystem, + edits: FileEditAction[], + changeAnnotations?: { + [id: string]: ChangeAnnotation; + }, + defaultAnnotationId = 'default' +) { + const workspaceEdits: WorkspaceEdit = { + documentChanges: [], + changeAnnotations: changeAnnotations, + }; + + const mapPerFile = createMapFromItems(edits, (e) => e.filePath); + for (const [key, value] of mapPerFile) { + 
workspaceEdits.documentChanges!.push( + TextDocumentEdit.create({ uri: convertPathToUri(fs, key), version: null }, [ + ...value.map((v) => ({ + range: v.range, + newText: v.replacementText, + annotationId: defaultAnnotationId, + })), + ]) + ); + } + + return workspaceEdits; +} diff --git a/packages/pyright-internal/src/languageServerBase.ts b/packages/pyright-internal/src/languageServerBase.ts index c50aaf5dc4df..1d63cba10a06 100644 --- a/packages/pyright-internal/src/languageServerBase.ts +++ b/packages/pyright-internal/src/languageServerBase.ts @@ -12,6 +12,11 @@ import './common/extensions'; import { + CallHierarchyIncomingCallsParams, + CallHierarchyItem, + CallHierarchyOutgoingCall, + CallHierarchyOutgoingCallsParams, + CallHierarchyPrepareParams, CancellationToken, CancellationTokenSource, CodeAction, @@ -23,19 +28,37 @@ import { CompletionTriggerKind, ConfigurationItem, Connection, + Declaration, + DeclarationLink, + Definition, + DefinitionLink, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, + DidChangeConfigurationParams, + DidChangeTextDocumentParams, DidChangeWatchedFilesNotification, + DidChangeWatchedFilesParams, + DidCloseTextDocumentParams, + DidOpenTextDocumentParams, + DocumentHighlight, + DocumentHighlightParams, DocumentSymbol, + DocumentSymbolParams, ExecuteCommandParams, + HoverParams, InitializeParams, InitializeResult, Location, MarkupKind, ParameterInformation, + PublishDiagnosticsParams, + ReferenceParams, RemoteWindow, + RenameParams, + SignatureHelp, + SignatureHelpParams, SignatureHelpTriggerKind, SignatureInformation, SymbolInformation, @@ -45,7 +68,9 @@ import { WorkDoneProgressReporter, WorkspaceEdit, WorkspaceFolder, + WorkspaceSymbolParams, } from 'vscode-languageserver'; +import { attachWorkDone, ResultProgressReporter } from 'vscode-languageserver/lib/common/progress'; import { AnalysisResults } from './analyzer/analysis'; import { BackgroundAnalysisProgram } from 
'./analyzer/backgroundAnalysisProgram'; @@ -66,20 +91,22 @@ import { ConsoleInterface, ConsoleWithLogLevel, LogLevel } from './common/consol import { createDeferred, Deferred } from './common/deferred'; import { Diagnostic as AnalyzerDiagnostic, DiagnosticCategory } from './common/diagnostic'; import { DiagnosticRule } from './common/diagnosticRules'; +import { FileDiagnostics } from './common/diagnosticSink'; import { LanguageServiceExtension } from './common/extensibility'; import { FileSystem, FileWatcherEventType, FileWatcherProvider } from './common/fileSystem'; import { Host } from './common/host'; -import { convertPathToUri, convertUriToPath } from './common/pathUtils'; +import { convertPathToUri } from './common/pathUtils'; import { ProgressReporter, ProgressReportTracker } from './common/progressReporter'; import { DocumentRange, Position } from './common/textRange'; +import { UriParser } from './common/uriParser'; import { convertWorkspaceEdits } from './common/workspaceEditUtils'; import { AnalyzerServiceExecutor } from './languageService/analyzerServiceExecutor'; -import { CompletionItemData, CompletionResults } from './languageService/completionProvider'; +import { CompletionItemData, CompletionOptions, CompletionResultsList } from './languageService/completionProvider'; import { DefinitionFilter } from './languageService/definitionProvider'; import { convertToFlatSymbols, WorkspaceSymbolCallback } from './languageService/documentSymbolProvider'; import { convertHoverResults } from './languageService/hoverProvider'; import { ReferenceCallback } from './languageService/referencesProvider'; -import { Localizer } from './localization/localize'; +import { Localizer, setLocaleOverride } from './localization/localize'; import { PyrightFileSystem } from './pyrightFileSystem'; import { WorkspaceMap } from './workspaceMap'; @@ -113,13 +140,24 @@ export interface WorkspaceServiceInstance { serviceInstance: AnalyzerService; disableLanguageServices: boolean; 
disableOrganizeImports: boolean; + disableWorkspaceSymbol?: boolean; isInitialized: Deferred; } +export interface MessageAction { + title: string; + id: string; +} + export interface WindowInterface { showErrorMessage(message: string): void; + showErrorMessage(message: string, ...actions: MessageAction[]): Promise; + showWarningMessage(message: string): void; + showWarningMessage(message: string, ...actions: MessageAction[]): Promise; + showInformationMessage(message: string): void; + showInformationMessage(message: string, ...actions: MessageAction[]): Promise; } export interface LanguageServerInterface { @@ -128,11 +166,13 @@ export interface LanguageServerInterface { createBackgroundAnalysis(): BackgroundAnalysisBase | undefined; reanalyze(): void; restart(): void; + decodeTextDocumentUri(uriString: string): string; readonly rootPath: string; readonly console: ConsoleInterface; readonly window: WindowInterface; readonly fs: FileSystem; + readonly supportAdvancedEdits: boolean; } export interface ServerOptions { @@ -160,19 +200,37 @@ interface ClientCapabilities { hasHierarchicalDocumentSymbolCapability: boolean; hasWindowProgressCapability: boolean; hasGoToDeclarationCapability: boolean; + hasDocumentChangeCapability: boolean; + hasDocumentAnnotationCapability: boolean; hoverContentFormat: MarkupKind; completionDocFormat: MarkupKind; completionSupportsSnippet: boolean; signatureDocFormat: MarkupKind; + supportsDeprecatedDiagnosticTag: boolean; supportsUnnecessaryDiagnosticTag: boolean; completionItemResolveSupportsAdditionalTextEdits: boolean; } +const nullProgressReporter = attachWorkDone(undefined as any, undefined); + export abstract class LanguageServerBase implements LanguageServerInterface { protected _defaultClientConfig: any; protected _workspaceMap: WorkspaceMap; protected _fileWatcherProvider: FileWatcherProvider; + // We support running only one "find all reference" at a time. 
+ private _pendingFindAllRefsCancellationSource: CancellationTokenSource | undefined; + + // We support running only one command at a time. + private _pendingCommandCancellationSource: CancellationTokenSource | undefined; + + private _progressReporter: ProgressReporter; + + private _lastTriggerKind: CompletionTriggerKind | undefined = CompletionTriggerKind.Invoked; + + // Global root path - the basis for all global settings. + rootPath = ''; + protected client: ClientCapabilities = { hasConfigurationCapability: false, hasVisualStudioExtensionsCapability: false, @@ -183,30 +241,22 @@ export abstract class LanguageServerBase implements LanguageServerInterface { hasHierarchicalDocumentSymbolCapability: false, hasWindowProgressCapability: false, hasGoToDeclarationCapability: false, + hasDocumentChangeCapability: false, + hasDocumentAnnotationCapability: false, hoverContentFormat: MarkupKind.PlainText, completionDocFormat: MarkupKind.PlainText, completionSupportsSnippet: false, signatureDocFormat: MarkupKind.PlainText, + supportsDeprecatedDiagnosticTag: false, supportsUnnecessaryDiagnosticTag: false, completionItemResolveSupportsAdditionalTextEdits: false, }; - // We support running only one "find all reference" at a time. - private _pendingFindAllRefsCancellationSource: CancellationTokenSource | undefined; - - // We support running only one command at a time. - private _pendingCommandCancellationSource: CancellationTokenSource | undefined; - - private _progressReporter: ProgressReporter; - - private _lastTriggerKind: CompletionTriggerKind | undefined = CompletionTriggerKind.Invoked; - - // Global root path - the basis for all global settings. - rootPath = ''; - // File system abstraction. 
fs: FileSystem; + protected _uriParser: UriParser; + constructor( protected _serverOptions: ServerOptions, protected _connection: Connection, @@ -226,7 +276,9 @@ export abstract class LanguageServerBase implements LanguageServerInterface { this._workspaceMap = this._serverOptions.workspaceMap; this._fileWatcherProvider = this._serverOptions.fileWatcherProvider; + this.fs = new PyrightFileSystem(this._serverOptions.fileSystem); + this._uriParser = new UriParser(this.fs); // Set the working directory to a known location within // the extension directory. Otherwise the execution of @@ -245,6 +297,11 @@ export abstract class LanguageServerBase implements LanguageServerInterface { this._connection.listen(); } + // Convert uri to path + decodeTextDocumentUri(uriString: string): string { + return this._uriParser.decodeTextDocumentUri(uriString); + } + abstract createBackgroundAnalysis(): BackgroundAnalysisBase | undefined; protected abstract executeCommand(params: ExecuteCommandParams, token: CancellationToken): Promise; @@ -329,6 +386,10 @@ export abstract class LanguageServerBase implements LanguageServerInterface { return this._connection.window; } + get supportAdvancedEdits(): boolean { + return this.client.hasDocumentChangeCapability && this.client.hasDocumentAnnotationCapability; + } + // Creates a service instance that's used for analyzing a // program within a workspace. createAnalyzerService(name: string): AnalyzerService { @@ -375,546 +436,733 @@ export abstract class LanguageServerBase implements LanguageServerInterface { // in the passed params the rootPath of the workspace plus the client capabilities. 
this._connection.onInitialize((params) => this.initialize(params, supportedCommands, supportedCodeActions)); - this._connection.onDidChangeConfiguration((params) => { - this.console.log(`Received updated settings`); - if (params?.settings) { - this._defaultClientConfig = params?.settings; - } - this.updateSettingsForAllWorkspaces(); - }); - - this._connection.onCodeAction((params, token) => this.executeCodeAction(params, token)); - - const getDefinitions = async ( - params: TextDocumentPositionParams, - token: CancellationToken, - filter: DefinitionFilter - ) => { - this.recordUserInteractionTime(); + this._connection.onInitialized(() => this.onInitialized()); - const filePath = convertUriToPath(this.fs, params.textDocument.uri); - - const position: Position = { - line: params.position.line, - character: params.position.character, - }; + this._connection.onDidChangeConfiguration((params) => this.onDidChangeConfiguration(params)); - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return undefined; - } + this._connection.onCodeAction((params, token) => this.executeCodeAction(params, token)); - const locations = workspace.serviceInstance.getDefinitionForPosition(filePath, position, filter, token); - if (!locations) { - return undefined; - } - return locations - .filter((loc) => !this.fs.isInZipOrEgg(loc.path)) - .map((loc) => Location.create(convertPathToUri(this.fs, loc.path), loc.range)); - }; + this._connection.onDefinition(async (params, token) => this.onDefinition(params, token)); + this._connection.onDeclaration(async (params, token) => this.onDeclaration(params, token)); + this._connection.onTypeDefinition(async (params, token) => this.onTypeDefinition(params, token)); - this._connection.onDefinition((params, token) => - getDefinitions( - params, - token, - this.client.hasGoToDeclarationCapability ? 
DefinitionFilter.PreferSource : DefinitionFilter.All - ) + this._connection.onReferences(async (params, token, workDoneReporter, resultReporter) => + this.onReferences(params, token, workDoneReporter, resultReporter) ); - this._connection.onDeclaration((params, token) => - getDefinitions( - params, - token, - this.client.hasGoToDeclarationCapability ? DefinitionFilter.PreferStubs : DefinitionFilter.All - ) + this._connection.onDocumentSymbol(async (params, token) => this.onDocumentSymbol(params, token)); + this._connection.onWorkspaceSymbol(async (params, token, _, resultReporter) => + this.onWorkspaceSymbol(params, token, resultReporter) ); - this._connection.onReferences(async (params, token, workDoneReporter, resultReporter) => { - if (this._pendingFindAllRefsCancellationSource) { - this._pendingFindAllRefsCancellationSource.cancel(); - this._pendingFindAllRefsCancellationSource = undefined; - } - - // VS Code doesn't support cancellation of "final all references". - // We provide a progress bar a cancellation button so the user can cancel - // any long-running actions. - const progress = await this._getProgressReporter( - params.workDoneToken, - workDoneReporter, - Localizer.CodeAction.findingReferences() - ); - - const source = CancelAfter(token, progress.token); - this._pendingFindAllRefsCancellationSource = source; - - try { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); - const position: Position = { - line: params.position.line, - character: params.position.character, - }; - - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return; - } - - const convert = (locs: DocumentRange[]): Location[] => { - return locs - .filter((loc) => !this.fs.isInZipOrEgg(loc.path)) - .map((loc) => Location.create(convertPathToUri(this.fs, loc.path), loc.range)); - }; - - const locations: Location[] = []; - const reporter: ReferenceCallback = resultReporter - ? 
(locs) => resultReporter.report(convert(locs)) - : (locs) => locations.push(...convert(locs)); - - workspace.serviceInstance.reportReferencesForPosition( - filePath, - position, - params.context.includeDeclaration, - reporter, - source.token - ); - - return locations; - } finally { - progress.reporter.done(); - source.dispose(); - } - }); + this._connection.onHover(async (params, token) => this.onHover(params, token)); - this._connection.onDocumentSymbol(async (params, token) => { - this.recordUserInteractionTime(); + this._connection.onDocumentHighlight(async (params, token) => this.onDocumentHighlight(params, token)); - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + this._connection.onSignatureHelp(async (params, token) => this.onSignatureHelp(params, token)); - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return undefined; - } + this._connection.onCompletion((params, token) => this.onCompletion(params, token)); - const symbolList: DocumentSymbol[] = []; - workspace.serviceInstance.addSymbolsForDocument(filePath, symbolList, token); - if (this.client.hasHierarchicalDocumentSymbolCapability) { - return symbolList; - } + this._connection.onCompletionResolve(async (params, token) => this.onCompletionResolve(params, token)); - return convertToFlatSymbols(params.textDocument.uri, symbolList); - }); + this._connection.onRenameRequest(async (params, token) => this.onRenameRequest(params, token)); - this._connection.onWorkspaceSymbol(async (params, token, _, resultReporter) => { - const symbolList: SymbolInformation[] = []; + const callHierarchy = this._connection.languages.callHierarchy; + callHierarchy.onPrepare(async (params, token) => this.onPrepare(params, token)); + callHierarchy.onIncomingCalls(async (params, token) => this.onIncomingCalls(params, token)); + callHierarchy.onOutgoingCalls(async (params, token) => this.onOutgoingCalls(params, token)); - const reporter: 
WorkspaceSymbolCallback = resultReporter - ? (symbols) => resultReporter.report(symbols) - : (symbols) => symbolList.push(...symbols); + this._connection.onDidOpenTextDocument(async (params) => this.onDidOpenTextDocument(params)); + this._connection.onDidChangeTextDocument(async (params) => this.onDidChangeTextDocument(params)); + this._connection.onDidCloseTextDocument(async (params) => this.onDidCloseTextDocument(params)); + this._connection.onDidChangeWatchedFiles((params) => this.onDidChangeWatchedFiles(params)); - for (const workspace of this._workspaceMap.values()) { - await workspace.isInitialized.promise; - if (!workspace.disableLanguageServices) { - workspace.serviceInstance.reportSymbolsForWorkspace(params.query, reporter, token); - } - } + this._connection.onExecuteCommand(async (params, token, reporter) => + this.onExecuteCommand(params, token, reporter) + ); + } - return symbolList; - }); + protected initialize( + params: InitializeParams, + supportedCommands: string[], + supportedCodeActions: string[] + ): InitializeResult { + if (params.locale) { + setLocaleOverride(params.locale); + } - this._connection.onHover(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + this.rootPath = params.rootPath || ''; - const position: Position = { - line: params.position.line, - character: params.position.character, - }; + const capabilities = params.capabilities; + this.client.hasConfigurationCapability = !!capabilities.workspace?.configuration; + this.client.hasWatchFileCapability = !!capabilities.workspace?.didChangeWatchedFiles?.dynamicRegistration; + this.client.hasWorkspaceFoldersCapability = !!capabilities.workspace?.workspaceFolders; + this.client.hasVisualStudioExtensionsCapability = !!(capabilities as any).supportsVisualStudioExtensions; + this.client.hasActiveParameterCapability = + !!capabilities.textDocument?.signatureHelp?.signatureInformation?.activeParameterSupport; + 
this.client.hasSignatureLabelOffsetCapability = + !!capabilities.textDocument?.signatureHelp?.signatureInformation?.parameterInformation?.labelOffsetSupport; + this.client.hasHierarchicalDocumentSymbolCapability = + !!capabilities.textDocument?.documentSymbol?.hierarchicalDocumentSymbolSupport; + this.client.hasDocumentChangeCapability = + !!capabilities.workspace?.workspaceEdit?.documentChanges && + !!capabilities.workspace.workspaceEdit?.resourceOperations; + this.client.hasDocumentAnnotationCapability = !!capabilities.workspace?.workspaceEdit?.changeAnnotationSupport; - const workspace = await this.getWorkspaceForFile(filePath); - const hoverResults = workspace.serviceInstance.getHoverForPosition( - filePath, - position, - this.client.hoverContentFormat, - token + this.client.hoverContentFormat = this._getCompatibleMarkupKind(capabilities.textDocument?.hover?.contentFormat); + this.client.completionDocFormat = this._getCompatibleMarkupKind( + capabilities.textDocument?.completion?.completionItem?.documentationFormat + ); + this.client.completionSupportsSnippet = !!capabilities.textDocument?.completion?.completionItem?.snippetSupport; + this.client.signatureDocFormat = this._getCompatibleMarkupKind( + capabilities.textDocument?.signatureHelp?.signatureInformation?.documentationFormat + ); + const supportedDiagnosticTags = capabilities.textDocument?.publishDiagnostics?.tagSupport?.valueSet || []; + this.client.supportsUnnecessaryDiagnosticTag = supportedDiagnosticTags.some( + (tag) => tag === DiagnosticTag.Unnecessary + ); + this.client.supportsDeprecatedDiagnosticTag = supportedDiagnosticTags.some( + (tag) => tag === DiagnosticTag.Deprecated + ); + this.client.hasWindowProgressCapability = !!capabilities.window?.workDoneProgress; + this.client.hasGoToDeclarationCapability = !!capabilities.textDocument?.declaration; + this.client.completionItemResolveSupportsAdditionalTextEdits = + 
!!capabilities.textDocument?.completion?.completionItem?.resolveSupport?.properties.some( + (p) => p === 'additionalTextEdits' ); - return convertHoverResults(this.client.hoverContentFormat, hoverResults); - }); - this._connection.onDocumentHighlight(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + // Create a service instance for each of the workspace folders. + if (params.workspaceFolders) { + params.workspaceFolders.forEach((folder) => { + const path = this._uriParser.decodeTextDocumentUri(folder.uri); + this._workspaceMap.set(path, this.createWorkspaceServiceInstance(folder, path)); + }); + } else if (params.rootPath) { + this._workspaceMap.set(params.rootPath, this.createWorkspaceServiceInstance(undefined, params.rootPath)); + } + // Bug? Or do we need to send another event always? + this.updateSettingsForAllWorkspaces(); - const position: Position = { - line: params.position.line, - character: params.position.character, - }; + const result: InitializeResult = { + capabilities: { + textDocumentSync: TextDocumentSyncKind.Incremental, + definitionProvider: { workDoneProgress: true }, + declarationProvider: { workDoneProgress: true }, + typeDefinitionProvider: { workDoneProgress: true }, + referencesProvider: { workDoneProgress: true }, + documentSymbolProvider: { workDoneProgress: true }, + workspaceSymbolProvider: { workDoneProgress: true }, + hoverProvider: { workDoneProgress: true }, + documentHighlightProvider: { workDoneProgress: true }, + renameProvider: { workDoneProgress: true }, + completionProvider: { + triggerCharacters: this.client.hasVisualStudioExtensionsCapability ? 
['.', '[', '@'] : ['.', '['], + resolveProvider: true, + workDoneProgress: true, + }, + signatureHelpProvider: { + triggerCharacters: ['(', ',', ')'], + workDoneProgress: true, + }, + codeActionProvider: { + codeActionKinds: supportedCodeActions, + workDoneProgress: true, + }, + executeCommandProvider: { + commands: supportedCommands, + workDoneProgress: true, + }, + callHierarchyProvider: true, + }, + }; - const workspace = await this.getWorkspaceForFile(filePath); - return workspace.serviceInstance.getDocumentHighlight(filePath, position, token); - }); + return result; + } - this._connection.onSignatureHelp(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + protected onInitialized() { + if (this.client.hasWorkspaceFoldersCapability) { + this._connection.workspace.onDidChangeWorkspaceFolders((event) => { + event.removed.forEach((workspace) => { + const rootPath = this._uriParser.decodeTextDocumentUri(workspace.uri); + this._workspaceMap.delete(rootPath); + }); - const position: Position = { - line: params.position.line, - character: params.position.character, - }; + event.added.forEach(async (workspace) => { + const rootPath = this._uriParser.decodeTextDocumentUri(workspace.uri); + const newWorkspace = this.createWorkspaceServiceInstance(workspace, rootPath); + this._workspaceMap.set(rootPath, newWorkspace); + await this.updateSettingsForWorkspace(newWorkspace); + }); + }); + } - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return; - } - const signatureHelpResults = workspace.serviceInstance.getSignatureHelpForPosition( - filePath, - position, - this.client.signatureDocFormat, - token - ); - if (!signatureHelpResults) { - return undefined; - } + // Set up our file watchers. 
+ if (this.client.hasWatchFileCapability) { + this._connection.client.register(DidChangeWatchedFilesNotification.type, { + watchers: [ + ...configFileNames.map((fileName) => { + return { + globPattern: `**/${fileName}`, + kind: WatchKind.Create | WatchKind.Change | WatchKind.Delete, + }; + }), + { + globPattern: '**', + kind: WatchKind.Create | WatchKind.Change | WatchKind.Delete, + }, + ], + }); + } + } - const signatures = signatureHelpResults.signatures.map((sig) => { - let paramInfo: ParameterInformation[] = []; - if (sig.parameters) { - paramInfo = sig.parameters.map((param) => - ParameterInformation.create( - this.client.hasSignatureLabelOffsetCapability - ? [param.startOffset, param.endOffset] - : param.text, - param.documentation - ) - ); - } + protected onDidChangeConfiguration(params: DidChangeConfigurationParams) { + this.console.log(`Received updated settings`); + if (params?.settings) { + this._defaultClientConfig = params?.settings; + } + this.updateSettingsForAllWorkspaces(); + } - const sigInfo = SignatureInformation.create(sig.label, undefined, ...paramInfo); - if (sig.documentation !== undefined) { - sigInfo.documentation = sig.documentation; - } - if (sig.activeParameter !== undefined) { - sigInfo.activeParameter = sig.activeParameter; - } - return sigInfo; - }); + protected async onDefinition( + params: TextDocumentPositionParams, + token: CancellationToken + ): Promise { + return this.getDefinitions( + params, + token, + this.client.hasGoToDeclarationCapability ? DefinitionFilter.PreferSource : DefinitionFilter.All, + (workspace, filePath, position, filter, token) => + workspace.serviceInstance.getDefinitionForPosition(filePath, position, filter, token) + ); + } - // A signature is active if it contains an active parameter, - // or if both the signature and its invocation have no parameters. 
- const isActive = (sig: SignatureInformation) => - sig.activeParameter !== undefined || - (!signatureHelpResults.callHasParameters && !sig.parameters?.length); + protected async onDeclaration( + params: TextDocumentPositionParams, + token: CancellationToken + ): Promise { + return this.getDefinitions( + params, + token, + this.client.hasGoToDeclarationCapability ? DefinitionFilter.PreferStubs : DefinitionFilter.All, + (workspace, filePath, position, filter, token) => + workspace.serviceInstance.getDefinitionForPosition(filePath, position, filter, token) + ); + } - let activeSignature: number | null = signatures.findIndex(isActive); - if (activeSignature === -1) { - activeSignature = null; - } + protected async onTypeDefinition( + params: TextDocumentPositionParams, + token: CancellationToken + ): Promise { + return this.getDefinitions(params, token, DefinitionFilter.All, (workspace, filePath, position, _, token) => + workspace.serviceInstance.getTypeDefinitionForPosition(filePath, position, token) + ); + } - let activeParameter = activeSignature !== null ? signatures[activeSignature].activeParameter! : null; + protected async getDefinitions( + params: TextDocumentPositionParams, + token: CancellationToken, + filter: DefinitionFilter, + getDefinitionsFunc: ( + workspace: WorkspaceServiceInstance, + filePath: string, + position: Position, + filter: DefinitionFilter, + token: CancellationToken + ) => DocumentRange[] | undefined + ) { + this.recordUserInteractionTime(); - // Check if we should reuse the user's signature selection. If the retrigger was not "invoked" - // (i.e., the signature help call was automatically generated by the client due to some navigation - // or text change), check to see if the previous signature is still "active". If so, we mark it as - // active in our response. - // - // This isn't a perfect method. For nested calls, we can't tell when we are moving between them. 
- // Ideally, we would include a token in the signature help responses to compare later, allowing us - // to know when the user's navigated to a nested call (and therefore the old signature's info does - // not apply), but for now manually retriggering the signature help will work around the issue. - if (params.context?.isRetrigger && params.context.triggerKind !== SignatureHelpTriggerKind.Invoked) { - const prevActiveSignature = params.context.activeSignatureHelp?.activeSignature ?? null; - if (prevActiveSignature !== null && prevActiveSignature < signatures.length) { - const sig = signatures[prevActiveSignature]; - if (isActive(sig)) { - activeSignature = prevActiveSignature; - activeParameter = sig.activeParameter ?? null; - } - } - } + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); - if (this.client.hasActiveParameterCapability || activeSignature === null) { - // If there is no active parameter, then we want the client to not highlight anything. - // Unfortunately, the LSP spec says that "undefined" or "out of bounds" values should be - // treated as 0, which is the first parameter. That's not what we want, but thankfully - // VS Code (and potentially other clients) choose to handle out of bounds values by - // not highlighting them, which is what we want. - // - // The spec defines activeParameter as uinteger, so use the maximum length of any - // signature's parameter list to ensure that the value is always out of range. - // - // We always set this even if some signature has an active parameter, as this - // value is used as the fallback for signatures that don't explicitly specify an - // active parameter (and we use "undefined" to mean "no active parameter"). 
- // - // We could apply this hack to each individual signature such that they all specify - // activeParameter, but that would make it more difficult to determine which actually - // are active when comparing, and we already have to set this for clients which don't - // support per-signature activeParameter. - // - // See: - // - https://github.com/microsoft/language-server-protocol/issues/1271 - // - https://github.com/microsoft/pyright/pull/1783 - activeParameter = Math.max(...signatures.map((s) => s.parameters?.length ?? 0)); - } + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return undefined; + } - return { signatures, activeSignature, activeParameter }; - }); + const locations = getDefinitionsFunc(workspace, filePath, position, filter, token); + if (!locations) { + return undefined; + } + return locations + .filter((loc) => !this.fs.isInZipOrEgg(loc.path)) + .map((loc) => Location.create(convertPathToUri(this.fs, loc.path), loc.range)); + } - this._connection.onCompletion((params, token) => this.onCompletion(params, token)); + protected async onReferences( + params: ReferenceParams, + token: CancellationToken, + workDoneReporter: WorkDoneProgressReporter, + resultReporter: ResultProgressReporter | undefined + ): Promise { + if (this._pendingFindAllRefsCancellationSource) { + this._pendingFindAllRefsCancellationSource.cancel(); + this._pendingFindAllRefsCancellationSource = undefined; + } - this._connection.onCompletionResolve(async (params, token) => { - // Cancellation bugs in vscode and LSP: - // https://github.com/microsoft/vscode-languageserver-node/issues/615 - // https://github.com/microsoft/vscode/issues/95485 - // - // If resolver throws cancellation exception, LSP and VSCode - // cache that result and never call us back. 
- const completionItemData = params.data as CompletionItemData; - if (completionItemData && completionItemData.filePath) { - const workspace = await this.getWorkspaceForFile(completionItemData.workspacePath); - this.resolveWorkspaceCompletionItem(workspace, completionItemData.filePath, params, token); - } - return params; - }); + // VS Code doesn't support cancellation of "final all references". + // We provide a progress bar a cancellation button so the user can cancel + // any long-running actions. + const progress = await this._getProgressReporter( + workDoneReporter, + Localizer.CodeAction.findingReferences(), + token + ); - this._connection.onRenameRequest(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + const source = progress.source; + this._pendingFindAllRefsCancellationSource = source; - const position: Position = { - line: params.position.line, - character: params.position.character, - }; + try { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition( + params.textDocument, + params.position + ); const workspace = await this.getWorkspaceForFile(filePath); if (workspace.disableLanguageServices) { return; } - const editActions = workspace.serviceInstance.renameSymbolAtPosition( + const convert = (locs: DocumentRange[]): Location[] => { + return locs + .filter((loc) => !this.fs.isInZipOrEgg(loc.path)) + .map((loc) => Location.create(convertPathToUri(this.fs, loc.path), loc.range)); + }; + + const locations: Location[] = []; + const reporter: ReferenceCallback = resultReporter + ? 
(locs) => resultReporter.report(convert(locs)) + : (locs) => locations.push(...convert(locs)); + + workspace.serviceInstance.reportReferencesForPosition( filePath, position, - params.newName, - workspace.rootPath === '', - token + params.context.includeDeclaration, + reporter, + source.token ); - if (!editActions) { - return undefined; - } + return locations; + } finally { + progress.reporter.done(); + source.dispose(); + } + } - return convertWorkspaceEdits(this.fs, editActions); - }); + protected async onDocumentSymbol( + params: DocumentSymbolParams, + token: CancellationToken + ): Promise { + this.recordUserInteractionTime(); - this._connection.languages.callHierarchy.onPrepare(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + const filePath = this._uriParser.decodeTextDocumentUri(params.textDocument.uri); - const position: Position = { - line: params.position.line, - character: params.position.character, - }; + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return undefined; + } - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return null; - } + const symbolList: DocumentSymbol[] = []; + workspace.serviceInstance.addSymbolsForDocument(filePath, symbolList, token); + if (this.client.hasHierarchicalDocumentSymbolCapability) { + return symbolList; + } - const callItem = workspace.serviceInstance.getCallForPosition(filePath, position, token) || null; - if (!callItem) { - return null; - } + return convertToFlatSymbols(params.textDocument.uri, symbolList); + } - if (this.fs.isInZipOrEgg(callItem.uri)) { - return null; + protected async onWorkspaceSymbol( + params: WorkspaceSymbolParams, + token: CancellationToken, + resultReporter: ResultProgressReporter | undefined + ): Promise { + const symbolList: SymbolInformation[] = []; + + const reporter: WorkspaceSymbolCallback = resultReporter + ? 
(symbols) => resultReporter.report(symbols) + : (symbols) => symbolList.push(...symbols); + + for (const workspace of this._workspaceMap.values()) { + await workspace.isInitialized.promise; + if (!workspace.disableLanguageServices && !workspace.disableWorkspaceSymbol) { + workspace.serviceInstance.reportSymbolsForWorkspace(params.query, reporter, token); } + } - // Convert the file path in the item to proper URI. - callItem.uri = convertPathToUri(this.fs, callItem.uri); + return symbolList; + } - return [callItem]; - }); + protected async onHover(params: HoverParams, token: CancellationToken) { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); - this._connection.languages.callHierarchy.onIncomingCalls(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.item.uri); + const workspace = await this.getWorkspaceForFile(filePath); + const hoverResults = workspace.serviceInstance.getHoverForPosition( + filePath, + position, + this.client.hoverContentFormat, + token + ); + return convertHoverResults(this.client.hoverContentFormat, hoverResults); + } - const position: Position = { - line: params.item.range.start.line, - character: params.item.range.start.character, - }; + protected async onDocumentHighlight( + params: DocumentHighlightParams, + token: CancellationToken + ): Promise { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); + const workspace = await this.getWorkspaceForFile(filePath); + return workspace.serviceInstance.getDocumentHighlight(filePath, position, token); + } - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return null; - } - - let callItems = workspace.serviceInstance.getIncomingCallsForPosition(filePath, position, token) || null; - if (!callItems || callItems.length === 0) { - return null; - } + protected async onSignatureHelp( + params: 
SignatureHelpParams, + token: CancellationToken + ): Promise { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); - callItems = callItems.filter((item) => !this.fs.isInZipOrEgg(item.from.uri)); + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return; + } + const signatureHelpResults = workspace.serviceInstance.getSignatureHelpForPosition( + filePath, + position, + this.client.signatureDocFormat, + token + ); + if (!signatureHelpResults) { + return undefined; + } - // Convert the file paths in the items to proper URIs. - callItems.forEach((item) => { - item.from.uri = convertPathToUri(this.fs, item.from.uri); - }); + const signatures = signatureHelpResults.signatures.map((sig) => { + let paramInfo: ParameterInformation[] = []; + if (sig.parameters) { + paramInfo = sig.parameters.map((param) => + ParameterInformation.create( + this.client.hasSignatureLabelOffsetCapability + ? [param.startOffset, param.endOffset] + : param.text, + param.documentation + ) + ); + } - return callItems; + const sigInfo = SignatureInformation.create(sig.label, undefined, ...paramInfo); + if (sig.documentation !== undefined) { + sigInfo.documentation = sig.documentation; + } + if (sig.activeParameter !== undefined) { + sigInfo.activeParameter = sig.activeParameter; + } + return sigInfo; }); - this._connection.languages.callHierarchy.onOutgoingCalls(async (params, token) => { - const filePath = convertUriToPath(this.fs, params.item.uri); + // A signature is active if it contains an active parameter, + // or if both the signature and its invocation have no parameters. 
+ const isActive = (sig: SignatureInformation) => + sig.activeParameter !== undefined || (!signatureHelpResults.callHasParameters && !sig.parameters?.length); - const position: Position = { - line: params.item.range.start.line, - character: params.item.range.start.character, - }; + let activeSignature: number | null = signatures.findIndex(isActive); + if (activeSignature === -1) { + activeSignature = null; + } - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return null; + let activeParameter = activeSignature !== null ? signatures[activeSignature].activeParameter! : null; + + // Check if we should reuse the user's signature selection. If the retrigger was not "invoked" + // (i.e., the signature help call was automatically generated by the client due to some navigation + // or text change), check to see if the previous signature is still "active". If so, we mark it as + // active in our response. + // + // This isn't a perfect method. For nested calls, we can't tell when we are moving between them. + // Ideally, we would include a token in the signature help responses to compare later, allowing us + // to know when the user's navigated to a nested call (and therefore the old signature's info does + // not apply), but for now manually retriggering the signature help will work around the issue. + if (params.context?.isRetrigger && params.context.triggerKind !== SignatureHelpTriggerKind.Invoked) { + const prevActiveSignature = params.context.activeSignatureHelp?.activeSignature ?? null; + if (prevActiveSignature !== null && prevActiveSignature < signatures.length) { + const sig = signatures[prevActiveSignature]; + if (isActive(sig)) { + activeSignature = prevActiveSignature; + activeParameter = sig.activeParameter ?? 
null; + } } + } - let callItems = workspace.serviceInstance.getOutgoingCallsForPosition(filePath, position, token) || null; - if (!callItems || callItems.length === 0) { - return null; - } + if (this.client.hasActiveParameterCapability || activeSignature === null) { + // If there is no active parameter, then we want the client to not highlight anything. + // Unfortunately, the LSP spec says that "undefined" or "out of bounds" values should be + // treated as 0, which is the first parameter. That's not what we want, but thankfully + // VS Code (and potentially other clients) choose to handle out of bounds values by + // not highlighting them, which is what we want. + // + // The spec defines activeParameter as uinteger, so use the maximum length of any + // signature's parameter list to ensure that the value is always out of range. + // + // We always set this even if some signature has an active parameter, as this + // value is used as the fallback for signatures that don't explicitly specify an + // active parameter (and we use "undefined" to mean "no active parameter"). + // + // We could apply this hack to each individual signature such that they all specify + // activeParameter, but that would make it more difficult to determine which actually + // are active when comparing, and we already have to set this for clients which don't + // support per-signature activeParameter. + // + // See: + // - https://github.com/microsoft/language-server-protocol/issues/1271 + // - https://github.com/microsoft/pyright/pull/1783 + activeParameter = Math.max(...signatures.map((s) => s.parameters?.length ?? 0)); + } - callItems = callItems.filter((item) => !this.fs.isInZipOrEgg(item.to.uri)); + return { signatures, activeSignature, activeParameter }; + } - // Convert the file paths in the items to proper URIs. 
- callItems.forEach((item) => { - item.to.uri = convertPathToUri(this.fs, item.to.uri); - }); + protected async onCompletion( + params: CompletionParams, + token: CancellationToken + ): Promise { + // We set completion incomplete for the first invocation and next consecutive call, + // but after that we mark it as completed so the client doesn't repeatedly call back. + // We mark the first one as incomplete because completion could be invoked without + // any meaningful character provided, such as an explicit completion invocation (ctrl+space) + // or a period. That might cause us to not include some items (e.g., auto-imports). + // The next consecutive call provides some characters to help us to pick + // better completion items. After that, we are not going to introduce new items, + // so we can let the client to do the filtering and caching. + const completionIncomplete = + this._lastTriggerKind !== CompletionTriggerKind.TriggerForIncompleteCompletions || + params.context?.triggerKind !== CompletionTriggerKind.TriggerForIncompleteCompletions; - return callItems; - }); + this._lastTriggerKind = params.context?.triggerKind; - this._connection.onDidOpenTextDocument(async (params) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); - if (!(this.fs as PyrightFileSystem).addUriMap(params.textDocument.uri, filePath)) { - // We do not support opening 1 file with 2 different uri. 
- return; - } + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); - const workspace = await this.getWorkspaceForFile(filePath); - workspace.serviceInstance.setFileOpened(filePath, params.textDocument.version, params.textDocument.text); - }); + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return; + } - this._connection.onDidChangeTextDocument(async (params) => { - this.recordUserInteractionTime(); + const completions = await this.getWorkspaceCompletionsForPosition( + workspace, + filePath, + position, + workspace.rootPath, + this.getCompletionOptions(params), + token + ); - const filePath = convertUriToPath(this.fs, params.textDocument.uri); - if (!(this.fs as PyrightFileSystem).hasUriMapEntry(params.textDocument.uri, filePath)) { - // We do not support opening 1 file with 2 different uri. - return; - } + if (completions && completions.completionList) { + completions.completionList.isIncomplete = completionIncomplete; + } - const workspace = await this.getWorkspaceForFile(filePath); - workspace.serviceInstance.updateOpenFileContents( - filePath, - params.textDocument.version, - params.contentChanges - ); + return completions?.completionList; + } + + // Cancellation bugs in vscode and LSP: + // https://github.com/microsoft/vscode-languageserver-node/issues/615 + // https://github.com/microsoft/vscode/issues/95485 + // + // If resolver throws cancellation exception, LSP and VSCode + // cache that result and never call us back. 
+ protected async onCompletionResolve(params: CompletionItem, token: CancellationToken): Promise { + const completionItemData = params.data as CompletionItemData; + if (completionItemData && completionItemData.filePath) { + const workspace = await this.getWorkspaceForFile(completionItemData.workspacePath); + this.resolveWorkspaceCompletionItem(workspace, completionItemData.filePath, params, token); + } + return params; + } + + protected async onRenameRequest( + params: RenameParams, + token: CancellationToken + ): Promise { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); + + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return; + } + + const editActions = workspace.serviceInstance.renameSymbolAtPosition( + filePath, + position, + params.newName, + workspace.rootPath === '', + token + ); + + if (!editActions) { + return undefined; + } + + return convertWorkspaceEdits(this.fs, editActions); + } + + protected async onPrepare( + params: CallHierarchyPrepareParams, + token: CancellationToken + ): Promise { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.textDocument, params.position); + + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return null; + } + + const callItem = workspace.serviceInstance.getCallForPosition(filePath, position, token) || null; + if (!callItem) { + return null; + } + + if (this.fs.isInZipOrEgg(callItem.uri)) { + return null; + } + + // Convert the file path in the item to proper URI. 
+ callItem.uri = convertPathToUri(this.fs, callItem.uri); + + return [callItem]; + } + + protected async onIncomingCalls(params: CallHierarchyIncomingCallsParams, token: CancellationToken) { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.item, params.item.range.start); + + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return null; + } + + let callItems = workspace.serviceInstance.getIncomingCallsForPosition(filePath, position, token) || null; + if (!callItems || callItems.length === 0) { + return null; + } + + callItems = callItems.filter((item) => !this.fs.isInZipOrEgg(item.from.uri)); + + // Convert the file paths in the items to proper URIs. + callItems.forEach((item) => { + item.from.uri = convertPathToUri(this.fs, item.from.uri); }); - this._connection.onDidCloseTextDocument(async (params) => { - const filePath = convertUriToPath(this.fs, params.textDocument.uri); - if (!(this.fs as PyrightFileSystem).removeUriMap(params.textDocument.uri, filePath)) { - // We do not support opening 1 file with 2 different uri. - return; - } + return callItems; + } - const workspace = await this.getWorkspaceForFile(filePath); - workspace.serviceInstance.setFileClosed(filePath); + protected async onOutgoingCalls( + params: CallHierarchyOutgoingCallsParams, + token: CancellationToken + ): Promise { + const { filePath, position } = this._uriParser.decodeTextDocumentPosition(params.item, params.item.range.start); + + const workspace = await this.getWorkspaceForFile(filePath); + if (workspace.disableLanguageServices) { + return null; + } + + let callItems = workspace.serviceInstance.getOutgoingCallsForPosition(filePath, position, token) || null; + if (!callItems || callItems.length === 0) { + return null; + } + + callItems = callItems.filter((item) => !this.fs.isInZipOrEgg(item.to.uri)); + + // Convert the file paths in the items to proper URIs. 
+ callItems.forEach((item) => { + item.to.uri = convertPathToUri(this.fs, item.to.uri); }); - this._connection.onDidChangeWatchedFiles((params) => { - params.changes.forEach((change) => { - const filePath = convertUriToPath(this.fs, change.uri); - const eventType: FileWatcherEventType = change.type === 1 ? 'add' : 'change'; - this._fileWatcherProvider.onFileChange(eventType, filePath); - }); + return callItems; + } + + protected async onDidOpenTextDocument(params: DidOpenTextDocumentParams) { + const filePath = this._uriParser.decodeTextDocumentUri(params.textDocument.uri); + + if (!(this.fs as PyrightFileSystem).addUriMap(params.textDocument.uri, filePath)) { + // We do not support opening 1 file with 2 different uri. + return; + } + + const workspace = await this.getWorkspaceForFile(filePath); + workspace.serviceInstance.setFileOpened(filePath, params.textDocument.version, params.textDocument.text); + } + + protected async onDidChangeTextDocument(params: DidChangeTextDocumentParams) { + this.recordUserInteractionTime(); + + const filePath = this._uriParser.decodeTextDocumentUri(params.textDocument.uri); + if (!(this.fs as PyrightFileSystem).hasUriMapEntry(params.textDocument.uri, filePath)) { + // We do not support opening 1 file with 2 different uri. + return; + } + + const workspace = await this.getWorkspaceForFile(filePath); + workspace.serviceInstance.updateOpenFileContents(filePath, params.textDocument.version, params.contentChanges); + } + + protected async onDidCloseTextDocument(params: DidCloseTextDocumentParams) { + const filePath = this._uriParser.decodeTextDocumentUri(params.textDocument.uri); + if (!(this.fs as PyrightFileSystem).removeUriMap(params.textDocument.uri, filePath)) { + // We do not support opening 1 file with 2 different uri. 
+ return; + } + + const workspace = await this.getWorkspaceForFile(filePath); + workspace.serviceInstance.setFileClosed(filePath); + } + + protected onDidChangeWatchedFiles(params: DidChangeWatchedFilesParams) { + params.changes.forEach((change) => { + const filePath = this._uriParser.decodeTextDocumentUri(change.uri); + const eventType: FileWatcherEventType = change.type === 1 ? 'add' : 'change'; + this._fileWatcherProvider.onFileChange(eventType, filePath); }); + } - this._connection.onInitialized(() => { - if (this.client.hasWorkspaceFoldersCapability) { - this._connection.workspace.onDidChangeWorkspaceFolders((event) => { - event.removed.forEach((workspace) => { - const rootPath = convertUriToPath(this.fs, workspace.uri); - this._workspaceMap.delete(rootPath); - }); - - event.added.forEach(async (workspace) => { - const rootPath = convertUriToPath(this.fs, workspace.uri); - const newWorkspace = this.createWorkspaceServiceInstance(workspace, rootPath); - this._workspaceMap.set(rootPath, newWorkspace); - await this.updateSettingsForWorkspace(newWorkspace); - }); - }); - } + protected async onExecuteCommand( + params: ExecuteCommandParams, + token: CancellationToken, + reporter: WorkDoneProgressReporter + ) { + // Cancel running command if there is one. + if (this._pendingCommandCancellationSource) { + this._pendingCommandCancellationSource.cancel(); + this._pendingCommandCancellationSource = undefined; + } - // Set up our file watchers. 
- if (this.client.hasWatchFileCapability) { - this._connection.client.register(DidChangeWatchedFilesNotification.type, { - watchers: [ - ...configFileNames.map((fileName) => { - return { - globPattern: `**/${fileName}`, - kind: WatchKind.Create | WatchKind.Change | WatchKind.Delete, - }; - }), - { - globPattern: '**', - kind: WatchKind.Create | WatchKind.Change | WatchKind.Delete, - }, - ], - }); + const executeCommand = async (token: CancellationToken) => { + const result = await this.executeCommand(params, token); + if (WorkspaceEdit.is(result)) { + // Tell client to apply edits. + // Do not await; the client isn't expecting a result. + this._connection.workspace.applyEdit({ label: `Command '${params.command}'`, edit: result }); } - }); - this._connection.onExecuteCommand(async (params, token, reporter) => { - // Cancel running command if there is one. - if (this._pendingCommandCancellationSource) { - this._pendingCommandCancellationSource.cancel(); - this._pendingCommandCancellationSource = undefined; + if (CommandResult.is(result)) { + // Tell client to apply edits. + // Await so that we return after the edit is complete. + await this._connection.workspace.applyEdit({ label: result.label, edit: result.edits }); } - const executeCommand = async (token: CancellationToken) => { - const result = await this.executeCommand(params, token); - if (WorkspaceEdit.is(result)) { - // Tell client to apply edits. - // Do not await; the client isn't expecting a result. - this._connection.workspace.applyEdit(result); - } + return result; + }; - if (CommandResult.is(result)) { - // Tell client to apply edits. - // Await so that we return after the edit is complete. - await this._connection.workspace.applyEdit(result.edits); - } + if (this.isLongRunningCommand(params.command)) { + // Create a progress dialog for long-running commands. 
+ const progress = await this._getProgressReporter(reporter, Localizer.CodeAction.executingCommand(), token); - return result; - }; + const source = progress.source; + this._pendingCommandCancellationSource = source; - if (this.isLongRunningCommand(params.command)) { - // Create a progress dialog for long-running commands. - const progress = await this._getProgressReporter( - params.workDoneToken, - reporter, - Localizer.CodeAction.executingCommand() - ); - const source = CancelAfter(token, progress.token); - this._pendingCommandCancellationSource = source; - - try { - const result = await executeCommand(source.token); - return result; - } finally { - progress.reporter.done(); - source.dispose(); - } - } else { - const result = await executeCommand(token); + try { + const result = await executeCommand(source.token); return result; + } finally { + progress.reporter.done(); + source.dispose(); } - }); + } else { + const result = await executeCommand(token); + return result; + } } protected resolveWorkspaceCompletionItem( @@ -931,13 +1179,14 @@ export abstract class LanguageServerBase implements LanguageServerInterface { filePath: string, position: Position, workspacePath: string, + options: CompletionOptions, token: CancellationToken - ): Promise { + ): Promise { return workspace.serviceInstance.getCompletionsForPosition( filePath, position, workspacePath, - this.getCompletionOptions(), + options, undefined, token ); @@ -949,98 +1198,15 @@ export abstract class LanguageServerBase implements LanguageServerInterface { }); } - protected getCompletionOptions() { + protected getCompletionOptions(params?: CompletionParams) { return { format: this.client.completionDocFormat, snippet: this.client.completionSupportsSnippet, lazyEdit: this.client.completionItemResolveSupportsAdditionalTextEdits, + autoImport: true, }; } - protected initialize( - params: InitializeParams, - supportedCommands: string[], - supportedCodeActions: string[] - ): InitializeResult { - this.rootPath = 
params.rootPath || ''; - - const capabilities = params.capabilities; - this.client.hasConfigurationCapability = !!capabilities.workspace?.configuration; - this.client.hasWatchFileCapability = !!capabilities.workspace?.didChangeWatchedFiles?.dynamicRegistration; - this.client.hasWorkspaceFoldersCapability = !!capabilities.workspace?.workspaceFolders; - this.client.hasVisualStudioExtensionsCapability = !!(capabilities as any).supportsVisualStudioExtensions; - this.client.hasActiveParameterCapability = - !!capabilities.textDocument?.signatureHelp?.signatureInformation?.activeParameterSupport; - this.client.hasSignatureLabelOffsetCapability = - !!capabilities.textDocument?.signatureHelp?.signatureInformation?.parameterInformation?.labelOffsetSupport; - this.client.hasHierarchicalDocumentSymbolCapability = - !!capabilities.textDocument?.documentSymbol?.hierarchicalDocumentSymbolSupport; - this.client.hoverContentFormat = this._getCompatibleMarkupKind(capabilities.textDocument?.hover?.contentFormat); - this.client.completionDocFormat = this._getCompatibleMarkupKind( - capabilities.textDocument?.completion?.completionItem?.documentationFormat - ); - this.client.completionSupportsSnippet = !!capabilities.textDocument?.completion?.completionItem?.snippetSupport; - this.client.signatureDocFormat = this._getCompatibleMarkupKind( - capabilities.textDocument?.signatureHelp?.signatureInformation?.documentationFormat - ); - const supportedDiagnosticTags = capabilities.textDocument?.publishDiagnostics?.tagSupport?.valueSet || []; - this.client.supportsUnnecessaryDiagnosticTag = supportedDiagnosticTags.some( - (tag) => tag === DiagnosticTag.Unnecessary - ); - this.client.hasWindowProgressCapability = !!capabilities.window?.workDoneProgress; - this.client.hasGoToDeclarationCapability = !!capabilities.textDocument?.declaration; - this.client.completionItemResolveSupportsAdditionalTextEdits = - !!capabilities.textDocument?.completion?.completionItem?.resolveSupport?.properties.some( - 
(p) => p === 'additionalTextEdits' - ); - - // Create a service instance for each of the workspace folders. - if (params.workspaceFolders) { - params.workspaceFolders.forEach((folder) => { - const path = convertUriToPath(this.fs, folder.uri); - this._workspaceMap.set(path, this.createWorkspaceServiceInstance(folder, path)); - }); - } else if (params.rootPath) { - this._workspaceMap.set(params.rootPath, this.createWorkspaceServiceInstance(undefined, params.rootPath)); - } - // Bug? Or do we need to send another event always? - this.updateSettingsForAllWorkspaces(); - - const result: InitializeResult = { - capabilities: { - textDocumentSync: TextDocumentSyncKind.Incremental, - definitionProvider: { workDoneProgress: true }, - declarationProvider: { workDoneProgress: true }, - referencesProvider: { workDoneProgress: true }, - documentSymbolProvider: { workDoneProgress: true }, - workspaceSymbolProvider: { workDoneProgress: true }, - hoverProvider: { workDoneProgress: true }, - documentHighlightProvider: { workDoneProgress: true }, - renameProvider: { workDoneProgress: true }, - completionProvider: { - triggerCharacters: this.client.hasVisualStudioExtensionsCapability ? ['.', '[', '@'] : ['.', '['], - resolveProvider: true, - workDoneProgress: true, - }, - signatureHelpProvider: { - triggerCharacters: ['(', ',', ')'], - workDoneProgress: true, - }, - codeActionProvider: { - codeActionKinds: supportedCodeActions, - workDoneProgress: true, - }, - executeCommandProvider: { - commands: supportedCommands, - workDoneProgress: true, - }, - callHierarchyProvider: true, - }, - }; - - return result; - } - protected createWorkspaceServiceInstance( workspace: WorkspaceFolder | undefined, rootPath: string @@ -1052,10 +1218,21 @@ export abstract class LanguageServerBase implements LanguageServerInterface { serviceInstance: this.createAnalyzerService(workspace?.name ?? 
rootPath), disableLanguageServices: false, disableOrganizeImports: false, + disableWorkspaceSymbol: false, isInitialized: createDeferred(), }; } + protected convertDiagnostics(fileDiagnostics: FileDiagnostics): PublishDiagnosticsParams[] { + return [ + { + uri: convertPathToUri(this.fs, fileDiagnostics.filePath), + version: fileDiagnostics.version, + diagnostics: this._convertDiagnostics(fileDiagnostics.diagnostics), + }, + ]; + } + protected onAnalysisCompletedHandler(results: AnalysisResults): void { // Send the computed diagnostics to the client. results.diagnostics.forEach((fileDiag) => { @@ -1063,12 +1240,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface { return; } - this._connection.sendDiagnostics({ - uri: convertPathToUri(this.fs, fileDiag.filePath), - version: fileDiag.version, - diagnostics: this._convertDiagnostics(fileDiag.diagnostics), - }); - + this._sendDiagnostics(this.convertDiagnostics(fileDiag)); (this.fs as PyrightFileSystem).pendingRequest(fileDiag.filePath, fileDiag.diagnostics.length > 0); }); @@ -1122,50 +1294,6 @@ export abstract class LanguageServerBase implements LanguageServerInterface { AnalyzerServiceExecutor.runWithOptions(this.rootPath, workspace, serverSettings, typeStubTargetImportName); } - protected async onCompletion( - params: CompletionParams, - token: CancellationToken - ): Promise { - // We set completion incomplete for the first invocation and next consecutive call, - // but after that we mark it as completed so the client doesn't repeatedly call back. - // We mark the first one as incomplete because completion could be invoked without - // any meaningful character provided, such as an explicit completion invocation (ctrl+space) - // or a period. That might cause us to not include some items (e.g., auto-imports). - // The next consecutive call provides some characters to help us to pick - // better completion items. 
After that, we are not going to introduce new items, - // so we can let the client to do the filtering and caching. - const completionIncomplete = - this._lastTriggerKind !== CompletionTriggerKind.TriggerForIncompleteCompletions || - params.context?.triggerKind !== CompletionTriggerKind.TriggerForIncompleteCompletions; - - this._lastTriggerKind = params.context?.triggerKind; - - const filePath = convertUriToPath(this.fs, params.textDocument.uri); - const position: Position = { - line: params.position.line, - character: params.position.character, - }; - - const workspace = await this.getWorkspaceForFile(filePath); - if (workspace.disableLanguageServices) { - return; - } - - const completions = await this.getWorkspaceCompletionsForPosition( - workspace, - filePath, - position, - workspace.rootPath, - token - ); - - if (completions && completions.completionList) { - completions.completionList.isIncomplete = completionIncomplete; - } - - return completions?.completionList; - } - protected convertLogLevel(logLevelValue?: string): LogLevel { if (!logLevelValue) { return LogLevel.Info; @@ -1189,6 +1317,12 @@ export abstract class LanguageServerBase implements LanguageServerInterface { } } + private _sendDiagnostics(params: PublishDiagnosticsParams[]) { + for (const param of params) { + this._connection.sendDiagnostics(param); + } + } + private _getCompatibleMarkupKind(clientSupportedFormats: MarkupKind[] | undefined) { const serverSupportedFormats = [MarkupKind.PlainText, MarkupKind.Markdown]; @@ -1201,13 +1335,13 @@ export abstract class LanguageServerBase implements LanguageServerInterface { return MarkupKind.PlainText; } - private async _getProgressReporter( - workDoneToken: string | number | undefined, - clientReporter: WorkDoneProgressReporter, - title: string - ) { - if (workDoneToken) { - return { reporter: clientReporter, token: CancellationToken.None }; + private async _getProgressReporter(reporter: WorkDoneProgressReporter, title: string, token: 
CancellationToken) { + // This is a bit ugly, but we need to determine whether the provided reporter + // is an actual client-side progress reporter or a dummy (null) progress reporter + // created by the LSP library. If it's the latter, we'll create a server-initiated + // progress reporter. + if (reporter.constructor !== nullProgressReporter.constructor) { + return { reporter: reporter, source: CancelAfter(token) }; } const serverInitiatedReporter = await this._connection.window.createWorkDoneProgress(); @@ -1215,7 +1349,7 @@ export abstract class LanguageServerBase implements LanguageServerInterface { return { reporter: serverInitiatedReporter, - token: serverInitiatedReporter.token, + source: CancelAfter(token, serverInitiatedReporter.token), }; } @@ -1235,6 +1369,14 @@ export abstract class LanguageServerBase implements LanguageServerInterface { if (!this.client.supportsUnnecessaryDiagnosticTag) { return; } + } else if (diag.category === DiagnosticCategory.Deprecated) { + vsDiag.tags = [DiagnosticTag.Deprecated]; + vsDiag.severity = DiagnosticSeverity.Hint; + + // If the client doesn't support "deprecated" tags, don't report. 
+ if (!this.client.supportsDeprecatedDiagnosticTag) { + return; + } } if (rule) { @@ -1265,11 +1407,15 @@ export abstract class LanguageServerBase implements LanguageServerInterface { switch (category) { case DiagnosticCategory.Error: return DiagnosticSeverity.Error; + case DiagnosticCategory.Warning: return DiagnosticSeverity.Warning; + case DiagnosticCategory.Information: return DiagnosticSeverity.Information; + case DiagnosticCategory.UnusedCode: + case DiagnosticCategory.Deprecated: return DiagnosticSeverity.Hint; } } @@ -1307,7 +1453,12 @@ export abstract class LanguageServerBase implements LanguageServerInterface { if (trimmedName === 'env:HOME' && process.env.HOME !== undefined) { return process.env.HOME; } - + if (trimmedName === 'env:USERNAME' && process.env.USERNAME !== undefined) { + return process.env.USERNAME; + } + if (trimmedName === 'env:VIRTUAL_ENV' && process.env.VIRTUAL_ENV !== undefined) { + return process.env.VIRTUAL_ENV; + } return match; }); } diff --git a/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts b/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts index 5eeac42b923b..3f2926fb0c21 100644 --- a/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts +++ b/packages/pyright-internal/src/languageService/analyzerServiceExecutor.ts @@ -8,9 +8,14 @@ * with a specified set of options. 
*/ import { isPythonBinary } from '../analyzer/pythonPathUtils'; +import { AnalyzerService } from '../analyzer/service'; +import type { BackgroundAnalysis } from '../backgroundAnalysis'; import { CommandLineOptions } from '../common/commandLineOptions'; +import { LogLevel } from '../common/console'; +import { createDeferred } from '../common/deferred'; +import { FileSystem } from '../common/fileSystem'; import { combinePaths } from '../common/pathUtils'; -import { ServerSettings, WorkspaceServiceInstance } from '../languageServerBase'; +import { LanguageServerInterface, ServerSettings, WorkspaceServiceInstance } from '../languageServerBase'; export class AnalyzerServiceExecutor { static runWithOptions( @@ -29,7 +34,37 @@ export class AnalyzerServiceExecutor { ); // Setting options causes the analyzer service to re-analyze everything. - workspace.serviceInstance.setOptions(commandLineOptions, trackFiles); + workspace.serviceInstance.setOptions(commandLineOptions); + } + + static async cloneService( + ls: LanguageServerInterface, + workspace: WorkspaceServiceInstance, + typeStubTargetImportName?: string, + backgroundAnalysis?: BackgroundAnalysis, + fileSystem?: FileSystem + ): Promise { + // Allocate a temporary pseudo-workspace to perform this job. 
+ const tempWorkspace: WorkspaceServiceInstance = { + workspaceName: `temp workspace for cloned service`, + rootPath: workspace.rootPath, + rootUri: workspace.rootUri, + serviceInstance: workspace.serviceInstance.clone('cloned service', backgroundAnalysis, fileSystem), + disableLanguageServices: true, + disableOrganizeImports: true, + isInitialized: createDeferred(), + }; + + const serverSettings = await ls.getSettings(workspace); + AnalyzerServiceExecutor.runWithOptions( + ls.rootPath, + tempWorkspace, + serverSettings, + typeStubTargetImportName, + /* trackFiles */ false + ); + + return tempWorkspace.serviceInstance; } } @@ -48,6 +83,7 @@ function getEffectiveCommandLineOptions( commandLineOptions.indexing = serverSettings.indexing; commandLineOptions.logTypeEvaluationTime = serverSettings.logTypeEvaluationTime ?? false; commandLineOptions.typeEvaluationTimeThreshold = serverSettings.typeEvaluationTimeThreshold ?? 50; + commandLineOptions.enableAmbientAnalysis = trackFiles; if (!trackFiles) { commandLineOptions.watchForSourceChanges = false; @@ -89,6 +125,12 @@ function getEffectiveCommandLineOptions( commandLineOptions.stubPath = serverSettings.stubPath; } + if (serverSettings.logLevel === LogLevel.Log) { + // When logLevel is "Trace", turn on verboseOutput as well + // so we can get detailed log from analysis service. 
+ commandLineOptions.verboseOutput = true; + } + if (typeStubTargetImportName) { commandLineOptions.typeStubTargetImportName = typeStubTargetImportName; } diff --git a/packages/pyright-internal/src/languageService/autoImporter.ts b/packages/pyright-internal/src/languageService/autoImporter.ts index bb5fdfb0ecfb..3c5923ab9d48 100644 --- a/packages/pyright-internal/src/languageService/autoImporter.ts +++ b/packages/pyright-internal/src/languageService/autoImporter.ts @@ -12,6 +12,7 @@ import { ImportResolver, ModuleNameAndType } from '../analyzer/importResolver'; import { ImportType } from '../analyzer/importResult'; import { getImportGroup, + getImportGroupFromModuleNameAndType, getTextEditsForAutoImportInsertion, getTextEditsForAutoImportSymbolAddition, getTopLevelImports, @@ -30,6 +31,7 @@ import { Position } from '../common/textRange'; import { Duration } from '../common/timing'; import { ParseNodeType } from '../parser/parseNodes'; import { ParseResults } from '../parser/parser'; +import { CompletionMap } from './completionProvider'; import { IndexAliasData, IndexResults } from './documentSymbolProvider'; export interface AutoImportSymbol { @@ -185,7 +187,7 @@ export class AutoImporter { private _importResolver: ImportResolver, private _parseResults: ParseResults, private _invocationPosition: Position, - private _excludes: Set, + private readonly _excludes: CompletionMap, private _moduleSymbolMap: ModuleSymbolMap, private _options: AutoImportOptions ) { @@ -599,7 +601,7 @@ export class AutoImporter { const moduleNameAndType = this._getModuleNameAndTypeFromFilePath(filePath); return [ moduleNameAndType.moduleName, - this._getImportGroupFromModuleNameAndType(moduleNameAndType), + getImportGroupFromModuleNameAndType(moduleNameAndType), moduleNameAndType, ]; } @@ -656,8 +658,11 @@ export class AutoImporter { return this._options.patternMatcher(word, name); } + private _shouldExclude(name: string) { + return this._excludes.has(name, 
CompletionMap.labelOnlyIgnoringAutoImports); + } private _containsName(name: string, source: string | undefined, results: AutoImportResultMap) { - if (this._excludes.has(name)) { + if (this._shouldExclude(name)) { return true; } @@ -676,17 +681,6 @@ export class AutoImporter { return this._importResolver.getModuleNameForImport(filePath, this._execEnvironment); } - private _getImportGroupFromModuleNameAndType(moduleNameAndType: ModuleNameAndType): ImportGroup { - let importGroup = ImportGroup.Local; - if (moduleNameAndType.isLocalTypingsFile || moduleNameAndType.importType === ImportType.ThirdParty) { - importGroup = ImportGroup.ThirdParty; - } else if (moduleNameAndType.importType === ImportType.BuiltIn) { - importGroup = ImportGroup.BuiltIn; - } - - return importGroup; - } - private _getTextEditsForAutoImportByFilePath( moduleName: string, importName: string | undefined, @@ -721,7 +715,11 @@ export class AutoImporter { } // Does an 'import from' statement already exist? - if (importName && importStatement.node.nodeType === ParseNodeType.ImportFrom) { + if ( + importName && + importStatement.node.nodeType === ParseNodeType.ImportFrom && + !importStatement.node.isWildcardImport + ) { // If so, see whether what we want already exist. const importNode = importStatement.node.imports.find((i) => i.name.value === importName); if (importNode) { @@ -743,10 +741,9 @@ export class AutoImporter { edits: this._options.lazyEdit ? undefined : getTextEditsForAutoImportSymbolAddition( - importName, + { name: importName, alias: abbrFromUsers }, importStatement, - this._parseResults, - abbrFromUsers + this._parseResults ), }; } @@ -755,7 +752,7 @@ export class AutoImporter { // If it is the module itself that got imported, make sure we don't import it again. 
// ex) from module import submodule const imported = this._importStatements.orderedImports.find((i) => i.moduleName === moduleName); - if (imported && imported.node.nodeType === ParseNodeType.ImportFrom) { + if (imported && imported.node.nodeType === ParseNodeType.ImportFrom && !imported.node.isWildcardImport) { const importFrom = imported.node.imports.find((i) => i.name.value === importName); if (importFrom) { // For now, we don't check whether alias or moduleName got overwritten at @@ -774,10 +771,9 @@ export class AutoImporter { edits: this._options.lazyEdit ? undefined : getTextEditsForAutoImportSymbolAddition( - importName, + { name: importName, alias: abbrFromUsers }, imported, - this._parseResults, - abbrFromUsers + this._parseResults ), }; } @@ -801,13 +797,12 @@ export class AutoImporter { edits: this._options.lazyEdit ? undefined : getTextEditsForAutoImportInsertion( - importName, + { name: importName, alias: abbrFromUsers }, this._importStatements, moduleName, importGroup, this._parseResults, - this._invocationPosition, - abbrFromUsers + this._invocationPosition ), }; } diff --git a/packages/pyright-internal/src/languageService/codeActionProvider.ts b/packages/pyright-internal/src/languageService/codeActionProvider.ts index b76f953866e0..7d6979775fa8 100644 --- a/packages/pyright-internal/src/languageService/codeActionProvider.ts +++ b/packages/pyright-internal/src/languageService/codeActionProvider.ts @@ -11,6 +11,7 @@ import { CancellationToken, CodeAction, CodeActionKind, Command } from 'vscode-l import { Commands } from '../commands/commands'; import { throwIfCancellationRequested } from '../common/cancellationUtils'; import { AddMissingOptionalToParamAction, CreateTypeStubFileAction } from '../common/diagnostic'; +import { convertPathToUri } from '../common/pathUtils'; import { Range } from '../common/textRange'; import { WorkspaceServiceInstance } from '../languageServerBase'; import { Localizer } from '../localization/localize'; @@ -63,11 +64,13 
@@ export class CodeActionProvider { .getActions()! .find((a) => a.action === Commands.addMissingOptionalToParam) as AddMissingOptionalToParamAction; if (action) { + const fs = workspace.serviceInstance.getImportResolver().fileSystem; const addMissingOptionalAction = CodeAction.create( Localizer.CodeAction.addOptionalToAnnotation(), Command.create( Localizer.CodeAction.addOptionalToAnnotation(), Commands.addMissingOptionalToParam, + convertPathToUri(fs, filePath), action.offsetOfTypeNode ), CodeActionKind.QuickFix diff --git a/packages/pyright-internal/src/languageService/completionProvider.ts b/packages/pyright-internal/src/languageService/completionProvider.ts index 0ae2048a8b62..605ba5a5e0a0 100644 --- a/packages/pyright-internal/src/languageService/completionProvider.ts +++ b/packages/pyright-internal/src/languageService/completionProvider.ts @@ -26,6 +26,7 @@ import { DeclarationType, FunctionDeclaration, isFunctionDeclaration, + isIntrinsicDeclaration, VariableDeclaration, } from '../analyzer/declaration'; import { isDefinedInFile } from '../analyzer/declarationUtils'; @@ -33,6 +34,7 @@ import { convertDocStringToMarkdown, convertDocStringToPlainText } from '../anal import { ImportedModuleDescriptor, ImportResolver } from '../analyzer/importResolver'; import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; import { getCallNodeAndActiveParameterIndex } from '../analyzer/parseTreeUtils'; +import { getScopeForNode } from '../analyzer/scopeUtils'; import { isStubFile, SourceMapper } from '../analyzer/sourceMapper'; import { Symbol, SymbolTable } from '../analyzer/symbol'; import * as SymbolNameUtils from '../analyzer/symbolNameUtils'; @@ -41,7 +43,6 @@ import { getTypedDictMembersForClass } from '../analyzer/typedDicts'; import { getClassDocString, getModuleDocString, - getOverloadedFunctionDocStringsInherited, getPropertyDocStringInherited, getVariableDocString, } from '../analyzer/typeDocStringUtils'; @@ -56,11 +57,10 @@ import { isFunction, 
isInstantiableClass, isModule, - isNone, + isNoneInstance, isOverloadedFunction, isUnbound, isUnknown, - OverloadedFunctionType, Type, TypeBase, UnknownType, @@ -71,7 +71,9 @@ import { getMembersForClass, getMembersForModule, isLiteralType, + isLiteralTypeOrUnion, isProperty, + lookUpObjectMember, } from '../analyzer/typeUtils'; import { throwIfCancellationRequested } from '../common/cancellationUtils'; import { ConfigOptions, ExecutionEnvironment } from '../common/configOptions'; @@ -89,6 +91,7 @@ import { ArgumentCategory, DecoratorNode, DictionaryKeyEntryNode, + DictionaryNode, ErrorExpressionCategory, ErrorNode, ExpressionNode, @@ -101,14 +104,20 @@ import { ParameterNode, ParseNode, ParseNodeType, + SetNode, StringNode, } from '../parser/parseNodes'; import { ParseResults } from '../parser/parser'; -import { StringTokenFlags, Token } from '../parser/tokenizerTypes'; +import { StringToken, StringTokenFlags, Token, TokenType } from '../parser/tokenizerTypes'; import { AbbreviationInfo, AutoImporter, AutoImportResult, ModuleSymbolMap } from './autoImporter'; import { DocumentSymbolCollector } from './documentSymbolCollector'; import { IndexResults } from './documentSymbolProvider'; -import { getAutoImportText, getFunctionDocStringFromType, getOverloadedFunctionTooltip } from './tooltipUtils'; +import { + getAutoImportText, + getFunctionDocStringFromType, + getOverloadedFunctionDocStringsFromType, + getOverloadedFunctionTooltip, +} from './tooltipUtils'; namespace Keywords { const base: string[] = [ @@ -146,6 +155,7 @@ namespace Keywords { 'return', 'try', 'while', + 'with', ]; const python3_5: string[] = [...base, 'async', 'await']; @@ -191,6 +201,9 @@ enum SortCategory { // A keyword in the python syntax. Keyword, + // An enum member. + EnumMember, + // A normal symbol. 
NormalSymbol, @@ -247,17 +260,23 @@ export interface ExtensionInfo { totalTimeInMS: number; } -export interface CompletionResults { - completionList: CompletionList | undefined; +interface CompletionResultsBase { memberAccessInfo?: MemberAccessInfo | undefined; autoImportInfo?: AutoImportInfo | undefined; extensionInfo?: ExtensionInfo | undefined; } +export interface CompletionResultsList extends CompletionResultsBase { + completionList: CompletionList | undefined; +} +export interface CompletionResults extends CompletionResultsBase { + completionMap: CompletionMap | undefined; +} export interface CompletionOptions { format: MarkupKind; snippet: boolean; lazyEdit: boolean; + autoImport: boolean; } export type AbbreviationMap = Map; @@ -283,7 +302,7 @@ interface SymbolDetail { funcParensDisabled?: boolean | undefined; autoImportSource?: string | undefined; autoImportAlias?: string | undefined; - boundObject?: ClassType | undefined; + boundObjectOrClass?: ClassType | undefined; edits?: Edits | undefined; } @@ -316,6 +335,9 @@ export class CompletionProvider { private _execEnv: ExecutionEnvironment; + // Indicate whether invocation is inside of string literal. + private _insideStringLiteral = false; + constructor( private _workspacePath: string, private _parseResults: ParseResults, @@ -328,7 +350,7 @@ export class CompletionProvider { private _evaluator: TypeEvaluator, private _options: CompletionOptions, private _sourceMapper: SourceMapper, - private _autoImportMaps: AutoImportMaps | undefined, + private _autoImportMaps: AutoImportMaps, private _cancellationToken: CancellationToken ) { this._execEnv = this._configOptions.findExecEnvironment(this._filePath); @@ -340,6 +362,15 @@ export class CompletionProvider { return undefined; } + const token = ParseTreeUtils.getTokenOverlapping(this._parseResults.tokenizerOutput.tokens, offset); + if (token?.type === TokenType.String) { + this._insideStringLiteral = TextRange.contains(token, offset) + ? 
true + : (token as StringToken).flags & StringTokenFlags.Unterminated + ? true + : false; + } + let node = ParseTreeUtils.findNodeByOffset(this._parseResults.parseTree, offset); // See if we can get to a "better" node by backing up a few columns. @@ -434,6 +465,37 @@ export class CompletionProvider { return this._getMemberAccessCompletions(curNode.leftExpression, priorWord); } + if (curNode.nodeType === ParseNodeType.Dictionary) { + const completionMap = new CompletionMap(); + if (this._addTypedDictKeys(curNode, /* stringNode */ undefined, priorText, postText, completionMap)) { + return { completionMap }; + } + } + + const dictionaryEntry = ParseTreeUtils.getFirstAncestorOrSelfOfKind( + curNode, + ParseNodeType.DictionaryKeyEntry + ); + if (dictionaryEntry) { + if (dictionaryEntry.parent?.nodeType === ParseNodeType.Dictionary) { + const dictionaryNode = dictionaryEntry.parent; + if (dictionaryNode.trailingCommaToken && dictionaryNode.trailingCommaToken.start < offset) { + const completionMap = new CompletionMap(); + if ( + this._addTypedDictKeys( + dictionaryNode, + /* stringNode */ undefined, + priorText, + postText, + completionMap + ) + ) { + return { completionMap }; + } + } + } + } + if (curNode.nodeType === ParseNodeType.Name) { // This condition is little different than others since it does its own // tree walk up to find context and let outer tree walk up to proceed if it can't find @@ -547,8 +609,8 @@ export class CompletionProvider { // cached, so it's not as bad as it might seem. this.getCompletionsForPosition(); } else if (!completionItem.additionalTextEdits) { - const completionList = CompletionList.create(); - const completionResults = { completionList }; + const completionMap = new CompletionMap(); + const completionResults = { completionMap }; this._addAutoImportCompletions( completionItemData.symbolLabel, @@ -705,11 +767,11 @@ export class CompletionProvider { // to determine its type and offer suggestions based on it. 
switch (node.category) { case ErrorExpressionCategory.MissingIn: { - return this._createSingleKeywordCompletionList('in'); + return this._createSingleKeywordCompletion('in'); } case ErrorExpressionCategory.MissingElse: { - return this._createSingleKeywordCompletionList('else'); + return this._createSingleKeywordCompletion('else'); } case ErrorExpressionCategory.MissingExpression: @@ -720,7 +782,7 @@ export class CompletionProvider { case ErrorExpressionCategory.MissingIndexOrSlice: { let completionResults = this._getLiteralCompletions(node, priorWord, priorText, postText); - if (!completionResults || !completionResults.completionList) { + if (!completionResults || !completionResults.completionMap) { completionResults = this._getExpressionCompletions(node, priorWord, priorText, postText); } @@ -759,12 +821,13 @@ export class CompletionProvider { return node.expression.nodeType === ParseNodeType.Name && node.expression.value === value; } - private _createSingleKeywordCompletionList(keyword: string): CompletionResults { + private _createSingleKeywordCompletion(keyword: string): CompletionResults { const completionItem = CompletionItem.create(keyword); completionItem.kind = CompletionItemKind.Keyword; completionItem.sortText = this._makeSortText(SortCategory.LikelyKeyword, keyword); - const completionList = CompletionList.create([completionItem]); - return { completionList }; + const completionMap = new CompletionMap(); + completionMap.set(completionItem); + return { completionMap }; } private _getMethodOverloadsCompletions(priorWord: string, partialName: NameNode): CompletionResults | undefined { @@ -774,7 +837,7 @@ export class CompletionProvider { } const funcParensDisabled = partialName.parent?.nodeType === ParseNodeType.Function ? 
true : undefined; - const completionList = CompletionList.create(); + const completionMap = new CompletionMap(); const enclosingFunc = ParseTreeUtils.getEnclosingFunction(partialName); symbolTable.forEach((symbol, name) => { @@ -796,14 +859,14 @@ export class CompletionProvider { if (StringUtils.isPatternInSymbol(partialName.value, name)) { const textEdit = this._createReplaceEdits(priorWord, partialName, decl.node.name.value); - this._addSymbol(name, symbol, partialName.value, completionList, { + this._addSymbol(name, symbol, partialName.value, completionMap, { funcParensDisabled, edits: { textEdit }, }); } }); - return { completionList }; + return { completionMap }; function getSymbolTable(evaluator: TypeEvaluator, partialName: NameNode) { const enclosingClass = ParseTreeUtils.getEnclosingClass(partialName, false); @@ -860,7 +923,7 @@ export class CompletionProvider { const staticmethod = decorators?.some((d) => this._checkDecorator(d, 'staticmethod')) ?? false; const classmethod = decorators?.some((d) => this._checkDecorator(d, 'classmethod')) ?? 
false; - const completionList = CompletionList.create(); + const completionMap = new CompletionMap(); symbolTable.forEach((symbol, name) => { let decl = getLastTypedDeclaredForSymbol(symbol); @@ -922,7 +985,7 @@ export class CompletionProvider { const textEdit = this._createReplaceEdits(priorWord, partialName, text); - this._addSymbol(name, symbol, partialName.value, completionList, { + this._addSymbol(name, symbol, partialName.value, completionMap, { // method signature already contains () funcParensDisabled: true, edits: { @@ -934,7 +997,7 @@ export class CompletionProvider { } }); - return { completionList }; + return { completionMap }; } private _createReplaceEdits(priorWord: string, node: ParseNode | undefined, text: string) { @@ -1019,14 +1082,19 @@ export class CompletionProvider { case ParseNodeType.Number: case ParseNodeType.Constant: return true; + case ParseNodeType.String: return (node.token.flags & StringTokenFlags.Format) === 0; + case ParseNodeType.StringList: return node.strings.every(isSimpleDefault); + case ParseNodeType.UnaryOperation: return isSimpleDefault(node.expression); + case ParseNodeType.BinaryOperation: return isSimpleDefault(node.leftExpression) && isSimpleDefault(node.rightExpression); + default: return false; } @@ -1055,7 +1123,7 @@ export class CompletionProvider { return sb; } - const parameters = getParameters(); + const parameters = getParameters(isStaticMethod ? 
decl.node.parameters : decl.node.parameters.slice(1)); if (decl.node.name.value !== '__init__') { sb += 'return '; } @@ -1070,25 +1138,34 @@ export class CompletionProvider { return sb + `super().${decl.node.name.value}(${parameters.map(convertToString).join(', ')})`; - function getParameters() { - if (isStaticMethod) { - return decl.node.parameters.filter((p) => p.name); + function getParameters(parameters: ParameterNode[]) { + const results: [node: ParameterNode, keywordOnly: boolean][] = []; + + let keywordOnly = false; + for (const parameter of parameters) { + if (parameter.name) { + results.push([parameter, keywordOnly]); + } + + keywordOnly = + parameter.category === ParameterCategory.VarArgList || + parameter.category === ParameterCategory.VarArgDictionary; } - return decl.node.parameters.slice(1).filter((p) => p.name); + return results; } - function convertToString(parameter: ParameterNode) { - const name = parameter.name?.value; - if (parameter.category === ParameterCategory.VarArgList) { + function convertToString(parameter: [node: ParameterNode, keywordOnly: boolean]) { + const name = parameter[0].name?.value; + if (parameter[0].category === ParameterCategory.VarArgList) { return `*${name}`; } - if (parameter.category === ParameterCategory.VarArgDictionary) { + if (parameter[0].category === ParameterCategory.VarArgDictionary) { return `**${name}`; } - return parameter.defaultValue ? `${name}=${name}` : name; + return parameter[1] ? 
`${name}=${name}` : name; } } @@ -1097,7 +1174,7 @@ export class CompletionProvider { priorWord: string ): CompletionResults | undefined { const symbolTable = new Map(); - const completionList = CompletionList.create(); + const completionMap = new CompletionMap(); let memberAccessInfo: MemberAccessInfo = {}; let leftType = this._evaluator.getType(leftExprNode); @@ -1117,21 +1194,20 @@ export class CompletionProvider { if (functionClass && isInstantiableClass(functionClass)) { getMembersForClass(functionClass, symbolTable, /* includeInstanceVars */ true); } - } else if (isNone(subtype)) { + } else if (isNoneInstance(subtype)) { const objectClass = this._evaluator.getBuiltInType(leftExprNode, 'object'); if (objectClass && isInstantiableClass(objectClass)) { getMembersForClass(objectClass, symbolTable, TypeBase.isInstance(subtype)); } } - const boundObject = isClassInstance(subtype) ? subtype : undefined; this._addSymbolsForSymbolTable( symbolTable, - (_) => true, + () => true, priorWord, /* isInImport */ false, - boundObject, - completionList + isClass(subtype) ? subtype : undefined, + completionMap ); }); } @@ -1141,7 +1217,7 @@ export class CompletionProvider { memberAccessInfo = this._getLastKnownModule(leftExprNode, leftType); } - return { completionList, memberAccessInfo }; + return { completionMap, memberAccessInfo }; } private _getLastKnownModule(leftExprNode: ExpressionNode, leftType: Type | undefined): MemberAccessInfo { @@ -1229,8 +1305,8 @@ export class CompletionProvider { return undefined; } - const completionList = CompletionList.create(); - const completionResults = { completionList }; + const completionMap = new CompletionMap(); + const completionResults = { completionMap }; // Return empty completionList for Ellipsis if (priorText.slice(-2) === '..') { @@ -1244,23 +1320,26 @@ export class CompletionProvider { priorText, postText, /*atArgument*/ false, - completionList + completionMap ); // Add symbols that are in scope. 
- this._addSymbols(parseNode, priorWord, completionList); + this._addSymbols(parseNode, priorWord, completionMap); // Add keywords. this._findMatchingKeywords(Keywords.forVersion(this._execEnv.pythonVersion), priorWord).map((keyword) => { + if (completionMap.has(keyword)) { + return; + } const completionItem = CompletionItem.create(keyword); completionItem.kind = CompletionItemKind.Keyword; - completionList.items.push(completionItem); completionItem.sortText = this._makeSortText(SortCategory.Keyword, keyword); + completionMap.set(completionItem); }); // Add auto-import suggestions from other modules. // Ignore this check for privates, since they are not imported. - if (this._configOptions.autoImportCompletions && !priorWord.startsWith('_') && !this._itemToResolve) { + if (!priorWord.startsWith('_') && !this._itemToResolve) { this._addAutoImportCompletions(priorWord, similarityLimit, this._options.lazyEdit, completionResults); } @@ -1274,21 +1353,18 @@ export class CompletionProvider { parseNode.parent, /*priorText*/ undefined, /*postText*/ undefined, - completionList + completionMap ); } else if (parseNode.category === ErrorExpressionCategory.MissingExpression) { if (parseNode.parent && parseNode.parent.nodeType === ParseNodeType.Assignment) { - const declaredTypeOfTarget = this._evaluator.getDeclaredTypeForExpression( - parseNode.parent.leftExpression - ); - + const declaredTypeOfTarget = this._evaluator.getExpectedType(parseNode)?.type; if (declaredTypeOfTarget) { this._addLiteralValuesForTargetType( declaredTypeOfTarget, priorText, priorWord, postText, - completionList + completionMap ); } } @@ -1298,10 +1374,9 @@ export class CompletionProvider { if (isIndexArgument) { // Completion for dict key (ex, dict_variable[]) const indexNode = parseNode.parent!.parent! 
as IndexNode; - const excludes = new Set(completionList.items.map((i) => i.label)); - this._getDictionaryKeys(indexNode, parseNode).forEach((key) => { - if (excludes.has(key)) { + this._getIndexerKeys(indexNode, parseNode).forEach((key) => { + if (completionMap.has(key)) { // Don't add key if it already exists in the completion. // ex) key = "dictKey" // dict[key] = 1 @@ -1309,7 +1384,7 @@ export class CompletionProvider { return; } - this._addNameToCompletionList(key, CompletionItemKind.Constant, priorWord, completionList, { + this._addNameToCompletions(key, CompletionItemKind.Constant, priorWord, completionMap, { sortText: this._makeSortText(SortCategory.LiteralValue, key), itemDetail: dictionaryKeyDetail, }); @@ -1338,7 +1413,7 @@ export class CompletionProvider { priorText: string, postText: string, atArgument: boolean, - completionList: CompletionList + completionMap: CompletionMap ) { // If we're within the argument list of a call, add parameter names. const offset = convertPositionToOffset(this._position, this._parseResults.tokenizerOutput.lines)!; @@ -1367,11 +1442,11 @@ export class CompletionProvider { if (comparePositions(this._position, callNameEnd) > 0) { if (!atArgument) { - this._addNamedParameters(signatureInfo, priorWord, completionList); + this._addNamedParameters(signatureInfo, priorWord, completionMap); } // Add literals that apply to this parameter. 
- this._addLiteralValuesForArgument(signatureInfo, priorText, priorWord, postText, completionList); + this._addLiteralValuesForArgument(signatureInfo, priorText, priorWord, postText, completionMap); } } } @@ -1381,7 +1456,7 @@ export class CompletionProvider { priorText: string, priorWord: string, postText: string, - completionList: CompletionList + completionMap: CompletionMap ) { signatureInfo.signatures.forEach((signature) => { if (!signature.activeParam) { @@ -1396,7 +1471,7 @@ export class CompletionProvider { } const paramType = type.details.parameters[paramIndex].type; - this._addLiteralValuesForTargetType(paramType, priorText, priorWord, postText, completionList); + this._addLiteralValuesForTargetType(paramType, priorText, priorWord, postText, completionMap); return undefined; }); } @@ -1406,29 +1481,65 @@ export class CompletionProvider { priorText: string, priorWord: string, postText: string, - completionList: CompletionList + completionMap: CompletionMap ) { const quoteValue = this._getQuoteValueFromPriorText(priorText); this._getSubTypesWithLiteralValues(type).forEach((v) => { if (ClassType.isBuiltIn(v, 'str')) { const value = printLiteralValue(v, quoteValue.quoteCharacter); if (quoteValue.stringValue === undefined) { - this._addNameToCompletionList(value, CompletionItemKind.Constant, priorWord, completionList, { + this._addNameToCompletions(value, CompletionItemKind.Constant, priorWord, completionMap, { sortText: this._makeSortText(SortCategory.LiteralValue, v.literalValue as string), }); } else { - this._addStringLiteralToCompletionList( + this._addStringLiteralToCompletions( value.substr(1, value.length - 2), quoteValue.stringValue, postText, quoteValue.quoteCharacter, - completionList + completionMap ); } } }); } + private _getDictExpressionStringKeys(parseNode: ParseNode, excludeIds?: Set) { + const node = getDictionaryLikeNode(parseNode); + if (!node) { + return []; + } + + return node.entries.flatMap((entry) => { + if (entry.nodeType !== 
ParseNodeType.DictionaryKeyEntry || excludeIds?.has(entry.keyExpression.id)) { + return []; + } + + if (entry.keyExpression.nodeType === ParseNodeType.StringList) { + return [entry.keyExpression.strings.map((s) => s.value).join('')]; + } + + return []; + }); + + function getDictionaryLikeNode(parseNode: ParseNode) { + // this method assumes the given parseNode is either a child of a dictionary or a dictionary itself + if (parseNode.nodeType === ParseNodeType.Dictionary) { + return parseNode; + } + + let curNode: ParseNode | undefined = parseNode; + while (curNode && curNode.nodeType !== ParseNodeType.Dictionary && curNode.nodeType !== ParseNodeType.Set) { + curNode = curNode.parent; + if (!curNode) { + return; + } + } + + return curNode; + } + } + private _getSubTypesWithLiteralValues(type: Type) { const values: ClassType[] = []; @@ -1441,7 +1552,30 @@ export class CompletionProvider { return values; } - private _getDictionaryKeys(indexNode: IndexNode, invocationNode: ParseNode) { + private _getIndexerKeyType(baseType: ClassType) { + // Handle dict type + if (ClassType.isBuiltIn(baseType, 'dict') || ClassType.isBuiltIn(baseType, 'Mapping')) { + if (baseType.typeArguments?.length === 2) { + return baseType.typeArguments[0]; + } + } + + // Handle simple __getitem__ + const member = lookUpObjectMember(baseType, '__getitem__'); + if (member?.symbol.hasDeclarations()) { + const declaration = member.symbol.getDeclarations()[0]; + if (isFunctionDeclaration(declaration) && declaration.isMethod) { + const getItemType = this._evaluator.getTypeForDeclaration(declaration); + if (getItemType && isFunction(getItemType) && getItemType.details.parameters.length === 2) { + return getItemType.details.parameters[1].type; + } + } + } + + return undefined; + } + + private _getIndexerKeys(indexNode: IndexNode, invocationNode: ParseNode) { if (indexNode.baseExpression.nodeType !== ParseNodeType.Name) { // This completion only supports simple name case return []; @@ -1452,17 +1586,12 @@ 
export class CompletionProvider { return []; } - // Must be dict type - if (!ClassType.isBuiltIn(baseType, 'dict') && !ClassType.isBuiltIn(baseType, 'Mapping')) { - return []; - } - - // See whether dictionary is typed using Literal types. If it is, return those literal keys. - // For now, we are not using __getitem__ since we don't have a way to get effective parameter type of __getitem__. - if (baseType.typeArguments?.length === 2) { + // See whether indexer key is typed using Literal types. If it is, return those literal keys. + const keyType = this._getIndexerKeyType(baseType); + if (keyType) { const keys: string[] = []; - this._getSubTypesWithLiteralValues(baseType.typeArguments[0]).forEach((v) => { + this._getSubTypesWithLiteralValues(keyType).forEach((v) => { if ( !ClassType.isBuiltIn(v, 'str') && !ClassType.isBuiltIn(v, 'int') && @@ -1508,19 +1637,17 @@ export class CompletionProvider { } } - const results: NameNode[] = []; - const collector = new DocumentSymbolCollector( + const results = DocumentSymbolCollector.collectFromNode( indexNode.baseExpression, this._evaluator, - results, this._cancellationToken, startingNode ); - collector.collect(); const keys: Set = new Set(); - for (const nameNode of results) { - const node = nameNode.parent?.nodeType === ParseNodeType.TypeAnnotation ? nameNode.parent : nameNode; + for (const result of results) { + const node = + result.node.parent?.nodeType === ParseNodeType.TypeAnnotation ? result.node.parent : result.node; if ( node.parent?.nodeType === ParseNodeType.Assignment || @@ -1584,6 +1711,47 @@ export class CompletionProvider { return undefined; } + const completionMap = new CompletionMap(); + + // See if the type evaluator can determine the expected type for this node. 
+ if (isExpressionNode(parentNode)) { + const expectedTypeResult = this._evaluator.getExpectedType(parentNode); + if (expectedTypeResult && isLiteralTypeOrUnion(expectedTypeResult.type)) { + this._addLiteralValuesForTargetType( + expectedTypeResult.type, + priorText, + priorWord, + postText, + completionMap + ); + return { completionMap }; + } + + if (parseNode.nodeType === ParseNodeType.String && parseNode.parent?.parent) { + const stringParent = parseNode.parent.parent; + + // If the dictionary is not yet filled in, it will appear as though it's + // a set initially. + let dictOrSet: DictionaryNode | SetNode | undefined; + + if ( + stringParent.nodeType === ParseNodeType.DictionaryKeyEntry && + stringParent.keyExpression === parseNode.parent && + stringParent.parent?.nodeType === ParseNodeType.Dictionary + ) { + dictOrSet = stringParent.parent; + } else if (stringParent?.nodeType === ParseNodeType.Set) { + dictOrSet = stringParent; + } + + if (dictOrSet) { + if (this._addTypedDictKeys(dictOrSet, parseNode, priorText, postText, completionMap)) { + return { completionMap }; + } + } + } + } + if (parentNode.nodeType !== ParseNodeType.Argument) { if (parentNode.nodeType !== ParseNodeType.StringList || parentNode.strings.length > 1) { return undefined; @@ -1595,17 +1763,16 @@ export class CompletionProvider { } } - const completionList = CompletionList.create(); if (parentNode.nodeType === ParseNodeType.Argument && parentNode.parent?.nodeType === ParseNodeType.Index) { if ( !this._tryAddTypedDictStringLiteral( parentNode.parent, parseNode.nodeType === ParseNodeType.String ? 
priorText : '', postText, - completionList + completionMap ) ) { - const keys = this._getDictionaryKeys(parentNode.parent, parseNode); + const keys = this._getIndexerKeys(parentNode.parent, parseNode); const quoteValue = this._getQuoteValueFromPriorText(priorText); for (const key of keys) { @@ -1617,47 +1784,120 @@ export class CompletionProvider { if (stringLiteral) { const keyWithoutQuote = key.substr(1, key.length - 2); - this._addStringLiteralToCompletionList( + this._addStringLiteralToCompletions( keyWithoutQuote, quoteValue.stringValue, postText, quoteValue.quoteCharacter, - completionList, + completionMap, dictionaryKeyDetail ); } else { - this._addNameToCompletionList(key, CompletionItemKind.Constant, priorWord, completionList, { + this._addNameToCompletions(key, CompletionItemKind.Constant, priorWord, completionMap, { sortText: this._makeSortText(SortCategory.LiteralValue, key), itemDetail: dictionaryKeyDetail, }); } } - if (completionList.items.length === 0) { + if (completionMap.size === 0) { return undefined; } } - } else if (parentNode.nodeType === ParseNodeType.Assignment) { - const declaredTypeOfTarget = this._evaluator.getDeclaredTypeForExpression(parentNode.leftExpression); - - if (declaredTypeOfTarget) { - this._addLiteralValuesForTargetType( - declaredTypeOfTarget, - priorText, - priorWord, - postText, - completionList - ); - } } else { debug.assert(parseNode.nodeType === ParseNodeType.String); const offset = convertPositionToOffset(this._position, this._parseResults.tokenizerOutput.lines)!; const atArgument = parentNode.start < offset && offset < TextRange.getEnd(parseNode); - this._addCallArgumentCompletions(parseNode, priorWord, priorText, postText, atArgument, completionList); + this._addCallArgumentCompletions(parseNode, priorWord, priorText, postText, atArgument, completionMap); + } + + return { completionMap }; + } + + private _addTypedDictKeys( + dictionaryNode: DictionaryNode | SetNode, + stringNode: StringNode | undefined, + priorText: 
string, + postText: string, + completionMap: CompletionMap + ) { + const expectedTypeResult = this._evaluator.getExpectedType(dictionaryNode); + if (!expectedTypeResult) { + return false; + } + + // If the expected type result is associated with a node above the + // dictionaryNode in the parse tree, there are no typed dict keys to add. + if (ParseTreeUtils.getNodeDepth(expectedTypeResult.node) < ParseTreeUtils.getNodeDepth(dictionaryNode)) { + return false; + } + + let typedDicts: ClassType[] = []; + + doForEachSubtype(expectedTypeResult.type, (subtype) => { + if (isClassInstance(subtype) && ClassType.isTypedDictClass(subtype)) { + typedDicts.push(subtype); + } + }); + + if (typedDicts.length === 0) { + return false; } - return { completionList }; + const keys = this._getDictExpressionStringKeys( + dictionaryNode, + stringNode ? new Set([stringNode.parent?.id]) : undefined + ); + + typedDicts = this._tryNarrowTypedDicts(typedDicts, keys); + + const quoteValue = this._getQuoteValueFromPriorText(priorText); + const excludes = new Set(keys); + + typedDicts.forEach((typedDict) => { + getTypedDictMembersForClass(this._evaluator, typedDict, /* allowNarrowed */ true).forEach((_, key) => { + // Unions of TypedDicts may define the same key. + if (excludes.has(key) || completionMap.has(key)) { + return; + } + + excludes.add(key); + + this._addStringLiteralToCompletions( + key, + quoteValue ? quoteValue.stringValue : undefined, + postText, + quoteValue + ? 
quoteValue.quoteCharacter + : this._parseResults.tokenizerOutput.predominantSingleQuoteCharacter, + completionMap + ); + }); + }); + + return true; + } + + private _tryNarrowTypedDicts(types: ClassType[], keys: string[]): ClassType[] { + const newTypes = types.flatMap((type) => { + const entries = getTypedDictMembersForClass(this._evaluator, type, /* allowNarrowed */ true); + + for (let index = 0; index < keys.length; index++) { + if (!entries.has(keys[index])) { + return []; + } + } + + return [type]; + }); + + if (newTypes.length === 0) { + // Couldn't narrow to any typed dicts. Just include all. + return types; + } + + return newTypes; } // Given a string of text that precedes the current insertion point, @@ -1665,20 +1905,22 @@ export class CompletionProvider { // (either starting with a single or double quote). Returns the quote // type and the string literal value after the starting quote. private _getQuoteValueFromPriorText(priorText: string) { - const lastSingleQuote = priorText.lastIndexOf("'"); - const lastDoubleQuote = priorText.lastIndexOf('"'); - - let quoteCharacter = this._parseResults.tokenizerOutput.predominantSingleQuoteCharacter; - let stringValue = undefined; - - if (lastSingleQuote > lastDoubleQuote) { - quoteCharacter = "'"; - stringValue = priorText.substr(lastSingleQuote + 1); - } else if (lastDoubleQuote > lastSingleQuote) { - quoteCharacter = '"'; - stringValue = priorText.substr(lastDoubleQuote + 1); + if (this._insideStringLiteral) { + const lastSingleQuote = priorText.lastIndexOf("'"); + const lastDoubleQuote = priorText.lastIndexOf('"'); + + if (lastSingleQuote > lastDoubleQuote) { + return { + quoteCharacter: "'", + stringValue: priorText.substr(lastSingleQuote + 1), + }; + } else if (lastDoubleQuote > lastSingleQuote) { + return { quoteCharacter: '"', stringValue: priorText.substr(lastDoubleQuote + 1) }; + } } + const quoteCharacter = this._parseResults.tokenizerOutput.predominantSingleQuoteCharacter; + const stringValue = undefined; 
return { stringValue, quoteCharacter }; } @@ -1686,7 +1928,7 @@ export class CompletionProvider { indexNode: IndexNode | undefined, priorText: string | undefined, postText: string | undefined, - completionList: CompletionList + completionMap: CompletionMap ) { if (!indexNode) { return false; @@ -1706,30 +1948,34 @@ export class CompletionProvider { const quoteValue = priorText ? this._getQuoteValueFromPriorText(priorText) : undefined; entries.forEach((_, key) => { - this._addStringLiteralToCompletionList( + this._addStringLiteralToCompletions( key, quoteValue ? quoteValue.stringValue : undefined, postText, quoteValue ? quoteValue.quoteCharacter : this._parseResults.tokenizerOutput.predominantSingleQuoteCharacter, - completionList + completionMap ); }); return true; } - private _addStringLiteralToCompletionList( + private _addStringLiteralToCompletions( value: string, priorString: string | undefined, postText: string | undefined, quoteCharacter: string, - completionList: CompletionList, + completionMap: CompletionMap, detail?: string ) { if (StringUtils.isPatternInSymbol(priorString || '', value)) { const valueWithQuotes = `${quoteCharacter}${value}${quoteCharacter}`; + if (completionMap.has(valueWithQuotes)) { + return; + } + const completionItem = CompletionItem.create(valueWithQuotes); completionItem.kind = CompletionItemKind.Constant; @@ -1755,7 +2001,7 @@ export class CompletionProvider { completionItem.textEdit = TextEdit.replace(range, valueWithQuotes); completionItem.detail = detail; - completionList.items.push(completionItem); + completionMap.set(completionItem); } } @@ -1765,30 +2011,31 @@ export class CompletionProvider { lazyEdit: boolean, completionResults: CompletionResults ) { - if (!this._autoImportMaps) { - return; - } - - const completionList = completionResults.completionList; - if (!completionList) { + if ( + !completionResults.completionMap || + !this._configOptions.autoImportCompletions || + !this._options.autoImport + ) { + // If auto import 
on the server is turned off or this particular invocation + // is turned off (ex, notebook), don't do any thing. return; } const moduleSymbolMap = this._autoImportMaps.getModuleSymbolsMap(); - const excludes = new Set(completionList.items.filter((i) => !i.data?.autoImport).map((i) => i.label)); + const autoImporter = new AutoImporter( this._execEnv, this._importResolver, this._parseResults, this._position, - excludes, + completionResults.completionMap, moduleSymbolMap, { libraryMap: this._autoImportMaps.libraryMap, lazyEdit } ); const results: AutoImportResult[] = []; const info = this._autoImportMaps.nameMap?.get(priorWord); - if (info && priorWord.length > 1 && !excludes.has(priorWord)) { + if (info && priorWord.length > 1 && !completionResults.completionMap.has(priorWord)) { results.push(...autoImporter.getAutoImportCandidatesForAbbr(priorWord, info, this._cancellationToken)); } @@ -1801,7 +2048,7 @@ export class CompletionProvider { const additionDuration = new Duration(); for (const result of results) { if (result.symbol) { - this._addSymbol(result.name, result.symbol, priorWord, completionList, { + this._addSymbol(result.name, result.symbol, priorWord, completionResults.completionMap, { autoImportSource: result.source, autoImportAlias: result.alias, edits: { @@ -1810,11 +2057,11 @@ export class CompletionProvider { }, }); } else { - this._addNameToCompletionList( + this._addNameToCompletions( result.alias ?? result.name, result.kind ?? CompletionItemKind.Module, priorWord, - completionList, + completionResults.completionMap, { autoImportText: this._getAutoImportText(result.name, result.source, result.alias), edits: { @@ -1859,7 +2106,7 @@ export class CompletionProvider { return undefined; } - const completionList = CompletionList.create(); + const completionMap = new CompletionMap(); const resolvedPath = importInfo.resolvedPaths.length > 0 ? 
importInfo.resolvedPaths[importInfo.resolvedPaths.length - 1] : ''; @@ -1868,25 +2115,28 @@ export class CompletionProvider { if (lookupResults) { this._addSymbolsForSymbolTable( lookupResults.symbolTable, - (name) => { - // Don't suggest symbols that have already been imported. - return !importFromNode.imports.find((imp) => imp.name.value === name); + (symbol, name) => { + // Don't suggest built in symbols or ones that have already been imported. + return ( + symbol.getDeclarations().some((d) => !isIntrinsicDeclaration(d)) && + !importFromNode.imports.find((imp) => imp.name.value === name) + ); }, priorWord, /* isInImport */ true, /* boundObject */ undefined, - completionList + completionMap ); } // Add the implicit imports. importInfo.implicitImports.forEach((implImport) => { if (!importFromNode.imports.find((imp) => imp.name.value === implImport.name)) { - this._addNameToCompletionList(implImport.name, CompletionItemKind.Module, priorWord, completionList); + this._addNameToCompletions(implImport.name, CompletionItemKind.Module, priorWord, completionMap); } }); - return { completionList }; + return { completionMap }; } private _findMatchingKeywords(keywordList: string[], partialMatch: string): string[] { @@ -1899,7 +2149,7 @@ export class CompletionProvider { }); } - private _addNamedParameters(signatureInfo: CallSignatureInfo, priorWord: string, completionList: CompletionList) { + private _addNamedParameters(signatureInfo: CallSignatureInfo, priorWord: string, completionMap: CompletionMap) { const argNameMap = new Map(); signatureInfo.signatures.forEach((signature) => { @@ -1916,7 +2166,12 @@ export class CompletionProvider { // Add the remaining unique parameter names to the completion list. 
argNameMap.forEach((argName) => { if (StringUtils.isPatternInSymbol(priorWord, argName)) { - const completionItem = CompletionItem.create(argName + '='); + const label = argName + '='; + if (completionMap.has(label)) { + return; + } + + const completionItem = CompletionItem.create(label); completionItem.kind = CompletionItemKind.Variable; const completionItemData: CompletionItemData = { @@ -1927,7 +2182,7 @@ export class CompletionProvider { completionItem.data = completionItemData; completionItem.sortText = this._makeSortText(SortCategory.NamedParameter, argName); - completionList.items.push(completionItem); + completionMap.set(completionItem); } }); } @@ -1944,12 +2199,12 @@ export class CompletionProvider { }); } - private _addSymbols(node: ParseNode, priorWord: string, completionList: CompletionList) { + private _addSymbols(node: ParseNode, priorWord: string, completionMap: CompletionMap) { let curNode: ParseNode | undefined = node; while (curNode) { // Does this node have a scope associated with it? 
- let scope = AnalyzerNodeInfo.getScope(curNode); + let scope = getScopeForNode(curNode); if (scope) { while (scope) { this._addSymbolsForSymbolTable( @@ -1958,7 +2213,7 @@ export class CompletionProvider { priorWord, /* isInImport */ false, /* boundObject */ undefined, - completionList + completionMap ); scope = scope.parent; } @@ -1971,9 +2226,8 @@ export class CompletionProvider { if (isInstantiableClass(baseClass)) { this._addSymbolsForSymbolTable( baseClass.details.fields, - (name) => { - const symbol = baseClass.details.fields.get(name); - if (!symbol || !symbol.isClassMember()) { + (symbol) => { + if (!symbol.isClassMember()) { return false; } @@ -1985,7 +2239,7 @@ export class CompletionProvider { priorWord, /* isInImport */ false, /* boundObject */ undefined, - completionList + completionMap ); } }); @@ -2000,11 +2254,11 @@ export class CompletionProvider { private _addSymbolsForSymbolTable( symbolTable: SymbolTable, - includeSymbolCallback: (name: string) => boolean, + includeSymbolCallback: (symbol: Symbol, name: string) => boolean, priorWord: string, isInImport: boolean, - boundObject: ClassType | undefined, - completionList: CompletionList + boundObjectOrClass: ClassType | undefined, + completionMap: CompletionMap ) { symbolTable.forEach((symbol, name) => { // If there are no declarations or the symbol is not @@ -2013,12 +2267,12 @@ export class CompletionProvider { const hidden = symbol.isExternallyHidden() && !symbol.getDeclarations().some((d) => isDefinedInFile(d, this._filePath)); - if (!hidden && includeSymbolCallback(name)) { + if (!hidden && includeSymbolCallback(symbol, name)) { // Don't add a symbol more than once. It may have already been // added from an inner scope's symbol table. 
- if (!completionList.items.some((item) => item.label === name)) { - this._addSymbol(name, symbol, priorWord, completionList, { - boundObject, + if (!completionMap.has(name)) { + this._addSymbol(name, symbol, priorWord, completionMap, { + boundObjectOrClass, funcParensDisabled: isInImport, }); } @@ -2030,7 +2284,7 @@ export class CompletionProvider { name: string, symbol: Symbol, priorWord: string, - completionList: CompletionList, + completionMap: CompletionMap, detail: SymbolDetail ) { // If the symbol is a py.typed import that is not supposed to be re-exported, @@ -2054,6 +2308,19 @@ export class CompletionProvider { if (primaryDecl) { itemKind = this._convertDeclarationTypeToItemKind(primaryDecl); + // Handle enum members specially. Enum members normally look like + // variables, but the are declared using assignment expressions + // within an enum class. + if ( + primaryDecl.type === DeclarationType.Variable && + detail.boundObjectOrClass && + isInstantiableClass(detail.boundObjectOrClass) && + ClassType.isEnumClass(detail.boundObjectOrClass) && + primaryDecl.node.parent?.nodeType === ParseNodeType.Assignment + ) { + itemKind = CompletionItemKind.EnumMember; + } + // Are we resolving a completion item? If so, see if this symbol // is the one that we're trying to match. if (this._itemToResolve) { @@ -2088,11 +2355,18 @@ export class CompletionProvider { case DeclarationType.Function: { const functionType = - detail.boundObject && (isFunction(type) || isOverloadedFunction(type)) - ? this._evaluator.bindFunctionToClassOrObject(detail.boundObject, type) + detail.boundObjectOrClass && (isFunction(type) || isOverloadedFunction(type)) + ? 
this._evaluator.bindFunctionToClassOrObject( + detail.boundObjectOrClass, + type + ) : type; if (functionType) { - if (isProperty(functionType) && detail.boundObject) { + if ( + isProperty(functionType) && + detail.boundObjectOrClass && + isClassInstance(detail.boundObjectOrClass) + ) { const propertyType = this._evaluator.getGetterTypeFromProperty( functionType as ClassType, @@ -2104,7 +2378,7 @@ export class CompletionProvider { this._evaluator.printType(propertyType, /* expandTypeAlias */ false) + ' (property)'; altDetail = - detail.boundObject.details.fullName + '.' + name + ' (property)'; + detail.boundObjectOrClass.details.fullName + '.' + name + ' (property)'; } else if (isOverloadedFunction(functionType)) { // 35 is completion tooltip's default width size typeDetail = getOverloadedFunctionTooltip( @@ -2161,37 +2435,27 @@ export class CompletionProvider { } else if (isInstantiableClass(type)) { documentation = getClassDocString(type, primaryDecl, this._sourceMapper); } else if (isFunction(type)) { - const functionType = detail.boundObject - ? (this._evaluator.bindFunctionToClassOrObject( - detail.boundObject, - type - ) as FunctionType) + const functionType = detail.boundObjectOrClass + ? this._evaluator.bindFunctionToClassOrObject(detail.boundObjectOrClass, type) : type; - documentation = getFunctionDocStringFromType( - functionType, - this._sourceMapper, - this._evaluator - ); + if (functionType && isFunction(functionType)) { + documentation = getFunctionDocStringFromType( + functionType, + this._sourceMapper, + this._evaluator + ); + } } else if (isOverloadedFunction(type)) { - const enclosingClass = isFunctionDeclaration(primaryDecl) - ? ParseTreeUtils.getEnclosingClass(primaryDecl.node.name, false) - : undefined; - const classResults = enclosingClass - ? this._evaluator.getTypeOfClass(enclosingClass) - : undefined; - const functionType = detail.boundObject - ? 
(this._evaluator.bindFunctionToClassOrObject( - detail.boundObject, - type - ) as OverloadedFunctionType) + const functionType = detail.boundObjectOrClass + ? this._evaluator.bindFunctionToClassOrObject(detail.boundObjectOrClass, type) : type; - documentation = getOverloadedFunctionDocStringsInherited( - functionType, - primaryDecl, - this._sourceMapper, - this._evaluator, - classResults?.classType - ).find((doc) => doc); + if (functionType && isOverloadedFunction(functionType)) { + documentation = getOverloadedFunctionDocStringsFromType( + functionType, + this._sourceMapper, + this._evaluator + ).find((doc) => doc); + } } else if (primaryDecl?.type === DeclarationType.Function) { // @property functions documentation = getPropertyDocStringInherited( @@ -2258,7 +2522,7 @@ export class CompletionProvider { ? this._getAutoImportText(name, detail.autoImportSource, detail.autoImportAlias) : undefined; - this._addNameToCompletionList(detail.autoImportAlias ?? name, itemKind, priorWord, completionList, { + this._addNameToCompletions(detail.autoImportAlias ?? name, itemKind, priorWord, completionMap, { autoImportText, funcParensDisabled: detail.funcParensDisabled, edits: detail.edits, @@ -2268,7 +2532,7 @@ export class CompletionProvider { const synthesizedType = symbol.getSynthesizedType(); if (synthesizedType) { const itemKind: CompletionItemKind = CompletionItemKind.Variable; - this._addNameToCompletionList(name, itemKind, priorWord, completionList, { + this._addNameToCompletions(name, itemKind, priorWord, completionMap, { funcParensDisabled: detail.funcParensDisabled, edits: detail.edits, }); @@ -2288,11 +2552,11 @@ export class CompletionProvider { } } - private _addNameToCompletionList( + private _addNameToCompletions( name: string, itemKind: CompletionItemKind, filter: string, - completionList: CompletionList, + completionMap: CompletionMap, detail?: CompletionDetail ) { // Auto importer already filtered out unnecessary ones. No need to do it again. 
@@ -2301,6 +2565,10 @@ export class CompletionProvider { return; } + if (completionMap.has(name, CompletionMap.matchKindAndImportText, itemKind, detail?.autoImportText)) { + return; + } + const completionItem = CompletionItem.create(name); completionItem.kind = itemKind; @@ -2324,6 +2592,9 @@ export class CompletionProvider { completionItem.sortText = this._makeSortText(SortCategory.AutoImport, name, detail.autoImportText); completionItemData.autoImportText = detail.autoImportText; completionItem.detail = autoImportDetail; + } else if (itemKind === CompletionItemKind.EnumMember) { + // Handle enum members separately so they are sorted above other symbols. + completionItem.sortText = this._makeSortText(SortCategory.EnumMember, name); } else if (SymbolNameUtils.isDunderName(name)) { // Force dunder-named symbols to appear after all other symbols. completionItem.sortText = this._makeSortText(SortCategory.DunderSymbol, name); @@ -2425,7 +2696,7 @@ export class CompletionProvider { } } - completionList.items.push(completionItem); + completionMap.set(completionItem); } private _getRecentListIndex(name: string, autoImportText: string) { @@ -2531,7 +2802,7 @@ export class CompletionProvider { moduleDescriptor ); - const completionList = CompletionList.create(); + const completionMap = new CompletionMap(); // If we're in the middle of a "from X import Y" statement, offer // the "import" keyword as a completion. 
@@ -2544,18 +2815,22 @@ export class CompletionProvider { const keyword = 'import'; const completionItem = CompletionItem.create(keyword); completionItem.kind = CompletionItemKind.Keyword; - completionList.items.push(completionItem); completionItem.sortText = this._makeSortText(SortCategory.Keyword, keyword); + completionMap.set(completionItem); } completions.forEach((completionName) => { + if (completionMap.has(completionName)) { + return; + } + const completionItem = CompletionItem.create(completionName); completionItem.kind = CompletionItemKind.Module; - completionList.items.push(completionItem); completionItem.sortText = this._makeSortText(SortCategory.ImportModuleName, completionName); + completionMap.set(completionItem); }); - return { completionList }; + return { completionMap }; } private _isPossiblePropertyDeclaration(decl: FunctionDeclaration) { @@ -2587,3 +2862,106 @@ export function printSimplifiedFunctionSignature(functionType: FunctionType): st ')' ); } + +export class CompletionMap { + private _completions: Map = new Map(); + + get size() { + return this._completions.size; + } + + set(value: CompletionItem): void { + const existing = this._completions.get(value.label); + if (!existing) { + this._completions.set(value.label, value); + } else if (Array.isArray(existing)) { + existing.push(value); + } else { + this._completions.set(value.label, [existing, value]); + } + } + + get(key: string): CompletionItem | CompletionItem[] | undefined { + return this._completions.get(key); + } + + has( + label: string, + predicate?: ( + other: CompletionItem | CompletionItem[], + kind?: CompletionItemKind, + autoImportText?: string + ) => boolean, + kind?: CompletionItemKind, + autImportText?: string + ): boolean { + const existing = this._completions.get(label); + if (!existing) { + return false; + } + + if (predicate) { + return predicate(existing, kind, autImportText); + } + return true; + } + + clear(): void { + this._completions.clear(); + } + + delete(key: 
string): boolean { + return this._completions.delete(key); + } + + toArray(): CompletionItem[] { + const items: CompletionItem[] = []; + this._completions?.forEach((value) => { + if (Array.isArray(value)) { + value.forEach((item) => { + items.push(item); + }); + } else { + items.push(value); + } + }); + return items; + } + + static matchKindAndImportText( + existing: CompletionItem | CompletionItem[], + kind?: CompletionItemKind, + autoImportText?: string + ): boolean { + if (!existing) { + return false; + } + + if (!Array.isArray(existing)) { + return existing.kind === kind && existing.data?.autoImport === autoImportText; + } else { + return !!existing.find((c) => c.kind === kind && c.data.autoImport === autoImportText); + } + } + + static labelOnlyIgnoringAutoImports( + existing: CompletionItem | CompletionItem[], + _kind?: CompletionItemKind, + _autoImportText?: string + ): boolean { + if (!existing) { + return false; + } + + if (Array.isArray(existing)) { + if (existing.find((c) => !c.data?.autoImport)) { + return true; + } + } else { + if (!existing.data?.autoImport) { + return true; + } + } + return false; + } +} diff --git a/packages/pyright-internal/src/languageService/definitionProvider.ts b/packages/pyright-internal/src/languageService/definitionProvider.ts index 979c6bf856a2..1b2a82919e2c 100644 --- a/packages/pyright-internal/src/languageService/definitionProvider.ts +++ b/packages/pyright-internal/src/languageService/definitionProvider.ts @@ -13,11 +13,12 @@ import { CancellationToken } from 'vscode-languageserver'; import { getFileInfo } from '../analyzer/analyzerNodeInfo'; -import { DeclarationType, isFunctionDeclaration } from '../analyzer/declaration'; +import { Declaration, DeclarationType, isFunctionDeclaration } from '../analyzer/declaration'; import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; import { isStubFile, SourceMapper } from '../analyzer/sourceMapper'; import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; -import { 
isOverloadedFunction } from '../analyzer/types'; +import { isOverloadedFunction, TypeCategory } from '../analyzer/types'; +import { doForEachSubtype } from '../analyzer/typeUtils'; import { throwIfCancellationRequested } from '../common/cancellationUtils'; import { isDefined } from '../common/core'; import { convertPositionToOffset } from '../common/positionUtils'; @@ -56,70 +57,7 @@ export class DefinitionProvider { if (node.nodeType === ParseNodeType.Name) { const declarations = evaluator.getDeclarationsForNameNode(node); - if (declarations) { - declarations.forEach((decl) => { - let resolvedDecl = evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true); - if (resolvedDecl && resolvedDecl.path) { - // If the decl is an unresolved import, skip it. - if (resolvedDecl.type === DeclarationType.Alias && resolvedDecl.isUnresolved) { - return; - } - - // If the resolved decl is still an alias, it means it - // resolved to a module. We need to apply loader actions - // to determine its path. 
- if ( - resolvedDecl.type === DeclarationType.Alias && - resolvedDecl.symbolName && - resolvedDecl.submoduleFallback && - resolvedDecl.submoduleFallback.path - ) { - resolvedDecl = resolvedDecl.submoduleFallback; - } - - this._addIfUnique(definitions, { - path: resolvedDecl.path, - range: resolvedDecl.range, - }); - - if (isFunctionDeclaration(resolvedDecl)) { - // Handle overloaded function case - const functionType = evaluator.getTypeForDeclaration(resolvedDecl); - if (functionType && isOverloadedFunction(functionType)) { - for (const overloadDecl of functionType.overloads - .map((o) => o.details.declaration) - .filter(isDefined)) { - this._addIfUnique(definitions, { - path: overloadDecl.path, - range: overloadDecl.range, - }); - } - } - } - - if (isStubFile(resolvedDecl.path)) { - if (resolvedDecl.type === DeclarationType.Alias) { - // Add matching source module - sourceMapper - .findModules(resolvedDecl.path) - .map((m) => getFileInfo(m)?.filePath) - .filter(isDefined) - .forEach((f) => this._addIfUnique(definitions, this._createModuleEntry(f))); - } else { - const implDecls = sourceMapper.findDeclarations(resolvedDecl); - for (const implDecl of implDecls) { - if (implDecl && implDecl.path) { - this._addIfUnique(definitions, { - path: implDecl.path, - range: implDecl.range, - }); - } - } - } - } - } - }); - } + DefinitionProvider._resolveDeclarations(declarations, evaluator, definitions, sourceMapper); } if (definitions.length === 0) { @@ -141,6 +79,133 @@ export class DefinitionProvider { return definitions; } + static getTypeDefinitionsForPosition( + sourceMapper: SourceMapper, + parseResults: ParseResults, + position: Position, + evaluator: TypeEvaluator, + filePath: string, + token: CancellationToken + ): DocumentRange[] | undefined { + throwIfCancellationRequested(token); + + const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return undefined; + } + + const node = 
ParseTreeUtils.findNodeByOffset(parseResults.parseTree, offset); + if (node === undefined) { + return undefined; + } + + const definitions: DocumentRange[] = []; + + if (node.nodeType === ParseNodeType.Name) { + const type = evaluator.getType(node); + + if (type) { + let declarations: Declaration[] = []; + + doForEachSubtype(type, (subtype) => { + if (subtype?.category === TypeCategory.Class) { + declarations.push(...sourceMapper.findClassDeclarationsByType(filePath, subtype)); + } + }); + + // Fall back to Go To Definition if the type can't be found (ex. Go To Type Definition + // was executed on a type name) + if (declarations.length === 0) { + declarations = evaluator.getDeclarationsForNameNode(node) ?? []; + } + + DefinitionProvider._resolveDeclarations(declarations, evaluator, definitions, sourceMapper); + } + } + + if (definitions.length === 0) { + return undefined; + } + + return definitions; + } + + private static _resolveDeclarations( + declarations: Declaration[] | undefined, + evaluator: TypeEvaluator, + definitions: DocumentRange[], + sourceMapper: SourceMapper + ) { + if (declarations) { + declarations.forEach((decl) => { + let resolvedDecl = evaluator.resolveAliasDeclaration( + decl, + /* resolveLocalNames */ true, + /* allowExternallyHiddenAccess */ true + ); + if (resolvedDecl && resolvedDecl.path) { + // If the decl is an unresolved import, skip it. + if (resolvedDecl.type === DeclarationType.Alias && resolvedDecl.isUnresolved) { + return; + } + + // If the resolved decl is still an alias, it means it + // resolved to a module. We need to apply loader actions + // to determine its path. 
+ if ( + resolvedDecl.type === DeclarationType.Alias && + resolvedDecl.symbolName && + resolvedDecl.submoduleFallback && + resolvedDecl.submoduleFallback.path + ) { + resolvedDecl = resolvedDecl.submoduleFallback; + } + + this._addIfUnique(definitions, { + path: resolvedDecl.path, + range: resolvedDecl.range, + }); + + if (isFunctionDeclaration(resolvedDecl)) { + // Handle overloaded function case + const functionType = evaluator.getTypeForDeclaration(resolvedDecl); + if (functionType && isOverloadedFunction(functionType)) { + for (const overloadDecl of functionType.overloads + .map((o) => o.details.declaration) + .filter(isDefined)) { + this._addIfUnique(definitions, { + path: overloadDecl.path, + range: overloadDecl.range, + }); + } + } + } + + if (isStubFile(resolvedDecl.path)) { + if (resolvedDecl.type === DeclarationType.Alias) { + // Add matching source module + sourceMapper + .findModules(resolvedDecl.path) + .map((m) => getFileInfo(m)?.filePath) + .filter(isDefined) + .forEach((f) => this._addIfUnique(definitions, this._createModuleEntry(f))); + } else { + const implDecls = sourceMapper.findDeclarations(resolvedDecl); + for (const implDecl of implDecls) { + if (implDecl && implDecl.path) { + this._addIfUnique(definitions, { + path: implDecl.path, + range: implDecl.range, + }); + } + } + } + } + } + }); + } + } + private static _createModuleEntry(filePath: string): DocumentRange { return { path: filePath, diff --git a/packages/pyright-internal/src/languageService/documentHighlightProvider.ts b/packages/pyright-internal/src/languageService/documentHighlightProvider.ts index 8b67288fe665..94492e4ac702 100644 --- a/packages/pyright-internal/src/languageService/documentHighlightProvider.ts +++ b/packages/pyright-internal/src/languageService/documentHighlightProvider.ts @@ -15,7 +15,7 @@ import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; import { throwIfCancellationRequested } from '../common/cancellationUtils'; import { convertOffsetsToRange, 
convertPositionToOffset } from '../common/positionUtils'; import { Position, TextRange } from '../common/textRange'; -import { NameNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseNodeType } from '../parser/parseNodes'; import { ParseResults } from '../parser/parser'; import { DocumentSymbolCollector } from './documentSymbolCollector'; @@ -42,14 +42,20 @@ export class DocumentHighlightProvider { return undefined; } - const results: NameNode[] = []; - const collector = new DocumentSymbolCollector(node, evaluator, results, token); - - collector.collect(); - - return results.map((n) => ({ - kind: ParseTreeUtils.isWriteAccess(n) ? DocumentHighlightKind.Write : DocumentHighlightKind.Read, - range: convertOffsetsToRange(n.start, TextRange.getEnd(n), parseResults.tokenizerOutput.lines), + const results = DocumentSymbolCollector.collectFromNode( + node, + evaluator, + token, + parseResults.parseTree, + /* treatModuleInImportAndFromImportSame */ true + ); + + return results.map((r) => ({ + kind: + r.node.nodeType === ParseNodeType.Name && ParseTreeUtils.isWriteAccess(r.node) + ? 
DocumentHighlightKind.Write + : DocumentHighlightKind.Read, + range: convertOffsetsToRange(r.range.start, TextRange.getEnd(r.range), parseResults.tokenizerOutput.lines), })); } } diff --git a/packages/pyright-internal/src/languageService/documentSymbolCollector.ts b/packages/pyright-internal/src/languageService/documentSymbolCollector.ts index f27b048e2776..c9a08776f285 100644 --- a/packages/pyright-internal/src/languageService/documentSymbolCollector.ts +++ b/packages/pyright-internal/src/languageService/documentSymbolCollector.ts @@ -10,63 +10,123 @@ import { CancellationToken } from 'vscode-languageserver'; -import { isCodeUnreachable } from '../analyzer/analyzerNodeInfo'; -import { Declaration } from '../analyzer/declaration'; -import { areDeclarationsSame } from '../analyzer/declarationUtils'; -import { getModuleNode } from '../analyzer/parseTreeUtils'; +import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; +import { AliasDeclaration, Declaration, DeclarationType, isAliasDeclaration } from '../analyzer/declaration'; +import { + areDeclarationsSame, + createSynthesizedAliasDeclaration, + getDeclarationsWithUsesLocalNameRemoved, +} from '../analyzer/declarationUtils'; +import { getModuleNode, getStringNodeValueRange } from '../analyzer/parseTreeUtils'; import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import * as ScopeUtils from '../analyzer/scopeUtils'; +import { isStubFile, SourceMapper } from '../analyzer/sourceMapper'; import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { TypeCategory } from '../analyzer/types'; import { throwIfCancellationRequested } from '../common/cancellationUtils'; -import { ModuleNameNode, NameNode, ParseNode } from '../parser/parseNodes'; +import { TextRange } from '../common/textRange'; +import { ImportAsNode, NameNode, ParseNode, ParseNodeType, StringNode } from '../parser/parseNodes'; + +export type CollectionResult = { + node: NameNode | StringNode; + range: TextRange; +}; // This 
walker looks for symbols that are semantically equivalent // to the requested symbol. export class DocumentSymbolCollector extends ParseTreeWalker { - private _symbolName: string; - private _declarations: Declaration[] = []; - private _startingNode: ParseNode | undefined; + static collectFromNode( + node: NameNode, + evaluator: TypeEvaluator, + cancellationToken: CancellationToken, + startingNode?: ParseNode, + treatModuleInImportAndFromImportSame = false + ): CollectionResult[] { + const symbolName = node.value; + const declarations = this.getDeclarationsForNode( + node, + evaluator, + /* resolveLocalName */ true, + cancellationToken + ); - constructor( + startingNode = startingNode ?? getModuleNode(node); + if (!startingNode) { + return []; + } + + const collector = new DocumentSymbolCollector( + symbolName, + declarations, + evaluator, + cancellationToken, + startingNode, + treatModuleInImportAndFromImportSame + ); + + return collector.collect(); + } + + static getDeclarationsForNode( node: NameNode, - private _evaluator: TypeEvaluator, - private _results: NameNode[], - private _cancellationToken: CancellationToken, - startingNode?: ParseNode - ) { - super(); - this._symbolName = node.value; + evaluator: TypeEvaluator, + resolveLocalName: boolean, + token: CancellationToken, + sourceMapper?: SourceMapper + ): Declaration[] { + throwIfCancellationRequested(token); - const declarations = this._evaluator.getDeclarationsForNameNode(node) || []; + const declarations = this._getDeclarationsForNode(node, evaluator); + const resolvedDeclarations: Declaration[] = []; declarations.forEach((decl) => { - const resolvedDecl = this._evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ true); + const resolvedDecl = evaluator.resolveAliasDeclaration(decl, resolveLocalName); if (resolvedDecl) { - this._declarations.push(resolvedDecl); + resolvedDeclarations.push(resolvedDecl); + + if (sourceMapper && isStubFile(resolvedDecl.path)) { + const implDecls = 
sourceMapper.findDeclarations(resolvedDecl); + for (const implDecl of implDecls) { + if (implDecl && implDecl.path) { + this._addIfUnique(resolvedDeclarations, implDecl); + } + } + } } }); - this._startingNode = startingNode ?? getModuleNode(node); + return resolvedDeclarations; } - collect() { - if (!this._startingNode) { - return; - } + private _results: CollectionResult[] = []; + private _dunderAllNameNodes = new Set(); + constructor( + private _symbolName: string, + private _declarations: Declaration[], + private _evaluator: TypeEvaluator, + private _cancellationToken: CancellationToken, + private _startingNode: ParseNode, + private _treatModuleInImportAndFromImportSame = false + ) { + super(); + + // Don't report strings in __all__ right away, that will + // break the assumption on the result ordering. + this._setDunderAllNodes(this._startingNode); + } + + collect() { this.walk(this._startingNode); + return this._results; } override walk(node: ParseNode) { - if (!isCodeUnreachable(node)) { + if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { super.walk(node); } } - override visitModuleName(node: ModuleNameNode): boolean { - // Don't ever look for references within a module name. - return false; - } - override visitName(node: NameNode): boolean { throwIfCancellationRequested(this._cancellationToken); @@ -76,7 +136,7 @@ export class DocumentSymbolCollector extends ParseTreeWalker { } if (this._declarations.length > 0) { - const declarations = this._evaluator.getDeclarationsForNameNode(node); + const declarations = DocumentSymbolCollector._getDeclarationsForNode(node, this._evaluator); if (declarations && declarations.length > 0) { // Does this name share a declaration with the symbol of interest? 
@@ -89,11 +149,22 @@ export class DocumentSymbolCollector extends ParseTreeWalker { this._addResult(node); } - return true; + return false; } - private _addResult(node: NameNode) { - this._results.push(node); + override visitString(node: StringNode): boolean { + throwIfCancellationRequested(this._cancellationToken); + + if (this._dunderAllNameNodes.has(node)) { + this._addResult(node); + } + + return false; + } + + private _addResult(node: NameNode | StringNode) { + const range: TextRange = node.nodeType === ParseNodeType.Name ? node : getStringNodeValueRange(node); + this._results.push({ node, range }); } private _resultsContainsDeclaration(declaration: Declaration) { @@ -105,20 +176,211 @@ export class DocumentSymbolCollector extends ParseTreeWalker { // The reference results declarations are already resolved, so we don't // need to call resolveAliasDeclaration on them. - if (this._declarations.some((decl) => areDeclarationsSame(decl, resolvedDecl))) { + if ( + this._declarations.some((decl) => + areDeclarationsSame(decl, resolvedDecl, this._treatModuleInImportAndFromImportSame) + ) + ) { return true; } // We didn't find the declaration using local-only alias resolution. Attempt // it again by fully resolving the alias. 
- const resolvedDeclNonlocal = this._evaluator.resolveAliasDeclaration( - resolvedDecl, - /* resolveLocalNames */ true - ); + const resolvedDeclNonlocal = this._getResolveAliasDeclaration(resolvedDecl); if (!resolvedDeclNonlocal || resolvedDeclNonlocal === resolvedDecl) { return false; } - return this._declarations.some((decl) => areDeclarationsSame(decl, resolvedDeclNonlocal)); + return this._declarations.some((decl) => + areDeclarationsSame(decl, resolvedDeclNonlocal, this._treatModuleInImportAndFromImportSame) + ); + } + + private _getResolveAliasDeclaration(declaration: Declaration) { + // TypeEvaluator.resolveAliasDeclaration only resolve alias in AliasDeclaration in the form of + // "from x import y as [y]" but don't do thing for alias in "import x as [x]" + // Here, alias should have same name as module name. + if (isAliasDeclFromImportAsWithAlias(declaration)) { + return getDeclarationsWithUsesLocalNameRemoved([declaration])[0]; + } + + const resolvedDecl = this._evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ true); + return isAliasDeclFromImportAsWithAlias(resolvedDecl) + ? 
getDeclarationsWithUsesLocalNameRemoved([resolvedDecl])[0] + : resolvedDecl; + + function isAliasDeclFromImportAsWithAlias(decl?: Declaration): decl is AliasDeclaration { + return ( + !!decl && + decl.type === DeclarationType.Alias && + decl.node && + decl.usesLocalName && + decl.node.nodeType === ParseNodeType.ImportAs + ); + } + } + + private _setDunderAllNodes(node: ParseNode) { + if (node.nodeType !== ParseNodeType.Module) { + return; + } + + const dunderAllInfo = AnalyzerNodeInfo.getDunderAllInfo(node); + if (!dunderAllInfo) { + return; + } + + const moduleScope = ScopeUtils.getScopeForNode(node); + if (!moduleScope) { + return; + } + + dunderAllInfo.stringNodes.forEach((stringNode) => { + if (stringNode.value !== this._symbolName) { + return; + } + + const symbolInScope = moduleScope.lookUpSymbolRecursive(stringNode.value); + if (!symbolInScope) { + return; + } + + if (!symbolInScope.symbol.getDeclarations().some((d) => this._resultsContainsDeclaration(d))) { + return; + } + + this._dunderAllNameNodes.add(stringNode); + }); + } + + private static _addIfUnique(declarations: Declaration[], itemToAdd: Declaration) { + for (const def of declarations) { + if (areDeclarationsSame(def, itemToAdd)) { + return; + } + } + + declarations.push(itemToAdd); + } + + private static _getDeclarationsForNode(node: NameNode, evaluator: TypeEvaluator): Declaration[] { + // This can handle symbols brought in by wildcard as long as declarations symbol collector + // compare against point to actual alias declaration, not one that use local name (ex, import alias) + if (node.parent?.nodeType !== ParseNodeType.ModuleName) { + let decls = evaluator.getDeclarationsForNameNode(node) || []; + + if (node.parent?.nodeType === ParseNodeType.ImportFromAs) { + // Make sure we get the decl for this specific from import statement + decls = decls.filter((d) => d.node === node.parent); + } + + // If we can't get decl, see whether we can get type from the node. 
+ // Some might have synthesized type for the node such as subModule in import X.Y statement. + if (decls.length === 0) { + const type = evaluator.getType(node); + if (type?.category === TypeCategory.Module) { + // Synthesize decl for the module. + return [createSynthesizedAliasDeclaration(type.filePath)]; + } + } + + // We would like to make X in import X and import X.Y as Y to match, but path for + // X in import X and one in import X.Y as Y might not match since path in X.Y will point + // to X.Y rather than X if import statement has an alias. + // so, for such case, we put synthesized one so we can treat X in both statement same. + for (const aliasDecl of decls.filter((d) => isAliasDeclaration(d) && !d.loadSymbolsFromPath)) { + const node = (aliasDecl as AliasDeclaration).node; + if (node.nodeType === ParseNodeType.ImportFromAs) { + // from ... import X case, decl in the submodule fallback has the path. + continue; + } + + decls.push(...(evaluator.getDeclarationsForNameNode(node.module.nameParts[0]) || [])); + } + + return decls; + } + + // We treat module name special in find all references. so symbol highlight or rename on multiple files + // works even if it is not actually a symbol defined in the file. + const moduleName = node.parent; + if ( + moduleName.parent?.nodeType === ParseNodeType.ImportAs || + moduleName.parent?.nodeType === ParseNodeType.ImportFrom + ) { + const index = moduleName.nameParts.findIndex((n) => n === node); + + // Special case, first module name part. + if (index === 0) { + // 1. import X or from X import ... + let decls: Declaration[] = []; + + // ex, import X as x + const isImportAsWithAlias = + moduleName.nameParts.length === 1 && + moduleName.parent.nodeType === ParseNodeType.ImportAs && + !!moduleName.parent.alias; + + // if "import" has alias, symbol is assigned to alias, not the module. + const importName = isImportAsWithAlias + ? 
(moduleName.parent as ImportAsNode).alias!.value + : moduleName.nameParts[0].value; + + // First, we need to re-use "decls for X" binder has created + // so that it matches with decls type evaluator returns for "references for X". + // ex) import X or from .X import ... in init file and etc. + const symbolWithScope = ScopeUtils.getScopeForNode(node)?.lookUpSymbolRecursive(importName); + if (symbolWithScope && moduleName.nameParts.length === 1) { + decls.push(...symbolWithScope.symbol.getDeclarations().filter((d) => isAliasDeclaration(d))); + + // If symbols are re-used, then find one that belong to this import statement. + if (decls.length > 1) { + decls = decls.filter((d) => { + d = d as AliasDeclaration; + + if (d.firstNamePart !== undefined) { + // For multiple import statements with sub modules, decl can be re-used. + // ex) import X.Y and import X.Z or from .X import ... in init file. + // Decls for X will be reused for both import statements, and node will point + // to first import statement. For those case, use firstNamePart instead to check. + return d.firstNamePart === moduleName.nameParts[0].value; + } + + return d.node === moduleName.parent; + }); + } + + // ex, import X as x + // We have decls for the alias "x" not the module name "X". Convert decls for the "X" + if (isImportAsWithAlias) { + decls = getDeclarationsWithUsesLocalNameRemoved(decls); + } + } + + // But, also, we need to put decls for module names type evaluator synthesized so that + // we can match both "import X" and "from X import ..." + decls.push( + ...(evaluator + .getDeclarationsForNameNode(moduleName.nameParts[0]) + ?.filter((d) => isAliasDeclaration(d)) || []) + ); + + return decls; + } + + if (index > 0) { + // 2. import X.Y or from X.Y import .... + // For submodule "Y", we just use synthesized decls from type evaluator. + // Decls for these sub module don't actually exist in the system. 
Instead, symbol for Y in + // "import X.Y" hold onto synthesized module type (without any decl). + // And "from X.Y import ..." doesn't have any symbol associated module names. + // they can't be referenced in the module. + return evaluator.getDeclarationsForNameNode(moduleName.nameParts[index]) || []; + } + + return []; + } + + return []; } } diff --git a/packages/pyright-internal/src/languageService/documentSymbolProvider.ts b/packages/pyright-internal/src/languageService/documentSymbolProvider.ts index ccb219ca8543..6e598c1b5e4c 100644 --- a/packages/pyright-internal/src/languageService/documentSymbolProvider.ts +++ b/packages/pyright-internal/src/languageService/documentSymbolProvider.ts @@ -76,7 +76,7 @@ export function getIndexAliasData( /* resolveLocalNames */ true, /* allowExternallyHiddenAccess */ false ); - if (!resolvedInfo) { + if (!resolvedInfo || !resolvedInfo.declaration) { return undefined; } @@ -352,7 +352,7 @@ function collectSymbolIndexData( return; } - if (declaration.path.length <= 0) { + if (!declaration.loadSymbolsFromPath || declaration.path.length <= 0) { // If alias doesn't have a path to the original file, we can't do dedup // so ignore those aliases. // ex) asyncio.futures, asyncio.base_futures.futures and many will dedup @@ -411,15 +411,21 @@ function collectSymbolIndexDataForName( ); } + let aliasData: IndexAliasData | undefined = undefined; + if (DeclarationType.Alias === declaration.type) { + aliasData = getIndexAliasData(AnalyzerNodeInfo.getFileInfo(parseResults.parseTree)!.importLookup, declaration); + // If we can't create alias data for import alias, then don't include it in index. + if (!aliasData) { + return; + } + } + const data: IndexSymbolData = { name, externallyVisible, kind: symbolKind, itemKind: convertSymbolKindToCompletionItemKind(symbolKind), - alias: - DeclarationType.Alias === declaration.type - ? 
getIndexAliasData(AnalyzerNodeInfo.getFileInfo(parseResults.parseTree)!.importLookup, declaration) - : undefined, + alias: aliasData, range: options.indexingForAutoImportMode ? undefined : range, selectionRange: options.indexingForAutoImportMode ? undefined : selectionRange, children: options.indexingForAutoImportMode ? undefined : children, diff --git a/packages/pyright-internal/src/languageService/importAdder.ts b/packages/pyright-internal/src/languageService/importAdder.ts new file mode 100644 index 000000000000..a74fadc7f407 --- /dev/null +++ b/packages/pyright-internal/src/languageService/importAdder.ts @@ -0,0 +1,437 @@ +/* + * importAdder.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provides code that figures out imports needed for symbols + * used in the given range and apply them later. + */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { getFileInfo } from '../analyzer/analyzerNodeInfo'; +import { + Declaration, + isAliasDeclaration, + isClassDeclaration, + isFunctionDeclaration, + isParameterDeclaration, + isVariableDeclaration, + ModuleLoaderActions, +} from '../analyzer/declaration'; +import { + createSynthesizedAliasDeclaration, + getNameFromDeclaration, + isDefinedInFile, +} from '../analyzer/declarationUtils'; +import { ImportResolver } from '../analyzer/importResolver'; +import { + getRelativeModuleName, + getTextEditsForAutoImportInsertions, + getTextEditsForAutoImportSymbolAddition, + getTopLevelImports, + ImportNameInfo, + ImportNameWithModuleInfo, + ImportStatements, +} from '../analyzer/importStatementUtils'; +import { + getDottedName, + getDottedNameWithGivenNodeAsLastName, + isLastNameOfDottedName, +} from '../analyzer/parseTreeUtils'; +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { ScopeType } from '../analyzer/scope'; +import { getScopeForNode } from '../analyzer/scopeUtils'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; 
+import { throwIfCancellationRequested } from '../common/cancellationUtils'; +import { addIfUnique, createMapFromItems, getOrAdd, removeArrayElements } from '../common/collectionUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { TextEditAction } from '../common/editAction'; +import { getDirectoryPath } from '../common/pathUtils'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { TextRange } from '../common/textRange'; +import { ModuleNameNode, NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { ParseResults } from '../parser/parser'; + +export interface ImportData { + containsUnreferenceableSymbols: boolean; + declarations: Map; +} + +export class ImportAdder { + constructor( + private _configOptions: ConfigOptions, + private _importResolver: ImportResolver, + private _evaluator: TypeEvaluator + ) {} + + collectImportsForSymbolsUsed(parseResults: ParseResults, range: TextRange, token: CancellationToken): ImportData { + const collector = new NameCollector(this._evaluator, parseResults, range, token); + collector.walk(parseResults.parseTree); + + return { + containsUnreferenceableSymbols: collector.containsUnreferenceableSymbols, + declarations: collector.declsForSymbols, + }; + } + + applyImports( + result: ImportData, + parseResults: ParseResults, + insertionPosition: number, + token: CancellationToken + ): TextEditAction[] { + throwIfCancellationRequested(token); + + const filePath = getFileInfo(parseResults.parseTree).filePath; + const importStatements = getTopLevelImports(parseResults.parseTree); + const execEnv = this._configOptions.findExecEnvironment(filePath); + + const importNameInfo: ImportNameWithModuleInfo[] = []; + for (const decl of result.declarations.keys() ?? 
[]) { + const importInfo = this._getImportInfo(decl, filePath); + if (!importInfo) { + continue; + } + + const moduleAndType = this._importResolver.getModuleNameForImport(importInfo.filePath, execEnv); + if (!moduleAndType.moduleName) { + if (!importInfo.nameInfo.name) { + continue; + } + + // module can't be addressed by absolute path in "from import" statement. + // ex) namespace package at [workspace root] or [workspace root]\__init__.py(i) + // use relative path + moduleAndType.moduleName = getRelativeModuleName( + this._importResolver.fileSystem, + filePath, + importInfo.filePath + ); + } + + addIfUnique( + importNameInfo, + { module: moduleAndType, name: importInfo.nameInfo.name, alias: importInfo.nameInfo.alias }, + (a, b) => this._areSame(a, b) + ); + } + + const edits: TextEditAction[] = []; + const newNameInfo: ImportNameWithModuleInfo[] = []; + for (const moduleAndInfo of createMapFromItems(importNameInfo, (i) => i.module.moduleName)) { + if (!this._tryProcessExistingImports(moduleAndInfo, importStatements, parseResults, edits)) { + newNameInfo.push(...moduleAndInfo[1]); + continue; + } + } + + edits.push( + ...getTextEditsForAutoImportInsertions( + newNameInfo, + importStatements, + parseResults, + convertOffsetToPosition(insertionPosition, parseResults.tokenizerOutput.lines) + ) + ); + + return edits; + } + + private _tryProcessExistingImports( + moduleAndInfo: [string, ImportNameWithModuleInfo[]], + importStatements: ImportStatements, + parseResults: ParseResults, + edits: TextEditAction[] + ) { + for (const kindAndImports of createMapFromItems( + importStatements.orderedImports.filter((i) => i.moduleName === moduleAndInfo[0]), + (i) => (i.node.nodeType === ParseNodeType.Import ? 'import' : 'importFrom') + )) { + if (kindAndImports[0] === 'importFrom') { + // We can't merge to "from module import *" statement. 
+ const imported = kindAndImports[1].filter( + (i) => i.node.nodeType === ParseNodeType.ImportFrom && !i.node.isWildcardImport + ); + if (imported.length === 0) { + // No regular from import statement. + continue; + } + + // get name info that don't exist in any of existing import statements. + const info = moduleAndInfo[1].filter( + (m) => + !imported.some( + (n) => + n.node.nodeType === ParseNodeType.ImportFrom && + n.node.imports.some((i) => i.name.value === m.name && i.alias?.value === m.alias) + ) + ); + edits.push(...getTextEditsForAutoImportSymbolAddition(info, imported[0], parseResults)); + return true; + } + + if (kindAndImports[0] === 'import') { + // import statement already exists. skip those module info. + removeArrayElements( + moduleAndInfo[1], + (i) => !i.name && kindAndImports[1].some((n) => i.alias === n.subnode?.alias?.value) + ); + continue; + } + } + + return false; + } + + private _getImportInfo( + decl: Declaration, + destFilePath: string + ): { filePath: string; nameInfo: ImportNameInfo } | undefined { + if (isAliasDeclaration(decl)) { + if (!decl.node) { + // This is synthesized decl for implicit module case such as "import a.b" + return { filePath: decl.path, nameInfo: {} }; + } + + if (decl.node.nodeType === ParseNodeType.ImportAs) { + const importDecl = this._evaluator.getDeclarationsForNameNode( + decl.node.module.nameParts[decl.node.module.nameParts.length - 1] + ); + + if (!importDecl || importDecl.length === 0) { + // We have no idea where it came from. + // ex) from unknown import unknown + return undefined; + } + + return { + filePath: importDecl[0].path, + nameInfo: { alias: decl.usesLocalName ? 
decl.node.alias?.value : undefined }, + }; + } + + if (decl.node.nodeType === ParseNodeType.ImportFromAs) { + let path: string | undefined = decl.path; + if (!path) { + // Check submodule case with no __init__ + if (decl.submoduleFallback) { + path = getDirectoryPath(decl.submoduleFallback.path); + } + } + + if (!path) { + // We have no idea where it came from. + // ex) from unknown import unknown + return undefined; + } + + if (path === destFilePath && !decl.usesLocalName && !decl.submoduleFallback) { + // Don't create import for the symbol (not module) defined in the current file + // unless alias is used. + // + // We don't check insertion point since we don't create type alias for decl defined later + // anyway. but in future, we could consider either rewrite or creating type alias for symbols + // defined after insertion point. + return undefined; + } + + return { + filePath: path, + nameInfo: { + name: decl.symbolName, + alias: decl.usesLocalName ? decl.node.alias?.value : undefined, + }, + }; + } + + if (decl.node.nodeType === ParseNodeType.ImportFrom) { + return { + filePath: decl.path, + nameInfo: { name: decl.symbolName }, + }; + } + } + + if (isVariableDeclaration(decl) || isFunctionDeclaration(decl) || isClassDeclaration(decl)) { + const name = getNameFromDeclaration(decl); + if (!name) { + return undefined; + } + + return { + filePath: decl.path, + nameInfo: { name }, + }; + } + + return undefined; + } + + private _areSame(a: ImportNameWithModuleInfo, b: ImportNameWithModuleInfo) { + return ( + a.alias === b.alias && + a.name === b.name && + a.module.importType === b.module.importType && + a.module.isLocalTypingsFile === b.module.isLocalTypingsFile && + a.module.moduleName === b.module.moduleName + ); + } +} + +class NameCollector extends ParseTreeWalker { + private readonly _filePath: string; + + // Hold onto names that we need to move imports. 
+ readonly declsForSymbols = new Map(); + containsUnreferenceableSymbols = false; + + constructor( + private _evaluator: TypeEvaluator, + private _parseResults: ParseResults, + private _range: TextRange, + private _token: CancellationToken + ) { + super(); + + this._filePath = getFileInfo(this._parseResults.parseTree).filePath; + + // For now, we assume the given range is at right boundary such as statement, statements, expression or expressions. + // In future, we might consider validating the range and adjusting it to the right boundary if needed. + } + + override walk(node: ParseNode) { + if (!TextRange.overlapsRange(this._range, node)) { + return; + } + + super.walk(node); + } + + override visitModuleName(node: ModuleNameNode) { + // We only care about references to module symbols. not decls. + return false; + } + + override visitName(name: NameNode) { + throwIfCancellationRequested(this._token); + + // We process dotted name as a whole rather than + // process each part of dotted name. + if (!isLastNameOfDottedName(name)) { + return false; + } + + const dottedName = getDottedName(getDottedNameWithGivenNodeAsLastName(name)); + if (!dottedName) { + // Not dotted name + // ex) foo().[var] + return false; + } + + // See whether the first dotted name bound to symbols defined in current file. + const firstName = dottedName[0]; + const firstNameDecls = this._getDeclarationsInModule(firstName); + if (!firstNameDecls || firstNameDecls.length === 0) { + return false; + } + + // Simple case. + // ex) import os + // [os] + if (dottedName.length === 1) { + this._handleName(firstName, firstNameDecls); + return false; + } + + for (const firstNameDecl of firstNameDecls) { + if (!isAliasDeclaration(firstNameDecl) || firstNameDecl.node.nodeType !== ParseNodeType.ImportAs) { + // decls we have is for symbols defined in current module. 
+ // ex) [foo]() + this._handleName(firstName, [firstNameDecl]); + continue; + } + + // Import with alias + // ex) import json.encoder as j + if (firstNameDecl.usesLocalName) { + this._handleName(firstName, [firstNameDecl]); + continue; + } + + // Special casing import statement with sub module ex) import a.[b] + // It is complex for import a.[b] case since decl for [b] doesn't exist. so + // when binding a.[b].foo(), we don't get decl for "import a.[b]", we need to + // do some tree walk to find import a.[b] and synthesize decl for it. + this._handleImplicitImports(firstNameDecl, dottedName, 1); + } + + return false; + } + + private _getDeclarationsInModule(name: NameNode) { + return this._evaluator.getDeclarationsForNameNode(name)?.filter((d) => isDefinedInFile(d, this._filePath)); + } + + private _handleImplicitImports( + aliasDecl: { path: string; implicitImports?: Map }, + dottedName: NameNode[], + nameIndex: number + ) { + if (dottedName.length === nameIndex) { + return; + } + + if (!aliasDecl.implicitImports) { + this._handleName(dottedName[nameIndex - 1], [createSynthesizedAliasDeclaration(aliasDecl.path)]); + return; + } + + const implicitImportDecl = aliasDecl.implicitImports.get(dottedName[nameIndex].value); + if (!implicitImportDecl) { + this._handleName(dottedName[nameIndex - 1], [createSynthesizedAliasDeclaration(aliasDecl.path)]); + return; + } + + this._handleImplicitImports(implicitImportDecl, dottedName, nameIndex + 1); + } + + private _handleName(name: NameNode, decls: Declaration[]) { + for (const decl of decls) { + if (decl.node && TextRange.containsRange(this._range, decl.node)) { + // Make sure our range doesn't already contain them. + continue; + } + + if (isParameterDeclaration(decl)) { + // Parameter is not referenceable from import statement. 
+ this.containsUnreferenceableSymbols = true; + continue; + } + + if (isVariableDeclaration(decl) || isFunctionDeclaration(decl) || isClassDeclaration(decl)) { + // For now, we will allow private variable to be referenced by import + // so that user can fix it up once import is added. + + // We only support top level variables. + const scope = getScopeForNode(name); + if (!scope) { + this.containsUnreferenceableSymbols = true; + continue; + } + + const result = scope.lookUpSymbolRecursive(name.value); + if (!result || result.scope.type !== ScopeType.Module) { + this.containsUnreferenceableSymbols = true; + continue; + } + } + + this._addName(decl, name); + } + } + + private _addName(decl: Declaration, name: NameNode) { + getOrAdd(this.declsForSymbols, decl, () => []).push(name); + } +} diff --git a/packages/pyright-internal/src/languageService/indentationUtils.ts b/packages/pyright-internal/src/languageService/indentationUtils.ts new file mode 100644 index 000000000000..966062f19815 --- /dev/null +++ b/packages/pyright-internal/src/languageService/indentationUtils.ts @@ -0,0 +1,812 @@ +/* + * indentationUtils.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Provides code to get indentation and re-indent code to the given indentation. 
+ */ + +import Char from 'typescript-char'; + +import { + findNodeByOffset, + getFirstAncestorOrSelf, + getFirstAncestorOrSelfOfKind, + getStringValueRange, + getTokenAt, + isDocString, +} from '../analyzer/parseTreeUtils'; +import { convertOffsetToPosition, convertTextRangeToRange } from '../common/positionUtils'; +import { Range, TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { ModuleNode, ParseNode, ParseNodeType, SuiteNode } from '../parser/parseNodes'; +import { ParseResults } from '../parser/parser'; +import { defaultTabSize } from '../parser/tokenizer'; +import { + IndentToken, + KeywordToken, + KeywordType, + StringToken, + StringTokenFlags, + Token, + TokenType, +} from '../parser/tokenizerTypes'; + +interface TokenInfo extends TextRange { + range: Range; + text: string; + + kind: 'comment' | 'string' | 'token'; + firstTokenOnLine: boolean; + multilineDocComment: boolean; +} + +export function getIndentation(parseResults: ParseResults, offset: number, preferDedent?: boolean): number { + // ex) + // a = """ + // | <= here + const strIndent = _tryHandleStringLiterals(parseResults, offset); + if (strIndent !== undefined) { + return strIndent; + } + + // ex) + // a = 1 + \ + // | <= here + // or + // a = (1 + + // | <= here + const exprIndent = _tryHandleMultilineConstructs(parseResults, offset); + if (exprIndent !== undefined) { + return exprIndent; + } + + preferDedent = preferDedent ?? 
_shouldDedentAfterKeyword(parseResults.tokenizerOutput.tokens, offset); + return Math.max(_getIndentation(parseResults, offset, preferDedent).indentation, 0); +} + +export function reindentSpan( + parseResults: ParseResults, + span: TextRange, + indentation: number, + indentFirstToken = true +) { + let indentDelta = 0; + const texts: string[] = []; + + // Currently _convertTokenStreams converts text in the span as whitespace and non whitespace + // and then this function puts those back to string with reidentation if needed. + // + // Another approach we can take is converting the text in 2 chunks that require reindentation and not + // and process chunks that require reindentation line by line (like how it currently does for + // multiline doc comments) and put chunks that don't require reindentation as it is. + const tokenInfo = _convertTokenStreams(parseResults, span); + let previousInfo = tokenInfo[0]; + + indentDelta = + indentation - + _getIndentationFromText(parseResults, previousInfo.range.start.line, previousInfo.range.start.character) + .indentation; + + if (previousInfo.multilineDocComment) { + texts.push(..._reindentLinesFromText(parseResults, previousInfo, indentDelta)); + } else { + if (indentFirstToken) { + texts.push(_createIndentationString(parseResults, indentation)); + } + + texts.push(previousInfo.text); + } + + for (let i = 1; i < tokenInfo.length; i++) { + const info = tokenInfo[i]; + if (info.firstTokenOnLine) { + texts.push( + parseResults.tokenizerOutput.predominantEndOfLineSequence.repeat( + info.range.start.line - previousInfo.range.end.line + ) + ); + + if (info.multilineDocComment) { + texts.push(..._reindentLinesFromText(parseResults, info, indentDelta)); + } else { + // Put indentation for the first token on the line. 
+ texts.push( + _createIndentationString( + parseResults, + Math.max( + 0, + _getIndentationFromText(parseResults, info.range.start.line, info.range.start.character) + .indentation + indentDelta + ) + ) + ); + texts.push(info.text); + } + } else { + // Put whitespace between 2 tokens on same line + // token1[space]token2 + texts.push(' '.repeat(info.range.start.character - previousInfo.range.end.character)); + texts.push(info.text); + } + + previousInfo = info; + } + + return texts.join(''); +} + +function _getIndentation( + parseResults: ParseResults, + offset: number, + preferDedent: boolean +): { token?: Token; indentation: number } { + const tokens = parseResults.tokenizerOutput.tokens; + const startingToken = _findPreviousNonWhitespaceToken(tokens, offset); + if (!startingToken) { + return { + indentation: 0, + }; + } + + const node = findNodeByOffset(parseResults.parseTree, TextRange.getEnd(startingToken)); + if (!node) { + return { + indentation: 0, + }; + } + + const suite = getFirstAncestorOrSelfOfKind(node, ParseNodeType.Suite); + if (!suite) { + return _getIndentationForNode(parseResults, parseResults.parseTree, node); + } + + const suiteSpan = convertTextRangeToRange(suite, parseResults.tokenizerOutput.lines); + if (preferDedent || suiteSpan.start.line === suiteSpan.end.line) { + // Go one more level up. + const outerContainer = getFirstAncestorOrSelf( + suite, + (n) => n !== suite && n.nodeType === ParseNodeType.Suite + ) as SuiteNode | undefined; + return _getIndentationForNode(parseResults, outerContainer ?? 
parseResults.parseTree, suite); + } + + return _getIndentationForNode(parseResults, suite, node); +} + +function _getIndentationForNode( + parseResults: ParseResults, + container: ModuleNode | SuiteNode, + current: ParseNode +): { token?: Token; indentation: number } { + if (container.nodeType === ParseNodeType.Module) { + // It is at the module level + return { + token: _getFirstTokenOFStatement(parseResults, container, current), + indentation: 0, + }; + } + + if (_containsNoIndentBeforeFirstStatement(parseResults, container)) { + const tabSize = _getTabSize(parseResults); + const outerContainer = getFirstAncestorOrSelf( + container, + (n) => n !== container && n.nodeType === ParseNodeType.Suite + ) as SuiteNode | undefined; + + const result = _getIndentationForNode(parseResults, outerContainer ?? parseResults.parseTree, container); + return { + token: result.token, + indentation: result.indentation + tabSize, + }; + } else { + const tokens = parseResults.tokenizerOutput.tokens; + return { + token: _getFirstTokenOFStatement(parseResults, container, current), + indentation: _getIndentationFromIndentToken(tokens, tokens.getItemAtPosition(container.start)), + }; + } +} + +function _containsNoIndentBeforeFirstStatement(parseResults: ParseResults, suite: SuiteNode): boolean { + if (suite.statements.filter((s) => s.length > 0).length === 0) { + // There is no statement in the suite. + // ex) + // def foo(): + // | <= here + return true; + } + + // If suite contains no indent before first statement, then consider user is in the middle of writing block + // and parser is in broken state. + // ex) + // def foo(): + // while True: + // | <= here + // def bar(): + // pass + // + // parser will think "def bar" belongs to "while True" with invalid indentation. 
+ const tokens = parseResults.tokenizerOutput.tokens; + const start = tokens.getItemAtPosition(suite.start); + const end = tokens.getItemAtPosition(suite.statements[0].start); + + for (let i = start; i <= end; i++) { + const token = _getTokenAtIndex(tokens, i); + if (token?.type === TokenType.Indent) { + return false; + } + } + + return true; +} + +function _getFirstTokenOFStatement( + parseResults: ParseResults, + container: ModuleNode | SuiteNode, + span: TextRange +): Token { + const tokens = parseResults.tokenizerOutput.tokens; + for (const statement of container.statements) { + if (!TextRange.containsRange(statement, span)) { + continue; + } + + return getTokenAt(tokens, statement.start)!; + } + + return getTokenAt(tokens, container.start)!; +} + +function _getIndentationFromIndentToken(tokens: TextRangeCollection, index: number) { + for (let i = index; i < tokens.count; i++) { + const token = _getTokenAtIndex(tokens, i); + if (token?.type === TokenType.Indent) { + return (token as IndentToken).indentAmount; + } + } + + // At the module level. + return 0; +} + +function _tryHandleMultilineConstructs(parseResults: ParseResults, offset: number): number | undefined { + const tokens = parseResults.tokenizerOutput.tokens; + + // Make sure we use next token to get line delta. + // This is just to handle how tokenizer associates new lines to which token. + // ex) a = 1 + \ + // | <= here + // [b] = 2 + const index = _findNextTokenIndex(tokens, offset); + if (index < 0) { + return undefined; + } + + const lines = parseResults.tokenizerOutput.lines; + const tabSize = _getTabSize(parseResults); + + for (let i = index; i > 0; i--) { + const token = _getTokenAtIndex(tokens, i)!; + if (TextRange.getEnd(token) < offset) { + return undefined; + } + + const previousToken = _getTokenAtIndex(tokens, i - 1)!; + const tokenSpan = token ? convertTextRangeToRange(token, lines) : undefined; + const previousTokenSpan = previousToken ? 
convertTextRangeToRange(previousToken, lines) : undefined; + + if ( + tokenSpan && + previousTokenSpan && + previousTokenSpan.end.line < tokenSpan.start.line && + previousToken!.type !== TokenType.NewLine + ) { + const indentationResult = _getIndentation(parseResults, previousToken!.start, /*preferDedent*/ false); + const currentPosition = convertOffsetToPosition(offset, lines); + + // Handle multiline constructs (explicit or implicit) + // ex) def foo \ + // | <= here + // or + // i = \ + // \ + // | <= here + // or + // a = ( + // | <= here + const lineDelta = + currentPosition.line - + (indentationResult.token + ? convertOffsetToPosition(indentationResult.token.start, lines).line + : previousTokenSpan.start.line); + + const indentation = _getFirstNonBlankLineIndentationFromText( + parseResults, + currentPosition.line, + previousTokenSpan.start.line + ); + + return indentation + (lineDelta === 1 ? tabSize : 0); + } + } + + return undefined; +} + +function _tryHandleStringLiterals(parseResults: ParseResults, offset: number): number | undefined { + const tokens = parseResults.tokenizerOutput.tokens; + const index = tokens.getItemAtPosition(offset); + if (index < 0) { + return undefined; + } + + const token = _findStringToken(tokens, index); + if (!token || token.type !== TokenType.String) { + return undefined; + } + + const stringToken = token as StringToken; + if (!(stringToken.flags & StringTokenFlags.Triplicate)) { + // We only care """ string literal + return undefined; + } + + if ( + !(stringToken.flags & StringTokenFlags.Unterminated) && + !TextRange.contains(getStringValueRange(stringToken), offset) + ) { + // ex) We only support these 2 cases. 
+ // """ + // | <= here + // or + // """ + // | <= here + // """ + return undefined; + } + + const lines = parseResults.tokenizerOutput.lines; + const begin = convertOffsetToPosition(token.start, lines); + const current = convertOffsetToPosition(offset, lines); + + return _getFirstNonBlankLineIndentationFromText(parseResults, current.line, begin.line); +} + +function _getFirstNonBlankLineIndentationFromText(parseResults: ParseResults, currentLine: number, endingLine: number) { + endingLine = Math.max(endingLine, 0); + for (let i = currentLine; i >= endingLine; i--) { + const result = _getIndentationFromText(parseResults, i); + + if (!_isBlankLine(parseResults, i, result.charOffset)) { + // Not blank line. + // ex) [indentation]i = 1 + return result.indentation; + } + } + + return _getIndentationFromText(parseResults, endingLine).indentation; +} + +function _findStringToken(tokens: TextRangeCollection, index: number): Token | undefined { + const token = _findPreviousNonWhitespaceTokenFromIndex(tokens, index); + if (!token) { + return undefined; + } + + return token.type === TokenType.String ? 
token : undefined; +} + +function _findPreviousNonWhitespaceToken(tokens: TextRangeCollection, offset: number): Token | undefined { + const index = tokens.getItemAtPosition(offset); + if (index < 0) { + return undefined; + } + + return _findPreviousNonWhitespaceTokenFromIndex(tokens, index); +} + +function _findPreviousNonWhitespaceTokenFromIndex( + tokens: TextRangeCollection, + index: number +): Token | undefined { + for (let i = index; i >= 0; i--) { + const token = _getTokenAtIndex(tokens, i); + if (!token) { + break; + } + + if (_isWhitespaceToken(token.type)) { + continue; + } + + return token; + } + + return undefined; +} + +function _findNextTokenIndex(tokens: TextRangeCollection, offset: number): number { + const index = tokens.getItemAtPosition(offset); + if (index < 0) { + return index; + } + + for (let i = index + 1; i < tokens.count; i++) { + const token = _getTokenAtIndex(tokens, i); + if (token?.type === TokenType.Dedent || token?.type === TokenType.NewLine) { + continue; + } + + return i; + } + + return tokens.count - 1; +} + +function _getTokenAtIndex(tokens: TextRangeCollection, index: number) { + if (index < 0) { + return undefined; + } + + return tokens.getItemAt(index); +} + +function _shouldDedentAfterKeyword(tokens: TextRangeCollection, offset: number) { + // Keeping the PTVS smart indenter behavior. + // For now, we won't include all small statements that can put at single line. + // See parser.ts to see all small statements or see python grammar. 
+ // ex) def foo(): pass + const index = tokens.getItemAtPosition(offset); + if (index < 0) { + return false; + } + + for (let i = index; i >= 0; i--) { + const token = _getTokenAtIndex(tokens, i); + if (!token) { + return false; + } + + switch (token.type) { + case TokenType.Dedent: + case TokenType.NewLine: + case TokenType.EndOfStream: + continue; + + case TokenType.Keyword: { + const previousToken = _getTokenAtIndex(tokens, i - 1); + if (previousToken?.type === TokenType.Colon) { + // Not for single line construct. + // ex) def foo(): pass + return false; + } + + const keyword = token as KeywordToken; + return ( + keyword.keywordType === KeywordType.Pass || + keyword.keywordType === KeywordType.Return || + keyword.keywordType === KeywordType.Break || + keyword.keywordType === KeywordType.Continue || + keyword.keywordType === KeywordType.Raise + ); + } + + default: + return false; + } + } + + return false; +} + +function _isBlankLine(parseResults: ParseResults, line: number, charOffset: number) { + const endingLength = _getLineEndingLength(parseResults, line); + const lineSpan = parseResults.tokenizerOutput.lines.getItemAt(line); + + return charOffset === lineSpan.length - endingLength; +} + +function _getLineEndingLength(parseResults: ParseResults, line: number) { + let length = 0; + const range = parseResults.tokenizerOutput.lines.getItemAt(line); + + for (let i = range.length - 1; i >= 0; i--) { + const charCode = parseResults.text.charCodeAt(range.start + i); + switch (charCode) { + case Char.FormFeed: + case Char.Hash: + case Char.LineFeed: + case Char.CarriageReturn: + length++; + break; + + default: + return length; + } + } + + return length; +} + +function _getIndentationFromText( + parseResults: ParseResults, + line: number, + uptoLineOffset?: number +): { indentation: number; charOffset: number } { + let indentation = 0; + let charOffset = 0; + + const tabSize = _getTabSize(parseResults); + const range = 
parseResults.tokenizerOutput.lines.getItemAt(line); + for (let i = 0; i < range.length; i++) { + const charCode = parseResults.text.charCodeAt(range.start + i); + switch (charCode) { + case Char.Space: + charOffset++; + indentation++; + break; + + case Char.Tab: + charOffset++; + indentation += tabSize; + break; + + default: + if (!uptoLineOffset || uptoLineOffset === i) { + return { + charOffset, + indentation, + }; + } + + // calculate indentation upto line offset given. + charOffset++; + indentation++; + } + } + + return { + charOffset, + indentation, + }; +} + +function _convertTokenStreams(parseResults: ParseResults, span: TextRange) { + // Existing token stream contains text and whitespace mixed, making it difficult + // to process for re-indentation. This will convert those to strictly text and whitespace. + const tokens = parseResults.tokenizerOutput.tokens; + + let startIndex = Math.max(tokens.getItemAtPosition(span.start), 0); + const startToken = _getTokenAtIndex(tokens, startIndex)!; + if (TextRange.getEnd(startToken) < span.start) { + // ex) firstToken | <= span start. 
+ startIndex++; + } + + let endIndex = Math.min(tokens.getItemAtPosition(TextRange.getEnd(span)), tokens.length - 1); + const endToken = _getTokenAtIndex(tokens, endIndex)!; + if (TextRange.getEnd(span) < endToken.start) { + // ex) |< = span end [endToken] + endIndex--; + } + + const tokenInfoArray: TokenInfo[] = []; + const lines = parseResults.tokenizerOutput.lines; + + for (let i = startIndex; i <= endIndex; i++) { + const token = _getTokenAtIndex(tokens, i)!; + + if (token.comments) { + for (const comment of token.comments) { + tokenInfoArray.push({ + start: comment.start, + length: comment.length, + range: convertTextRangeToRange(comment, lines), + text: comment.value, + + kind: 'comment', + firstTokenOnLine: false, + multilineDocComment: false, + }); + } + } + + if (_isWhitespaceToken(token.type) || token.length === 0) { + continue; + } + + tokenInfoArray.push({ + start: token.start, + length: token.length, + range: convertTextRangeToRange(token, lines), + text: parseResults.text.substr(token.start, token.length), + + kind: token.type === TokenType.String ? 'string' : 'token', + firstTokenOnLine: false, + multilineDocComment: false, + }); + } + + if (tokenInfoArray.length === 0) { + return tokenInfoArray; + } + + tokenInfoArray.sort((a, b) => a.start - b.start); + + // Handle text in whitespace that is not part of token stream. + let previousInfo = tokenInfoArray[0]; + const additionalTokens: TokenInfo[] = []; + for (let i = 1; i < tokenInfoArray.length; i++) { + const info = tokenInfoArray[i]; + + // Another approach is just blindly go through the range looking for + // non whitespace char rather than looking for specific cases like below. 
+ if (previousInfo.kind !== 'comment') { + for ( + let whitespaceLine = previousInfo.range.end.line; + whitespaceLine < info.range.start.line; + whitespaceLine++ + ) { + const lineTextRange = lines.getItemAt(whitespaceLine); + const lastCharOffset = lineTextRange.length - _getLineEndingLength(parseResults, whitespaceLine) - 1; + if (lastCharOffset >= 0) { + // ex) i = 1 \ <= explicit multiline construct + // + + // 2 + const start = lineTextRange.start + lastCharOffset; + _addTokenInfoIfMatch(parseResults, start, start + 1, Char.Backslash, additionalTokens); + } + } + } + + if (info.kind === 'comment') { + const start = + previousInfo.range.end.line === info.range.start.line + ? TextRange.getEnd(previousInfo) + : lines.getItemAt(info.range.start.line).start; + + // ex) token [#] comment + _addTokenInfoIfMatch(parseResults, start, info.start, Char.Hash, additionalTokens); + } + + previousInfo = info; + } + + tokenInfoArray.push(...additionalTokens); + tokenInfoArray.sort((a, b) => a.start - b.start); + + // Update firstTokenOnLine and multilineDocComment + previousInfo = tokenInfoArray[0]; + + if (startIndex === 0) { + // It is the first token in the file. 
+ previousInfo.firstTokenOnLine = true; + } else { + const previousToken = _findPreviousNonWhitespaceTokenFromIndex(tokens, startIndex - 1)!; + const previousEnd = convertOffsetToPosition(TextRange.getEnd(previousToken), lines); + previousInfo.firstTokenOnLine = previousEnd.line !== previousInfo.range.start.line; + } + + previousInfo.multilineDocComment = _isMultilineDocComment(parseResults, previousInfo); + + for (let i = 1; i < tokenInfoArray.length; i++) { + const info = tokenInfoArray[i]; + + info.firstTokenOnLine = previousInfo.range.end.line !== info.range.start.line; + info.multilineDocComment = _isMultilineDocComment(parseResults, info); + + previousInfo = info; + } + + return tokenInfoArray; +} + +function _addTokenInfoIfMatch( + parseResults: ParseResults, + start: number, + end: number, + charCode: number, + tokens: TokenInfo[] +) { + for (let i = start; i < end; i++) { + if (parseResults.text.charCodeAt(i) === charCode) { + tokens.push({ + start: i, + length: 1, + range: convertTextRangeToRange(TextRange.create(i, 1), parseResults.tokenizerOutput.lines), + text: String.fromCharCode(charCode), + + kind: 'token', + firstTokenOnLine: false, + multilineDocComment: false, + }); + } + } +} + +function _isWhitespaceToken(type: TokenType): boolean { + switch (type) { + case TokenType.Dedent: + case TokenType.NewLine: + case TokenType.Indent: + case TokenType.EndOfStream: + return true; + + default: + return false; + } +} + +function _isMultilineDocComment(parseResults: ParseResults, info: TokenInfo) { + if (info.kind !== 'string' || !info.firstTokenOnLine || info.range.start.line === info.range.end.line) { + return false; + } + + const node = findNodeByOffset(parseResults.parseTree, info.start); + if ( + node?.nodeType !== ParseNodeType.String || + node.parent?.nodeType !== ParseNodeType.StringList || + node.parent.parent?.nodeType !== ParseNodeType.StatementList + ) { + return false; + } + + return isDocString(node.parent.parent); +} + +function 
_reindentLinesFromText(parseResults: ParseResults, info: TokenInfo, indentDelta: number) { + const texts: string[] = []; + for (let i = info.range.start.line; i <= info.range.end.line; i++) { + texts.push(_reindentLineFromText(parseResults, i, indentDelta, i === info.range.end.line ? info : undefined)); + } + + return texts; +} + +function _reindentLineFromText( + parseResults: ParseResults, + line: number, + indentDelta: number, + range?: TextRange +): string { + const result = _getIndentationFromText(parseResults, line); + if (_isBlankLine(parseResults, line, result.charOffset)) { + return ''; + } + + let lineRange = parseResults.tokenizerOutput.lines.getItemAt(line); + if (range) { + lineRange = TextRange.fromBounds( + lineRange.start, + Math.min(TextRange.getEnd(range), TextRange.getEnd(lineRange)) + ); + } + + const text = parseResults.text.substr(lineRange.start + result.charOffset, lineRange.length - result.charOffset); + return _createIndentationString(parseResults, Math.max(result.indentation + indentDelta, 0)) + text; +} + +function _getTabSize(parseResults: ParseResults) { + const tab = parseResults.tokenizerOutput.predominantTabSequence; + const tabLength = tab.length; + if (tabLength === 1 && tab.charCodeAt(0) === Char.Tab) { + // Tokenizer will use 8 for Char.Tab and put that info in indentToken's indent size. 
+ return defaultTabSize; + } + + return tabLength; +} + +function _createIndentationString(parseResults: ParseResults, indentation: number) { + const tab = parseResults.tokenizerOutput.predominantTabSequence; + const tabLength = tab.length; + if (tabLength === 1 && tab.charCodeAt(0) === Char.Tab) { + const spaceCount = indentation % defaultTabSize; + const tabCount = (indentation - spaceCount) / defaultTabSize; + + return '\t'.repeat(tabCount) + ' '.repeat(spaceCount); + } + + return ' '.repeat(indentation); +} diff --git a/packages/pyright-internal/src/languageService/quickActions.ts b/packages/pyright-internal/src/languageService/quickActions.ts index 7cb1595ee2b0..23f6c0f41455 100644 --- a/packages/pyright-internal/src/languageService/quickActions.ts +++ b/packages/pyright-internal/src/languageService/quickActions.ts @@ -85,16 +85,20 @@ function _addMissingOptionalToParam( const importStatement = importStatements.orderedImports.find((imp) => imp.moduleName === 'typing'); // If there's an existing import statement, insert into it. 
- if (importStatement && importStatement.node.nodeType === ParseNodeType.ImportFrom) { + if ( + importStatement && + importStatement.node.nodeType === ParseNodeType.ImportFrom && + !importStatement.node.isWildcardImport + ) { const additionalEditActions = getTextEditsForAutoImportSymbolAddition( - 'Optional', + { name: 'Optional' }, importStatement, parseResults ); editActions.push(...additionalEditActions); } else { const additionalEditActions = getTextEditsForAutoImportInsertion( - 'Optional', + { name: 'Optional' }, importStatements, 'typing', ImportGroup.BuiltIn, diff --git a/packages/pyright-internal/src/languageService/referencesProvider.ts b/packages/pyright-internal/src/languageService/referencesProvider.ts index 9e81b556d4f5..ff346b9e98f4 100644 --- a/packages/pyright-internal/src/languageService/referencesProvider.ts +++ b/packages/pyright-internal/src/languageService/referencesProvider.ts @@ -10,19 +10,17 @@ import { CancellationToken } from 'vscode-languageserver'; -import * as AnalyzerNodeInfo from '../analyzer/analyzerNodeInfo'; import { Declaration } from '../analyzer/declaration'; -import * as DeclarationUtils from '../analyzer/declarationUtils'; import * as ParseTreeUtils from '../analyzer/parseTreeUtils'; -import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; -import { isStubFile, SourceMapper } from '../analyzer/sourceMapper'; +import { SourceMapper } from '../analyzer/sourceMapper'; import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; import { throwIfCancellationRequested } from '../common/cancellationUtils'; import { convertOffsetToPosition, convertPositionToOffset } from '../common/positionUtils'; import { DocumentRange, Position } from '../common/textRange'; import { TextRange } from '../common/textRange'; -import { ModuleNameNode, NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; +import { NameNode, ParseNode, ParseNodeType } from '../parser/parseNodes'; import { ParseResults } from '../parser/parser'; 
+import { DocumentSymbolCollector } from './documentSymbolCollector'; export type ReferenceCallback = (locations: DocumentRange[]) => void; @@ -54,9 +52,7 @@ export class ReferencesResult { } } -export class FindReferencesTreeWalker extends ParseTreeWalker { - private readonly _locationsFound: DocumentRange[] = []; - +export class FindReferencesTreeWalker { constructor( private _parseResults: ParseResults, private _filePath: string, @@ -64,152 +60,66 @@ export class FindReferencesTreeWalker extends ParseTreeWalker { private _includeDeclaration: boolean, private _evaluator: TypeEvaluator, private _cancellationToken: CancellationToken - ) { - super(); - } + ) {} findReferences(rootNode = this._parseResults.parseTree) { - this.walk(rootNode); - - return this._locationsFound; - } - - override walk(node: ParseNode) { - if (!AnalyzerNodeInfo.isCodeUnreachable(node)) { - super.walk(node); - } - } - - override visitModuleName(node: ModuleNameNode): boolean { - // Don't ever look for references within a module name. - return false; - } - - override visitName(node: NameNode): boolean { - throwIfCancellationRequested(this._cancellationToken); - - // No need to do any more work if the symbol name doesn't match. - if (node.value !== this._referencesResult.symbolName) { - return false; - } - - const declarations = this._evaluator.getDeclarationsForNameNode(node); + const collector = new DocumentSymbolCollector( + this._referencesResult.symbolName, + this._referencesResult.declarations, + this._evaluator, + this._cancellationToken, + rootNode, + /* treat module in import and from import same */ true + ); - if (declarations && declarations.length > 0) { - // Does this name share a declaration with the symbol of interest? - if (declarations.some((decl) => this._resultsContainsDeclaration(decl))) { - // Is it the same symbol? 
- if (this._includeDeclaration || node !== this._referencesResult.nodeAtOffset) { - this._locationsFound.push({ - path: this._filePath, - range: { - start: convertOffsetToPosition(node.start, this._parseResults.tokenizerOutput.lines), - end: convertOffsetToPosition( - TextRange.getEnd(node), - this._parseResults.tokenizerOutput.lines - ), - }, - }); - } + const results: DocumentRange[] = []; + for (const result of collector.collect()) { + // Is it the same symbol? + if (this._includeDeclaration || result.node !== this._referencesResult.nodeAtOffset) { + results.push({ + path: this._filePath, + range: { + start: convertOffsetToPosition(result.range.start, this._parseResults.tokenizerOutput.lines), + end: convertOffsetToPosition( + TextRange.getEnd(result.range), + this._parseResults.tokenizerOutput.lines + ), + }, + }); } } - return true; - } - - private _resultsContainsDeclaration(declaration: Declaration) { - // Resolve the declaration. - const resolvedDecl = this._evaluator.resolveAliasDeclaration(declaration, /* resolveLocalNames */ false); - if (!resolvedDecl) { - return false; - } - - // The reference results declarations are already resolved, so we don't - // need to call resolveAliasDeclaration on them. - if ( - this._referencesResult.declarations.some((decl) => DeclarationUtils.areDeclarationsSame(decl, resolvedDecl)) - ) { - return true; - } - - // We didn't find the declaration using local-only alias resolution. Attempt - // it again by fully resolving the alias. 
- const resolvedDeclNonlocal = this._evaluator.resolveAliasDeclaration( - resolvedDecl, - /* resolveLocalNames */ true - ); - if (!resolvedDeclNonlocal || resolvedDeclNonlocal === resolvedDecl) { - return false; - } - - return this._referencesResult.declarations.some((decl) => - DeclarationUtils.areDeclarationsSame(decl, resolvedDeclNonlocal) - ); + return results; } } export class ReferencesProvider { - static getDeclarationForPosition( + static getDeclarationForNode( sourceMapper: SourceMapper, - parseResults: ParseResults, filePath: string, - position: Position, + node: NameNode, evaluator: TypeEvaluator, reporter: ReferenceCallback | undefined, token: CancellationToken - ): ReferencesResult | undefined { + ) { throwIfCancellationRequested(token); - const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); - if (offset === undefined) { - return undefined; - } - - const node = ParseTreeUtils.findNodeByOffset(parseResults.parseTree, offset); - if (node === undefined) { - return undefined; - } - - // If this isn't a name node, there are no references to be found. - if (node.nodeType !== ParseNodeType.Name) { - return undefined; - } - - // Special case module names, which don't have references. 
- if (node.parent?.nodeType === ParseNodeType.ModuleName) { - return undefined; - } - - const declarations = evaluator.getDeclarationsForNameNode(node); - if (!declarations) { - return undefined; - } - - const resolvedDeclarations: Declaration[] = []; - declarations.forEach((decl) => { - const resolvedDecl = evaluator.resolveAliasDeclaration(decl, /* resolveLocalNames */ false); - if (resolvedDecl) { - resolvedDeclarations.push(resolvedDecl); - - if (isStubFile(resolvedDecl.path)) { - const implDecls = sourceMapper.findDeclarations(resolvedDecl); - for (const implDecl of implDecls) { - if (implDecl && implDecl.path) { - this._addIfUnique(resolvedDeclarations, implDecl); - } - } - } - } - }); + const declarations = DocumentSymbolCollector.getDeclarationsForNode( + node, + evaluator, + /* resolveLocalNames */ false, + token, + sourceMapper + ); - if (resolvedDeclarations.length === 0) { + if (declarations.length === 0) { return undefined; } // Does this symbol require search beyond the current file? Determine whether // the symbol is declared within an evaluation scope that is within the current // file and cannot be imported directly from other modules. - const requiresGlobalSearch = resolvedDeclarations.some((decl) => { + const requiresGlobalSearch = declarations.some((decl) => { // If the declaration is outside of this file, a global search is needed. 
if (decl.path !== filePath) { return true; @@ -234,17 +144,36 @@ export class ReferencesProvider { return false; }); - return new ReferencesResult(requiresGlobalSearch, node, node.value, resolvedDeclarations, reporter); + return new ReferencesResult(requiresGlobalSearch, node, node.value, declarations, reporter); } - private static _addIfUnique(declarations: Declaration[], itemToAdd: Declaration) { - for (const def of declarations) { - if (DeclarationUtils.areDeclarationsSame(def, itemToAdd)) { - return; - } + static getDeclarationForPosition( + sourceMapper: SourceMapper, + parseResults: ParseResults, + filePath: string, + position: Position, + evaluator: TypeEvaluator, + reporter: ReferenceCallback | undefined, + token: CancellationToken + ): ReferencesResult | undefined { + throwIfCancellationRequested(token); + + const offset = convertPositionToOffset(position, parseResults.tokenizerOutput.lines); + if (offset === undefined) { + return undefined; + } + + const node = ParseTreeUtils.findNodeByOffset(parseResults.parseTree, offset); + if (node === undefined) { + return undefined; + } + + // If this isn't a name node, there are no references to be found. + if (node.nodeType !== ParseNodeType.Name) { + return undefined; } - declarations.push(itemToAdd); + return this.getDeclarationForNode(sourceMapper, filePath, node, evaluator, reporter, token); } static addReferences( diff --git a/packages/pyright-internal/src/languageService/renameModuleProvider.ts b/packages/pyright-internal/src/languageService/renameModuleProvider.ts new file mode 100644 index 000000000000..13d5a07e8353 --- /dev/null +++ b/packages/pyright-internal/src/languageService/renameModuleProvider.ts @@ -0,0 +1,1353 @@ +/* + * renameModuleProvider.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Logic that updates affected references of a module rename/move. 
+ */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { getImportInfo } from '../analyzer/analyzerNodeInfo'; +import { + AliasDeclaration, + Declaration, + isAliasDeclaration, + isClassDeclaration, + isFunctionDeclaration, + isVariableDeclaration, +} from '../analyzer/declaration'; +import { createSynthesizedAliasDeclaration, getNameFromDeclaration } from '../analyzer/declarationUtils'; +import { createImportedModuleDescriptor, ImportResolver, ModuleNameAndType } from '../analyzer/importResolver'; +import { + getDirectoryLeadingDotsPointsTo, + getImportGroupFromModuleNameAndType, + getRelativeModuleName, + getTextEditsForAutoImportInsertion, + getTextEditsForAutoImportSymbolAddition, + getTextRangeForImportNameDeletion, + getTopLevelImports, + ImportNameInfo, + ImportStatement, + ImportStatements, +} from '../analyzer/importStatementUtils'; +import { + getDottedNameWithGivenNodeAsLastName, + getFirstAncestorOrSelfOfKind, + getFullStatementRange, + isFromImportAlias, + isFromImportModuleName, + isFromImportName, + isImportAlias, + isImportModuleName, + isLastNameOfModuleName, +} from '../analyzer/parseTreeUtils'; +import { ParseTreeWalker } from '../analyzer/parseTreeWalker'; +import { isStubFile } from '../analyzer/sourceMapper'; +import { TypeEvaluator } from '../analyzer/typeEvaluatorTypes'; +import { getOrAdd, removeArrayElements } from '../common/collectionUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { isString } from '../common/core'; +import { assert, assertNever } from '../common/debug'; +import { FileEditAction } from '../common/editAction'; +import { FileSystem } from '../common/fileSystem'; +import { + combinePaths, + getDirectoryChangeKind, + getDirectoryPath, + getFileName, + isDirectory, + isFile, + resolvePaths, + stripFileExtension, +} from '../common/pathUtils'; +import { convertOffsetToPosition, convertTextRangeToRange } from '../common/positionUtils'; +import { doRangesIntersect, extendRange, 
Range, rangesAreEqual, TextRange } from '../common/textRange'; +import { + ImportAsNode, + ImportFromAsNode, + ImportFromNode, + ImportNode, + isExpressionNode, + MemberAccessNode, + ModuleNameNode, + ModuleNode, + NameNode, + ParseNode, + ParseNodeType, +} from '../parser/parseNodes'; +import { ParseResults } from '../parser/parser'; +import { CollectionResult, DocumentSymbolCollector } from './documentSymbolCollector'; + +enum UpdateType { + File, + Folder, + Symbol, +} + +export class RenameModuleProvider { + static createForModule( + importResolver: ImportResolver, + configOptions: ConfigOptions, + evaluator: TypeEvaluator, + path: string, + newPath: string, + token: CancellationToken + ): RenameModuleProvider | undefined { + if (!importResolver.fileSystem.existsSync(path)) { + return undefined; + } + + if (isFile(importResolver.fileSystem, path)) { + return this._create(importResolver, configOptions, evaluator, path, newPath, UpdateType.File, token); + } else if (isDirectory(importResolver.fileSystem, path)) { + // Make sure folder path is simple rename. + if (getDirectoryChangeKind(importResolver.fileSystem, path, newPath) !== 'Renamed') { + return undefined; + } + + // We don't support namespace folder name. Currently, we don't have + // a way to find namespace folder references. 
+ let fileNameForPackage = combinePaths(path, '__init__.pyi'); + if (!importResolver.fileSystem.existsSync(fileNameForPackage)) { + fileNameForPackage = combinePaths(path, '__init__.py'); + if (!importResolver.fileSystem.existsSync(fileNameForPackage)) { + return undefined; + } + } + + return this._create( + importResolver, + configOptions, + evaluator, + fileNameForPackage, + combinePaths(newPath, getFileName(fileNameForPackage)), + UpdateType.Folder, + token + ); + } + + return undefined; + } + + static createForSymbol( + importResolver: ImportResolver, + configOptions: ConfigOptions, + evaluator: TypeEvaluator, + path: string, + newPath: string, + declarations: Declaration[], + token: CancellationToken + ): RenameModuleProvider | undefined { + if (!importResolver.fileSystem.existsSync(path)) { + return undefined; + } + + const filteredDecls = declarations.filter( + (d) => isClassDeclaration(d) || isFunctionDeclaration(d) || isVariableDeclaration(d) + ); + + if (filteredDecls.length === 0) { + return undefined; + } + + return this._create( + importResolver, + configOptions, + evaluator, + path, + newPath, + UpdateType.Symbol, + filteredDecls, + token! + ); + } + + private static _create( + importResolver: ImportResolver, + configOptions: ConfigOptions, + evaluator: TypeEvaluator, + moduleFilePath: string, + newModuleFilePath: string, + type: UpdateType, + tokenOrDeclarations: Declaration[] | CancellationToken, + token?: CancellationToken + ): RenameModuleProvider | undefined { + const execEnv = configOptions.findExecEnvironment(moduleFilePath); + const moduleName = importResolver.getModuleNameForImport(moduleFilePath, execEnv); + if (!moduleName.moduleName) { + return undefined; + } + + const newModuleName = importResolver.getModuleNameForImport(newModuleFilePath, execEnv); + if (!newModuleName.moduleName) { + return undefined; + } + + token = CancellationToken.is(tokenOrDeclarations) ? 
tokenOrDeclarations : token; + const declarations = CancellationToken.is(tokenOrDeclarations) ? [] : tokenOrDeclarations; + if (declarations.length === 0) { + // Create synthesized alias decls from the given file path. If the given file is for stub, + // create one for the corresponding py file as well. + declarations.push(createSynthesizedAliasDeclaration(moduleFilePath)); + if (isStubFile(moduleFilePath)) { + // The resolveImport should make sure non stub file search to happen. + importResolver.resolveImport( + moduleFilePath, + execEnv, + createImportedModuleDescriptor(moduleName.moduleName) + ); + + importResolver + .getSourceFilesFromStub(moduleFilePath, execEnv, /*mapCompiled*/ false) + .forEach((p) => declarations!.push(createSynthesizedAliasDeclaration(p))); + } + } + + return new RenameModuleProvider( + importResolver.fileSystem, + evaluator, + moduleFilePath, + newModuleFilePath, + moduleName, + newModuleName, + type, + declarations, + token! + ); + } + + private readonly _newModuleFilePath: string; + private readonly _moduleNames: string[]; + private readonly _newModuleNames: string[]; + private readonly _onlyNameChanged: boolean; + private readonly _results = new Map(); + private readonly _aliasIntroduced = new Set(); + + private constructor( + private _fs: FileSystem, + private _evaluator: TypeEvaluator, + private _moduleFilePath: string, + newModuleFilePath: string, + private _moduleNameAndType: ModuleNameAndType, + private _newModuleNameAndType: ModuleNameAndType, + private _type: UpdateType, + private _declarations: Declaration[], + private _token: CancellationToken + ) { + // moduleName and newModuleName are always in the absolute path form. 
+ this._newModuleFilePath = resolvePaths(newModuleFilePath); + + this._moduleNames = this._moduleName.split('.'); + this._newModuleNames = this._newModuleName.split('.'); + + if (this._moduleNames.length !== this._newModuleNames.length) { + this._onlyNameChanged = false; + return; + } + + let i = 0; + for (i = 0; i < this._moduleNames.length - 1; i++) { + if (this._moduleNames[i] !== this._newModuleNames[i]) { + break; + } + } + + this._onlyNameChanged = i === this._moduleNames.length - 1; + assert(this._type !== UpdateType.Folder || this._onlyNameChanged, 'We only support simple rename for folder'); + } + + renameReferences(filePath: string, parseResults: ParseResults) { + switch (this._type) { + case UpdateType.Folder: + return this._renameFolderReferences(filePath, parseResults); + case UpdateType.File: + return this._renameModuleReferences(filePath, parseResults); + case UpdateType.Symbol: + return this._updateSymbolReferences(filePath, parseResults); + default: + return assertNever(this._type, `${this._type} is unknown`); + } + } + + private _updateSymbolReferences(filePath: string, parseResults: ParseResults) { + const collector = new DocumentSymbolCollector( + getNameFromDeclaration(this._declarations[0]) ?? '', + this._declarations, + this._evaluator!, + this._token, + parseResults.parseTree, + /*treatModuleImportAndFromImportSame*/ true + ); + + // See if we need to insert new import statement + const importStatements = getTopLevelImports(parseResults.parseTree, /*includeImplicitImports*/ true); + + // See whether we have existing import statement for the same module + // ex) import [moduleName] or from ... 
import [moduleName] + const imported = importStatements.orderedImports.find((i) => i.moduleName === this._newModuleName); + + const nameRemoved = new Set(); + const importUsed = new Map(); + for (const result of collector.collect()) { + const nodeFound = result.node; + + if (nodeFound.nodeType === ParseNodeType.String) { + // Ignore symbol appearing in the __all__. it should be handled + // when decl is moved. + continue; + } + + if (isFromImportName(nodeFound)) { + // ex) from ... import [symbol] ... + const fromNode = nodeFound.parent?.parent as ImportFromNode; + const newModuleName = this._getNewModuleName( + filePath, + fromNode.module.leadingDots > 0, + /* isLastPartImportName */ false + ); + + if (fromNode.imports.length === 1) { + // ex) "from [module] import symbol" to "from [module.changed] import symbol" + this._addResultWithTextRange(filePath, fromNode.module, parseResults, newModuleName); + } else { + // ex) "from module import symbol, another_symbol" to + // "from module import another_symbol" and "from module.changed import symbol" + + // Delete the existing import name including alias. + const importFromAs = nodeFound.parent as ImportFromAsNode; + this._addFromImportNameDeletion( + filePath, + parseResults, + nameRemoved, + fromNode.imports, + importFromAs + ); + + // For now, this won't merge absolute and relative path "from import" statement. 
+ const importNameInfo = { + name: importFromAs.name.value, + alias: importFromAs.alias?.value, + }; + + this._addResultEdits( + this._getTextEditsForNewOrExistingFromImport( + filePath, + fromNode, + parseResults, + nameRemoved, + importStatements, + newModuleName, + [importNameInfo] + ) + ); + } + + continue; + } + + const dottedName = getDottedNameWithGivenNodeAsLastName(nodeFound); + if (dottedName === nodeFound || dottedName.nodeType !== ParseNodeType.MemberAccess) { + // ex) from module import foo + // foo + // foo.method() + // + // from module import * + // foo() + // bar() + // + // we don't need to do anything for wild card case since + // we will preserve __all__ entries. + continue; + } + + const moduleName = + dottedName.leftExpression.nodeType === ParseNodeType.MemberAccess + ? dottedName.leftExpression.memberName + : dottedName.leftExpression.nodeType === ParseNodeType.Name + ? dottedName.leftExpression + : undefined; + if (!moduleName) { + // ex) from module import foo + // getModule().foo + continue; + } + + const moduleDecl = this._evaluator + .getDeclarationsForNameNode(moduleName) + ?.filter( + (d) => + isAliasDeclaration(d) && + (d.node.nodeType === ParseNodeType.ImportAs || d.node.nodeType === ParseNodeType.ImportFromAs) + ); + if (!moduleDecl || moduleDecl.length === 0) { + // ex) from xxx import yyy + // yyy.property.foo + continue; + } + + const importAs = moduleDecl[0].node as ImportAsNode | ImportFromAsNode; + getOrAdd(importUsed, importAs, () => []).push(dottedName); + continue; + } + + // Handle symbol references that are used off imported modules. 
+ for (const [key, value] of importUsed) { + let referenceModuleName: string; + if (this._canReplaceImportName(parseResults, key, value)) { + const moduleName = this._getReferenceModuleName(importStatements, imported); + if (key.nodeType === ParseNodeType.ImportAs) { + if (moduleName) { + referenceModuleName = moduleName; + this._addImportNameDeletion( + filePath, + parseResults, + nameRemoved, + (key.parent as ImportNode).list, + key + ); + } else { + referenceModuleName = key.alias ? key.alias.value : this._newModuleName; + this._addResultWithTextRange(filePath, key.module, parseResults, this._newModuleName); + } + } else { + if (moduleName) { + referenceModuleName = moduleName; + this._addFromImportNameDeletion( + filePath, + parseResults, + nameRemoved, + (key.parent as ImportFromNode).imports, + key + ); + } else { + const fromNode = key.parent as ImportFromNode; + const newModuleName = this._getNewModuleName( + filePath, + fromNode.module.leadingDots > 0, + /* isLastPartImportName */ true + ); + + referenceModuleName = key.alias ? 
key.alias.value : this._newLastModuleName; + this._addResultWithTextRange(filePath, fromNode.module, parseResults, newModuleName); + this._addResultWithTextRange(filePath, key.name, parseResults, this._newLastModuleName); + } + } + } else { + const moduleName = this._getReferenceModuleName(importStatements, imported); + if (moduleName) { + referenceModuleName = moduleName; + } else { + referenceModuleName = this._newModuleName; + this._addResultEdits( + getTextEditsForAutoImportInsertion( + [], + importStatements, + this._newModuleName, + getImportGroupFromModuleNameAndType(this._newModuleNameAndType), + parseResults, + convertOffsetToPosition(parseResults.parseTree.length, parseResults.tokenizerOutput.lines) + ).map((e) => ({ filePath, range: e.range, replacementText: e.replacementText })) + ); + } + } + + for (const node of value) { + this._addResultWithTextRange(filePath, node.leftExpression, parseResults, referenceModuleName); + } + } + } + + private _getReferenceModuleName( + importStatements: ImportStatements, + imported: ImportStatement | undefined + ): string | undefined { + if (imported && imported.node.nodeType === ParseNodeType.Import) { + return imported.subnode?.alias ? imported.subnode.alias.value : this._newModuleName; + } else if (importStatements.implicitImports?.has(this._newModuleFilePath)) { + const fromImportAs = importStatements.implicitImports.get(this._newModuleFilePath)!; + return fromImportAs.alias ? fromImportAs.alias.value : fromImportAs.name.value; + } + + return undefined; + } + + private _canReplaceImportName( + parseResults: ParseResults, + importAs: ImportAsNode | ImportFromAsNode, + symbolReferences: MemberAccessNode[] + ): boolean { + const nameToBind = + importAs.alias ?? + (importAs.nodeType === ParseNodeType.ImportAs + ? 
importAs.module.nameParts[importAs.module.nameParts.length - 1] + : importAs.name); + + const declarations = DocumentSymbolCollector.getDeclarationsForNode( + nameToBind, + this._evaluator, + /*resolveLocalName*/ false, + this._token + ); + if (declarations.length === 0) { + return false; + } + + const collector = new DocumentSymbolCollector( + nameToBind.value, + declarations, + this._evaluator!, + this._token, + parseResults.parseTree, + /*treatModuleImportAndFromImportSame*/ true + ); + + for (const result of collector.collect()) { + if ( + isImportModuleName(result.node) || + isImportAlias(result.node) || + isFromImportModuleName(result.node) || + isFromImportName(result.node) || + isFromImportAlias(result.node) + ) { + // collector will report decls as well. ignore decls. + continue; + } + + if (!symbolReferences.some((s) => TextRange.containsRange(s, result.node))) { + return false; + } + } + + return true; + } + + private _renameFolderReferences(filePath: string, parseResults: ParseResults) { + const collector = new DocumentSymbolCollector( + this.lastModuleName, + this._declarations, + this._evaluator!, + this._token, + parseResults.parseTree, + /*treatModuleImportAndFromImportSame*/ true + ); + + // We only support simple rename of folder. Change all occurrence of the old folder name + // to new name. + for (const result of collector.collect()) { + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + } + } + + private _renameModuleReferences(filePath: string, parseResults: ParseResults) { + const collector = new DocumentSymbolCollector( + this.lastModuleName, + this._declarations, + this._evaluator!, + this._token, + parseResults.parseTree, + /*treatModuleImportAndFromImportSame*/ true + ); + + const nameRemoved = new Set(); + const results = collector.collect(); + + // Update module references first. 
+ this._updateModuleReferences(filePath, parseResults, nameRemoved, results); + + // If the module file has moved, we need to update all relative paths used in the file to reflect the move. + this._updateRelativeModuleNamePath(filePath, parseResults, nameRemoved, results); + } + + private _updateRelativeModuleNamePath( + filePath: string, + parseResults: ParseResults, + nameRemoved: Set, + results: CollectionResult[] + ) { + if (filePath !== this._moduleFilePath) { + // We only update relative import paths for the file that has moved. + return; + } + + let importStatements: ImportStatements | undefined; + + // Filter out module name that is already re-written. + for (const edit of this._getNewRelativeModuleNamesForFileMoved( + filePath, + ModuleNameCollector.collect(parseResults.parseTree).filter( + (m) => !results.some((r) => TextRange.containsRange(m.parent!, r.node)) + ) + )) { + this._addResultWithTextRange(filePath, edit.moduleName, parseResults, edit.newModuleName); + + if (!edit.itemsToMove) { + continue; + } + + // This could introduce multiple import statements for same modules with + // different symbols per module name. Unfortunately, there is no easy way to + // prevent it since we can't see changes made by other code until all changes + // are committed. In future, if we support snapshot and diff between snapshots, + // then we can support those complex code generations. + const fromNode = edit.moduleName.parent as ImportFromNode; + + // First, delete existing exported symbols from "from import" statement. + for (const importFromAs of edit.itemsToMove) { + this._addFromImportNameDeletion(filePath, parseResults, nameRemoved, fromNode.imports, importFromAs); + } + + importStatements = + importStatements ?? getTopLevelImports(parseResults.parseTree, /*includeImplicitImports*/ false); + + // For now, this won't merge absolute and relative path "from import" + // statement. 
+ this._addResultEdits( + this._getTextEditsForNewOrExistingFromImport( + filePath, + fromNode, + parseResults, + nameRemoved, + importStatements, + getRelativeModuleName( + this._fs, + this._newModuleFilePath, + this._newModuleFilePath, + /*ignoreFolderStructure*/ false, + /*sourceIsFile*/ true + ), + edit.itemsToMove.map((i) => { + return { name: i.name.value, alias: i.alias?.value }; + }) + ) + ); + } + } + + private _updateModuleReferences( + filePath: string, + parseResults: ParseResults, + nameRemoved: Set, + results: CollectionResult[] + ) { + let importStatements: ImportStatements | undefined; + for (const result of results) { + const nodeFound = result.node; + + if (nodeFound.nodeType === ParseNodeType.String) { + // ex) __all__ = ["[a]"] + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + continue; + } + + if (isImportModuleName(nodeFound)) { + if (!isLastNameOfModuleName(nodeFound)) { + // It must be directory and we don't support folder rename. + continue; + } + + const moduleNameNode = getFirstAncestorOrSelfOfKind(nodeFound, ParseNodeType.ModuleName)!; + + // * Enhancement * one case we don't handle is introducing new symbol in __all__ + // or converting "import" statement to "from import" statement. + // + // when the existing statement was "import x as x" and it is changed to + // "import y.z as z". we either need to introduce "z" in __all__ or convert + // "import y.z as z" to "from y import z as z" to make sure we keep the symbol + // visibility same. + // + // when we convert "import x as x" to "from y import z as z", we need to handle + // deletion of existing import statement or (x as x) and inserting/merging + // new "from import" statement. + + // If original module name was single word and it becomes dotted name without alias, + // then we introduce alias to keep references as a single word. 
+ // ex) import [xxx] to import [aaa.bbb as bbb] + if ( + moduleNameNode.nameParts.length === 1 && + moduleNameNode.parent?.nodeType === ParseNodeType.ImportAs && + !moduleNameNode.parent.alias && + this._newModuleNames.length > 1 + ) { + this._aliasIntroduced.add(moduleNameNode.parent); + + this._addResultWithTextRange( + filePath, + moduleNameNode, + parseResults, + `${this._newModuleName} as ${this._newLastModuleName}` + ); + continue; + } + + // Otherwise, update whole module name to new name + // ex) import [xxx.yyy] to import [aaa.bbb] + this._addResultWithTextRange(filePath, moduleNameNode, parseResults, this._newModuleName); + continue; + } + + if (isImportAlias(nodeFound)) { + // ex) import xxx as [yyy] to import xxx as [zzz] + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + continue; + } + + if (isFromImportModuleName(nodeFound)) { + if (!isLastNameOfModuleName(nodeFound)) { + // It must be directory and we don't support folder rename. + continue; + } + + const moduleNameNode = getFirstAncestorOrSelfOfKind(nodeFound, ParseNodeType.ModuleName)!; + const fromNode = moduleNameNode.parent as ImportFromNode; + + // We need to check whether imports of this import statement has + // any implicit submodule imports or not. If there is one, we need to + // either split or leave it as it is. + const exportedSymbols = []; + const subModules = []; + for (const importFromAs of fromNode.imports) { + if (this._isExportedSymbol(importFromAs.name)) { + exportedSymbols.push(importFromAs); + } else { + subModules.push(importFromAs); + } + } + + if (subModules.length === 0) { + // We don't have any sub modules, we can change module name to new one. + // Update whole module name to new name. 
+ // ex) from [xxx.yyy] import zzz to from [aaa.bbb] import zzz + this._addResultWithTextRange( + filePath, + moduleNameNode, + parseResults, + this._getNewModuleName( + filePath, + moduleNameNode.leadingDots > 0, + /* isLastPartImportName */ false + ) + ); + continue; + } + + if (exportedSymbols.length === 0) { + // We only have sub modules. That means module name actually refers to + // folder name, not module (ex, __init__.py). Folder rename is done by + // different code path. + continue; + } + + // Now, we need to split "from import" statement to 2. + + // Update module name if needed. + if (fromNode.module.leadingDots > 0) { + for (const edit of this._getNewRelativeModuleNamesForFileMoved(filePath, [fromNode.module])) { + this._addResultWithTextRange(filePath, edit.moduleName, parseResults, edit.newModuleName); + } + } + + // First, delete existing exported symbols from "from import" statement. + for (const importFromAs of exportedSymbols) { + this._addFromImportNameDeletion( + filePath, + parseResults, + nameRemoved, + fromNode.imports, + importFromAs + ); + } + + importStatements = + importStatements ?? getTopLevelImports(parseResults.parseTree, /*includeImplicitImports*/ false); + + // For now, this won't merge absolute and relative path "from import" + // statement. + this._addResultEdits( + this._getTextEditsForNewOrExistingFromImport( + filePath, + fromNode, + parseResults, + nameRemoved, + importStatements, + this._newModuleName, + exportedSymbols.map((i) => { + const name = + results.findIndex((r) => r.node === i.name) >= 0 + ? this._newLastModuleName + : i.name.value; + const alias = + results.findIndex((r) => r.node === i.alias) >= 0 + ? this._newLastModuleName + : i.alias?.value; + + return { name, alias }; + }) + ) + ); + continue; + } + + if (isFromImportName(nodeFound)) { + if (nameRemoved.has(nodeFound.id)) { + // Import name is already removed. 
+ continue; + } + + const fromNode = nodeFound.parent?.parent as ImportFromNode; + const newModuleName = this._getNewModuleName( + filePath, + fromNode.module.leadingDots > 0, + /* isLastPartImportName */ true + ); + + // If the name bound to symbol re-exported, we don't need to update module name. + // Existing logic should make sure re-exported symbol name work as before after + // symbol rename. + if (this._isExportedSymbol(nodeFound)) { + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + continue; + } + + if (fromNode.imports.length === 1) { + // ex) from xxx import [yyy] to from [aaa.bbb] import [zzz] + this._addResultWithTextRange(filePath, fromNode.module, parseResults, newModuleName); + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + } else { + // Delete the existing import name including alias. + const importFromAs = nodeFound.parent as ImportFromAsNode; + + // Update module name if needed. + if (fromNode.module.leadingDots > 0) { + for (const edit of this._getNewRelativeModuleNamesForFileMoved(filePath, [fromNode.module])) { + this._addResultWithTextRange(filePath, edit.moduleName, parseResults, edit.newModuleName); + } + } + + this._addFromImportNameDeletion( + filePath, + parseResults, + nameRemoved, + fromNode.imports, + importFromAs + ); + + importStatements = + importStatements ?? + getTopLevelImports(parseResults.parseTree, /*includeImplicitImports*/ false); + + // ex) from xxx import yyy, [zzz] to + // from xxx import yyy + // from [aaa.bbb] import [ccc] + // or + // from aaa.bbb import ddd + // from xxx import yyy, [zzz] to + // from aaa.bbb import [ccc], ddd + // + // For now, this won't merge absolute and relative path "from import" + // statement. + const importNameInfo = { + name: this._newLastModuleName, + alias: + importFromAs.alias?.value === this.lastModuleName + ? 
this._newLastModuleName + : importFromAs.alias?.value, + }; + + this._addResultEdits( + this._getTextEditsForNewOrExistingFromImport( + filePath, + fromNode, + parseResults, + nameRemoved, + importStatements, + newModuleName, + [importNameInfo] + ) + ); + } + continue; + } + + if (isFromImportAlias(nodeFound)) { + if (nameRemoved.has(nodeFound.id)) { + // alias is already removed. + continue; + } + + // ex) from ccc import xxx as [yyy] to from ccc import xxx as [zzz] + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + continue; + } + + /** TODO: if we get more than 1 decls, flag it as attention needed */ + const decls = DocumentSymbolCollector.getDeclarationsForNode( + nodeFound, + this._evaluator, + /*resolveLocalName*/ false, + this._token + ).filter((d) => isAliasDeclaration(d)) as AliasDeclaration[]; + + if (this._onlyNameChanged) { + // Simple case. only name has changed. but not path. + // Just replace name to new symbol name. + // ex) a.[b].foo() to a.[z].foo() + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + continue; + } + + if ( + decls?.some( + (d) => + !d.usesLocalName && + (!d.node || d.node.nodeType === ParseNodeType.ImportAs) && + !this._aliasIntroduced.has(d.node) + ) + ) { + const dottedName = getDottedNameWithGivenNodeAsLastName(nodeFound); + if (dottedName.parent?.nodeType !== ParseNodeType.MemberAccess) { + // Replace whole dotted name with new module name. + this._addResultWithTextRange(filePath, dottedName, parseResults, this._newModuleName); + continue; + } + + // Check whether name after me is sub module or not. + // ex) a.b.[c] + const nextNameDecl = this._evaluator.getDeclarationsForNameNode(dottedName.parent.memberName); + if (!nextNameDecl || nextNameDecl.length === 0) { + // Next dotted name is sub module. That means dottedName actually refers to folder names, not modules. + // and We don't support renaming folder. So, leave things as they are. 
+ // ex) import a.b.c + // [a.b].[c] + continue; + } + + // Next name is actual symbol. Replace whole name to new module name. + // ex) import a.b.c + // [a.b.c].[foo]() + this._addResultWithTextRange(filePath, dottedName, parseResults, this._newModuleName); + continue; + } + + if (result.node.value !== this._newLastModuleName) { + this._addResultWithTextRange(filePath, result.range, parseResults, this._newLastModuleName); + continue; + } + } + } + + private _getNewRelativeModuleNamesForFileMoved(filePath: string, moduleNames: ModuleNameNode[]) { + if (filePath !== this._moduleFilePath) { + // We only update relative import paths for the file that has moved. + return []; + } + + const originalFileName = stripFileExtension(getFileName(filePath)); + const originalInit = originalFileName === '__init__'; + const originalDirectory = getDirectoryPath(filePath); + + const newNames: { moduleName: ModuleNameNode; newModuleName: string; itemsToMove?: ImportFromAsNode[] }[] = []; + for (const moduleName of moduleNames) { + // Filter out all absolute path. + if (moduleName.leadingDots === 0) { + continue; + } + + const result = this._getNewModuleNameInfoForFileMoved(moduleName, originalInit, originalDirectory); + if (!result) { + continue; + } + + const newModuleName = getRelativeModuleName( + this._fs, + result.src, + result.dest, + /*ignoreFolderStructure*/ false, + /*sourceIsFile*/ true + ); + + newNames.push({ moduleName, newModuleName, itemsToMove: result.itemsToMove }); + } + + return newNames; + } + + private _getNewModuleNameInfoForFileMoved( + moduleName: ModuleNameNode, + originalInit: boolean, + originalDirectory: string + ) { + const importInfo = getImportInfo(moduleName); + if (!importInfo) { + return undefined; + } + + let importPath = importInfo.resolvedPaths[importInfo.resolvedPaths.length - 1]; + if (!importPath) { + // It is possible for the module name to point to namespace folder (no __init__). 
+ // See whether we can use some heuristic to get importPath + if (moduleName.nameParts.length === 0) { + const directory = getDirectoryLeadingDotsPointsTo(originalDirectory, moduleName.leadingDots); + if (!directory) { + return undefined; + } + + // Add fake __init__.py since we know this is namespace folder. + importPath = combinePaths(directory, '__init__.py'); + } else { + return undefined; + } + } + + // Check whether module is pointing to moved file itself and whether it is __init__ + if (this._moduleFilePath !== importPath || !originalInit) { + return { src: this._newModuleFilePath, dest: importPath }; + } + + // Now, moduleName is pointing to __init__ which point to moved file itself. + + // We need to check whether imports of this import statement has + // any implicit submodule imports or not. If there is one, we need to + // either split or leave it as it is. + const exportedSymbols = []; + const subModules = []; + for (const importFromAs of (moduleName.parent as ImportFromNode).imports) { + if (this._isExportedSymbol(importFromAs.name)) { + exportedSymbols.push(importFromAs); + } else { + subModules.push(importFromAs); + } + } + + // Point to itself. + if (subModules.length === 0) { + return { src: this._newModuleFilePath, dest: this._newModuleFilePath }; + } + + // "." is used to point folder location. + if (exportedSymbols.length === 0) { + return { src: this._newModuleFilePath, dest: this._moduleFilePath }; + } + + // now we need to split, provide split info as well. + return { + src: this._newModuleFilePath, + dest: this._moduleFilePath, + itemsToMove: [...exportedSymbols], + }; + } + + private _isExportedSymbol(nameNode: NameNode): boolean { + const decls = this._evaluator.getDeclarationsForNameNode(nameNode); + if (!decls) { + return false; + } + + // If submoduleFallback exists, then, it points to submodule not symbol. 
+ return !decls.some((d) => isAliasDeclaration(d) && d.submoduleFallback); + } + + private _getNewModuleName(currentFilePath: string, isRelativePath: boolean, isLastPartImportName: boolean) { + const filePath = currentFilePath === this._moduleFilePath ? this._newModuleFilePath : currentFilePath; + + // If the existing code was using relative path, try to keep the relative path. + const moduleName = isRelativePath + ? getRelativeModuleName( + this._fs, + filePath, + this._newModuleFilePath, + isLastPartImportName, + /* sourceIsFile*/ true + ) + : this._newModuleName; + + if (isLastPartImportName && moduleName.endsWith(this._newLastModuleName)) { + const dotPrefix = + moduleName === this._newLastModuleName + ? 0 + : moduleName.length > this._newLastModuleName.length + 1 + ? moduleName[moduleName.length - this._newLastModuleName.length - 2] !== '.' + ? 1 + : 0 + : 0; + + const length = moduleName.length - this._newLastModuleName.length - dotPrefix; + + //ex) x.y.z used in "from x.y import z" + const newModuleName = moduleName.substr(0, length); + return newModuleName.length > 0 ? newModuleName : '.'; + } + + // ex) x.y.z used in "from x.y.z import ..." 
+ return moduleName; + } + + getEdits(): FileEditAction[] { + const edits: FileEditAction[] = []; + this._results.forEach((v) => edits.push(...v)); + + return edits; + } + + get lastModuleName() { + return this._moduleNames[this._moduleNames.length - 1]; + } + + private get _moduleName() { + return this._moduleNameAndType.moduleName; + } + + private get _newLastModuleName() { + return this._newModuleNames[this._newModuleNames.length - 1]; + } + + private get _newModuleName() { + return this._newModuleNameAndType.moduleName; + } + + private _addImportNameDeletion( + filePath: string, + parseResults: ParseResults, + nameRemoved: Set, + imports: ImportAsNode[], + importToDelete: ImportAsNode + ) { + this._addImportNameDeletionInternal( + filePath, + parseResults, + nameRemoved, + imports, + importToDelete, + ParseNodeType.Import + ); + + // Mark that we don't need to process these node again later. + nameRemoved.add(importToDelete.module.id); + importToDelete.module.nameParts.forEach((n) => nameRemoved.add(n.id)); + if (importToDelete.alias) { + nameRemoved.add(importToDelete.alias.id); + } + } + + private _addFromImportNameDeletion( + filePath: string, + parseResults: ParseResults, + nameRemoved: Set, + imports: ImportFromAsNode[], + importToDelete: ImportFromAsNode + ) { + this._addImportNameDeletionInternal( + filePath, + parseResults, + nameRemoved, + imports, + importToDelete, + ParseNodeType.ImportFrom + ); + + // Mark that we don't need to process these node again later. 
+ nameRemoved.add(importToDelete.name.id); + if (importToDelete.alias) { + nameRemoved.add(importToDelete.alias.id); + } + } + + private _addImportNameDeletionInternal( + filePath: string, + parseResults: ParseResults, + nameRemoved: Set, + imports: ImportFromAsNode[] | ImportAsNode[], + importToDelete: ImportFromAsNode | ImportAsNode, + importKind: ParseNodeType.ImportFrom | ParseNodeType.Import + ) { + const range = getTextRangeForImportNameDeletion( + imports, + imports.findIndex((v) => v === importToDelete) + ); + + this._addResultWithTextRange(filePath, range, parseResults, ''); + + // Mark that we don't need to process these node again later. + nameRemoved.add(importToDelete.id); + + // Check whether we have deleted all trailing import names. + // If either no trailing import is deleted or handled properly + // then, there is nothing to do. otherwise, either delete the whole statement + // or remove trailing comma. + // ex) from x import [y], z or from x import y[, z] + let lastImportIndexNotDeleted = 0; + for ( + lastImportIndexNotDeleted = imports.length - 1; + lastImportIndexNotDeleted >= 0; + lastImportIndexNotDeleted-- + ) { + if (!nameRemoved.has(imports[lastImportIndexNotDeleted].id)) { + break; + } + } + + if (lastImportIndexNotDeleted === -1) { + // Whole statement is deleted. Remove the statement itself. 
+ // ex) [from x import a, b, c] or [import a] + const importStatement = getFirstAncestorOrSelfOfKind(importToDelete, importKind); + if (importStatement) { + this._addResultWithRange( + filePath, + getFullStatementRange(importStatement, parseResults.tokenizerOutput), + '' + ); + } + } else if (lastImportIndexNotDeleted >= 0 && lastImportIndexNotDeleted < imports.length - 2) { + // We need to delete trailing comma + // ex) from x import a, [b, c] + const start = TextRange.getEnd(imports[lastImportIndexNotDeleted]); + const length = TextRange.getEnd(imports[lastImportIndexNotDeleted + 1]) - start; + this._addResultWithTextRange(filePath, { start, length }, parseResults, ''); + } + } + + private _addResultWithTextRange(filePath: string, range: TextRange, parseResults: ParseResults, newName: string) { + const existing = parseResults.text.substr(range.start, range.length); + if (existing === newName) { + // No change. Return as it is. + return; + } + + this._addResultWithRange(filePath, convertTextRangeToRange(range, parseResults.tokenizerOutput.lines), newName); + } + + private _addResultEdits(edits: FileEditAction[]) { + edits.forEach((e) => this._addResultWithRange(e.filePath, e.range, e.replacementText)); + } + + private _getDeletionsForSpan(filePathOrEdit: string | FileEditAction[], range: Range) { + if (isString(filePathOrEdit)) { + filePathOrEdit = this._results.get(filePathOrEdit) ?? []; + } + + return filePathOrEdit.filter((e) => e.replacementText === '' && doRangesIntersect(e.range, range)); + } + + private _removeEdits(filePathOrEdit: string | FileEditAction[], edits: FileEditAction[]) { + if (isString(filePathOrEdit)) { + filePathOrEdit = this._results.get(filePathOrEdit) ?? 
[]; + } + + removeArrayElements(filePathOrEdit, (f) => edits.findIndex((e) => e === f) >= 0); + } + + private _addResultWithRange(filePath: string, range: Range, replacementText: string) { + const edits = getOrAdd(this._results, filePath, () => []); + if (replacementText === '') { + // If it is a deletion, merge with overlapping deletion edit if there is any. + const deletions = this._getDeletionsForSpan(edits, range); + if (deletions.length > 0) { + // Delete the existing ones. + this._removeEdits(edits, deletions); + + // Extend range with deleted ones. + extendRange( + range, + deletions.map((d) => d.range) + ); + } + } + + // Don't put duplicated edit. It can happen if code has duplicated module import. + // ex) from a import b, b, c + // If we need to introduce new "from import" statement for "b", we will add new statement twice. + if (edits.some((e) => rangesAreEqual(e.range, range) && e.replacementText === replacementText)) { + return; + } + + edits.push({ filePath, range, replacementText }); + } + + private _getTextEditsForNewOrExistingFromImport( + filePath: string, + currentFromImport: ImportFromNode, + parseResults: ParseResults, + nameRemoved: Set, + importStatements: ImportStatements, + moduleName: string, + importNameInfo: ImportNameInfo[] + ): FileEditAction[] { + // See whether we have existing from import statement for the same module + // ex) from [|moduleName|] import subModule + const imported = importStatements.orderedImports.find((i) => i.moduleName === moduleName); + if (imported && imported.node.nodeType === ParseNodeType.ImportFrom && !imported.node.isWildcardImport) { + const edits = getTextEditsForAutoImportSymbolAddition(importNameInfo, imported, parseResults); + if (imported.node !== currentFromImport) { + // Add what we want to the existing "import from" statement as long as it is not the same import + // node we are working on. 
+ return edits.map((e) => ({ filePath, range: e.range, replacementText: e.replacementText })); + } + + // Check whether we can avoid creating a new statement. We can't just merge with existing one since + // we could create invalid text edits (2 edits that change the same span, or invalid replacement text since + // texts on the node has changed) + if (this._onlyNameChanged && importNameInfo.length === 1 && edits.length === 1) { + const deletions = this._getDeletionsForSpan(filePath, edits[0].range); + if (deletions.length === 0) { + return [{ filePath, range: edits[0].range, replacementText: edits[0].replacementText }]; + } else { + const alias = + importNameInfo[0].alias === this._newLastModuleName + ? this.lastModuleName + : importNameInfo[0].alias; + + const importName = currentFromImport.imports.find( + (i) => i.name.value === this.lastModuleName && i.alias?.value === alias + ); + if (importName) { + this._removeEdits(filePath, deletions); + if (importName.alias) { + nameRemoved.delete(importName.alias.id); + } + + return [ + { + filePath, + range: convertTextRangeToRange(importName.name, parseResults.tokenizerOutput.lines), + replacementText: this._newLastModuleName, + }, + ]; + } + } + } + } + + return getTextEditsForAutoImportInsertion( + importNameInfo, + importStatements, + moduleName, + getImportGroupFromModuleNameAndType(this._newModuleNameAndType), + parseResults, + convertOffsetToPosition(parseResults.parseTree.length, parseResults.tokenizerOutput.lines) + ).map((e) => ({ filePath, range: e.range, replacementText: e.replacementText })); + } +} + +class ModuleNameCollector extends ParseTreeWalker { + private readonly _result: ModuleNameNode[] = []; + + override walk(node: ParseNode): void { + if (isExpressionNode(node)) { + return; + } + + super.walk(node); + } + + override visitModuleName(node: ModuleNameNode) { + this._result.push(node); + return false; + } + + public static collect(root: ModuleNode) { + const collector = new ModuleNameCollector(); + 
collector.walk(root); + + return collector._result; + } +} diff --git a/packages/pyright-internal/src/languageService/tooltipUtils.ts b/packages/pyright-internal/src/languageService/tooltipUtils.ts index 57a5bd5fe628..1eea5ebf5a0f 100644 --- a/packages/pyright-internal/src/languageService/tooltipUtils.ts +++ b/packages/pyright-internal/src/languageService/tooltipUtils.ts @@ -29,6 +29,7 @@ import { OverloadedFunctionType, Type, } from '../analyzer/types'; +import { isDefined } from '../common/core'; // 70 is vscode's default hover width size. export function getOverloadedFunctionTooltip( @@ -67,6 +68,28 @@ export function getFunctionDocStringFromType(type: FunctionType, sourceMapper: S return getFunctionDocStringInherited(type, decl, sourceMapper, classResults?.classType); } +export function getOverloadedFunctionDocStringsFromType( + type: OverloadedFunctionType, + sourceMapper: SourceMapper, + evaluator: TypeEvaluator +) { + if (type.overloads.length === 0) { + return []; + } + + const decl = type.overloads[0].details.declaration; + const enclosingClass = decl ? ParseTreeUtils.getEnclosingClass(decl.node) : undefined; + const classResults = enclosingClass ? 
evaluator.getTypeOfClass(enclosingClass) : undefined; + + return getOverloadedFunctionDocStringsInherited( + type, + type.overloads.map((o) => o.details.declaration).filter(isDefined), + sourceMapper, + evaluator, + classResults?.classType + ); +} + export function getDocumentationPartsForTypeAndDecl( sourceMapper: SourceMapper, type: Type, @@ -86,23 +109,12 @@ export function getDocumentationPartsForTypeAndDecl( return [doc]; } } else if (isFunction(type)) { - if (resolvedDecl?.type === DeclarationType.Function || resolvedDecl?.type === DeclarationType.Class) { - const doc = getFunctionDocStringFromType(type, sourceMapper, evaluator); - if (doc) { - return [doc]; - } + const doc = getFunctionDocStringFromType(type, sourceMapper, evaluator); + if (doc) { + return [doc]; } } else if (isOverloadedFunction(type)) { - const enclosingClass = resolvedDecl ? ParseTreeUtils.getEnclosingClass(resolvedDecl.node) : undefined; - const classResults = enclosingClass ? evaluator.getTypeOfClass(enclosingClass) : undefined; - - return getOverloadedFunctionDocStringsInherited( - type, - resolvedDecl, - sourceMapper, - evaluator, - classResults?.classType - ); + return getOverloadedFunctionDocStringsFromType(type, sourceMapper, evaluator); } else if (resolvedDecl?.type === DeclarationType.Variable) { const doc = getVariableDocString(resolvedDecl, sourceMapper); if (doc) { diff --git a/packages/pyright-internal/src/localization/localize.ts b/packages/pyright-internal/src/localization/localize.ts index 9af4aeca6e08..4f805ad29332 100644 --- a/packages/pyright-internal/src/localization/localize.ts +++ b/packages/pyright-internal/src/localization/localize.ts @@ -87,7 +87,17 @@ function initialize(): StringLookupMap { declare let navigator: { language: string } | undefined; +let localeOverride: string | undefined; + +export function setLocaleOverride(locale: string) { + localeOverride = locale.toLowerCase(); +} + function getLocaleFromEnv() { + if (localeOverride) { + return 
localeOverride; + } + try { if (navigator?.language) { return navigator.language.toLowerCase(); @@ -201,6 +211,7 @@ export namespace Localizer { export const assignmentExprContext = () => getRawString('Diagnostic.assignmentExprContext'); export const assignmentExprComprehension = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.assignmentExprComprehension')); + export const assignmentExprInSubscript = () => getRawString('Diagnostic.assignmentExprInSubscript'); export const assignmentInProtocol = () => getRawString('Diagnostic.assignmentInProtocol'); export const assignmentTargetExpr = () => getRawString('Diagnostic.assignmentTargetExpr'); export const asyncNotInAsyncFunction = () => getRawString('Diagnostic.asyncNotInAsyncFunction'); @@ -211,6 +222,10 @@ export namespace Localizer { export const baseClassInvalid = () => getRawString('Diagnostic.baseClassInvalid'); export const baseClassFinal = () => new ParameterizedString<{ type: string }>(getRawString('Diagnostic.baseClassFinal')); + export const baseClassMethodTypeIncompatible = () => + new ParameterizedString<{ classType: string; name: string }>( + getRawString('Diagnostic.baseClassMethodTypeIncompatible') + ); export const baseClassUnknown = () => getRawString('Diagnostic.baseClassUnknown'); export const bindTypeMismatch = () => new ParameterizedString<{ type: string; methodName: string; paramName: string }>( @@ -221,6 +236,8 @@ export namespace Localizer { export const callableFirstArg = () => getRawString('Diagnostic.callableFirstArg'); export const callableSecondArg = () => getRawString('Diagnostic.callableSecondArg'); export const casePatternIsIrrefutable = () => getRawString('Diagnostic.casePatternIsIrrefutable'); + export const classAlreadySpecialized = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.classAlreadySpecialized')); export const classDecoratorTypeUnknown = () => getRawString('Diagnostic.classDecoratorTypeUnknown'); export const 
classDefinitionCycle = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.classDefinitionCycle')); @@ -228,6 +245,11 @@ export namespace Localizer { export const classMethodClsParam = () => getRawString('Diagnostic.classMethodClsParam'); export const classNotRuntimeSubscriptable = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.classNotRuntimeSubscriptable')); + export const classPatternBuiltInArgCount = () => getRawString('Diagnostic.classPatternBuiltInArgCount'); + export const classPatternBuiltInArgPositional = () => + getRawString('Diagnostic.classPatternBuiltInArgPositional'); + export const classPatternTypeAlias = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.classPatternTypeAlias')); export const classVarNotAllowed = () => getRawString('Diagnostic.classVarNotAllowed'); export const classVarFirstArgMissing = () => getRawString('Diagnostic.classVarFirstArgMissing'); export const classVarOverridesInstanceVar = () => @@ -235,6 +257,7 @@ export namespace Localizer { getRawString('Diagnostic.classVarOverridesInstanceVar') ); export const classVarTooManyArgs = () => getRawString('Diagnostic.classVarTooManyArgs'); + export const classVarWithTypeVar = () => getRawString('Diagnostic.classVarWithTypeVar'); export const clsSelfParamTypeMismatch = () => new ParameterizedString<{ name: string; classType: string }>( getRawString('Diagnostic.clsSelfParamTypeMismatch') @@ -255,10 +278,25 @@ export namespace Localizer { new ParameterizedString<{ name: string }>(getRawString('Diagnostic.constantRedefinition')); export const constructorNoArgs = () => new ParameterizedString<{ type: string }>(getRawString('Diagnostic.constructorNoArgs')); + export const constructorParametersMismatch = () => + new ParameterizedString<{ classType: string }>(getRawString('Diagnostic.constructorParametersMismatch')); export const continueInFinally = () => getRawString('Diagnostic.continueInFinally'); export const 
continueOutsideLoop = () => getRawString('Diagnostic.continueOutsideLoop'); export const dataClassBaseClassNotFrozen = () => getRawString('Diagnostic.dataClassBaseClassNotFrozen'); export const dataClassFieldWithDefault = () => getRawString('Diagnostic.dataClassFieldWithDefault'); + export const dataClassPostInitParamCount = () => + new ParameterizedString<{ expected: number }>(getRawString('Diagnostic.dataClassPostInitParamCount')); + export const dataClassPostInitType = () => + new ParameterizedString<{ fieldName: string }>(getRawString('Diagnostic.dataClassPostInitType')); + export const dataClassSlotsOverwrite = () => getRawString('Diagnostic.dataClassSlotsOverwrite'); + export const dataClassTransformExpectedBoolLiteral = () => + getRawString('Diagnostic.dataClassTransformExpectedBoolLiteral'); + export const dataClassTransformFieldDescriptor = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.dataClassTransformFieldDescriptor')); + export const dataClassTransformPositionalParam = () => + getRawString('Diagnostic.dataClassTransformPositionalParam'); + export const dataClassTransformUnknownArgument = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.dataClassTransformUnknownArgument')); export const declaredReturnTypePartiallyUnknown = () => new ParameterizedString<{ returnType: string }>( getRawString('Diagnostic.declaredReturnTypePartiallyUnknown') @@ -267,6 +305,10 @@ export namespace Localizer { export const defaultValueContainsCall = () => getRawString('Diagnostic.defaultValueContainsCall'); export const defaultValueNotAllowed = () => getRawString('Diagnostic.defaultValueNotAllowed'); export const defaultValueNotEllipsis = () => getRawString('Diagnostic.defaultValueNotEllipsis'); + export const deprecatedType = () => + new ParameterizedString<{ version: string; replacement: string }>( + getRawString('Diagnostic.deprecatedType') + ); export const dictExpandIllegalInComprehension = () => 
getRawString('Diagnostic.dictExpandIllegalInComprehension'); export const dictInAnnotation = () => getRawString('Diagnostic.dictInAnnotation'); @@ -278,6 +320,8 @@ export namespace Localizer { export const duplicateArgsParam = () => getRawString('Diagnostic.duplicateArgsParam'); export const duplicateBaseClass = () => getRawString('Diagnostic.duplicateBaseClass'); export const duplicateCatchAll = () => getRawString('Diagnostic.duplicateCatchAll'); + export const duplicateEnumMember = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.duplicateEnumMember')); export const duplicateImport = () => new ParameterizedString<{ importName: string }>(getRawString('Diagnostic.duplicateImport')); export const duplicateKwargsParam = () => getRawString('Diagnostic.duplicateKwargsParam'); @@ -292,6 +336,9 @@ export namespace Localizer { export const duplicateUnpack = () => getRawString('Diagnostic.duplicateUnpack'); export const ellipsisContext = () => getRawString('Diagnostic.ellipsisContext'); export const ellipsisSecondArg = () => getRawString('Diagnostic.ellipsisSecondArg'); + export const enumClassOverride = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.enumClassOverride')); + export const exceptionGroupIncompatible = () => getRawString('Diagnostic.exceptionGroupIncompatible'); export const exceptionTypeIncorrect = () => new ParameterizedString<{ type: string }>(getRawString('Diagnostic.exceptionTypeIncorrect')); export const exceptionTypeNotClass = () => @@ -306,8 +353,8 @@ export namespace Localizer { export const expectedBoolLiteral = () => getRawString('Diagnostic.expectedBoolLiteral'); export const expectedCase = () => getRawString('Diagnostic.expectedCase'); export const expectedClassName = () => getRawString('Diagnostic.expectedClassName'); - export const expectedCloseBracket = () => getRawString('Diagnostic.expectedCloseBracket'); export const expectedCloseBrace = () => 
getRawString('Diagnostic.expectedCloseBrace'); + export const expectedCloseBracket = () => getRawString('Diagnostic.expectedCloseBracket'); export const expectedCloseParen = () => getRawString('Diagnostic.expectedCloseParen'); export const expectedColon = () => getRawString('Diagnostic.expectedColon'); export const expectedComplexNumberLiteral = () => getRawString('Diagnostic.expectedComplexNumberLiteral'); @@ -370,10 +417,13 @@ export namespace Localizer { export const formatStringUnterminated = () => getRawString('Diagnostic.formatStringUnterminated'); export const functionDecoratorTypeUnknown = () => getRawString('Diagnostic.functionDecoratorTypeUnknown'); export const generatorAsyncReturnType = () => getRawString('Diagnostic.generatorAsyncReturnType'); + export const generatorNotParenthesized = () => getRawString('Diagnostic.generatorNotParenthesized'); export const generatorSyncReturnType = () => getRawString('Diagnostic.generatorSyncReturnType'); export const genericClassAssigned = () => getRawString('Diagnostic.genericClassAssigned'); export const genericClassDeleted = () => getRawString('Diagnostic.genericClassDeleted'); export const genericNotAllowed = () => getRawString('Diagnostic.genericNotAllowed'); + export const genericTypeAliasBoundTypeVar = () => + new ParameterizedString<{ names: string }>(getRawString('Diagnostic.genericTypeAliasBoundTypeVar')); export const genericTypeArgMissing = () => getRawString('Diagnostic.genericTypeArgMissing'); export const genericTypeArgTypeVar = () => getRawString('Diagnostic.genericTypeArgTypeVar'); export const genericTypeArgUnique = () => getRawString('Diagnostic.genericTypeArgUnique'); @@ -433,11 +483,15 @@ export namespace Localizer { new ParameterizedString<{ returnType: string }>( getRawString('Diagnostic.lambdaReturnTypePartiallyUnknown') ); + export const listAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.listAssignmentMismatch')); export const listInAnnotation 
= () => getRawString('Diagnostic.listInAnnotation'); export const literalUnsupportedType = () => getRawString('Diagnostic.literalUnsupportedType'); export const literalEmptyArgs = () => getRawString('Diagnostic.literalEmptyArgs'); export const literalNotCallable = () => getRawString('Diagnostic.literalNotCallable'); export const matchIncompatible = () => getRawString('Diagnostic.matchIncompatible'); + export const matchIsNotExhaustive = () => getRawString('Diagnostic.matchIsNotExhaustive'); + export const maxParseDepthExceeded = () => getRawString('Diagnostic.maxParseDepthExceeded'); export const memberAccess = () => new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.memberAccess')); export const memberDelete = () => @@ -446,6 +500,7 @@ export namespace Localizer { new ParameterizedString<{ name: string; type: string }>(getRawString('Diagnostic.memberSet')); export const metaclassConflict = () => getRawString('Diagnostic.metaclassConflict'); export const metaclassDuplicate = () => getRawString('Diagnostic.metaclassDuplicate'); + export const metaclassIsGeneric = () => getRawString('Diagnostic.metaclassIsGeneric'); export const methodNotDefined = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.methodNotDefined')); export const methodNotDefinedOnType = () => @@ -457,7 +512,11 @@ export namespace Localizer { ); export const methodReturnsNonObject = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.methodReturnsNonObject')); - export const moduleContext = () => getRawString('Diagnostic.moduleContext'); + export const missingProtocolMembers = () => getRawString('Diagnostic.missingProtocolMembers'); + export const missingSuperCall = () => + new ParameterizedString<{ methodName: string }>(getRawString('Diagnostic.missingSuperCall')); + export const moduleAsType = () => getRawString('Diagnostic.moduleAsType'); + export const moduleNotCallable = () => getRawString('Diagnostic.moduleNotCallable'); 
export const moduleUnknownMember = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.moduleUnknownMember')); export const namedExceptAfterCatchAll = () => getRawString('Diagnostic.namedExceptAfterCatchAll'); @@ -534,6 +593,8 @@ export namespace Localizer { export const paramAfterKwargsParam = () => getRawString('Diagnostic.paramAfterKwargsParam'); export const paramAlreadyAssigned = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramAlreadyAssigned')); + export const paramAnnotationMissing = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramAnnotationMissing')); export const paramNameMissing = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.paramNameMissing')); export const paramSpecArgsKwargsUsage = () => getRawString('Diagnostic.paramSpecArgsKwargsUsage'); @@ -586,6 +647,10 @@ export namespace Localizer { getRawString('Diagnostic.protocolBaseClass') ); export const protocolIllegal = () => getRawString('Diagnostic.protocolIllegal'); + export const protocolMemberNotClassVar = () => + new ParameterizedString<{ className: string; memberName: string }>( + getRawString('Diagnostic.protocolMemberNotClassVar') + ); export const protocolNotAllowedInTypeArgument = () => getRawString('Diagnostic.protocolNotAllowedInTypeArgument'); export const protocolUsedInCall = () => @@ -621,6 +686,17 @@ export namespace Localizer { export const revealLocalsArgs = () => getRawString('Diagnostic.revealLocalsArgs'); export const revealLocalsNone = () => getRawString('Diagnostic.revealLocalsNone'); export const revealTypeArgs = () => getRawString('Diagnostic.revealTypeArgs'); + export const revealTypeExpectedTextArg = () => getRawString('Diagnostic.revealTypeExpectedTextArg'); + export const revealTypeExpectedTextMismatch = () => + new ParameterizedString<{ expected: string; received: string }>( + getRawString('Diagnostic.revealTypeExpectedTextMismatch') + ); + export const 
revealTypeExpectedTypeMismatch = () => + new ParameterizedString<{ expected: string; received: string }>( + getRawString('Diagnostic.revealTypeExpectedTypeMismatch') + ); + export const selfTypeContext = () => getRawString('Diagnostic.selfTypeContext'); + export const selfTypeWithTypedSelfOrCls = () => getRawString('Diagnostic.selfTypeWithTypedSelfOrCls'); export const setterGetterTypeMismatch = () => getRawString('Diagnostic.setterGetterTypeMismatch'); export const starPatternInAsPattern = () => getRawString('Diagnostic.starPatternInAsPattern'); export const starPatternInOrPattern = () => getRawString('Diagnostic.starPatternInOrPattern'); @@ -632,6 +708,10 @@ export namespace Localizer { new ParameterizedString<{ name: string }>(getRawString('Diagnostic.slotsClassVarConflict')); export const starStarWildcardNotAllowed = () => getRawString('Diagnostic.starStarWildcardNotAllowed'); export const staticClsSelfParam = () => getRawString('Diagnostic.staticClsSelfParam'); + export const strictTypeGuardReturnType = () => + new ParameterizedString<{ type: string; returnType: string }>( + getRawString('Diagnostic.strictTypeGuardReturnType') + ); export const stringNonAsciiBytes = () => getRawString('Diagnostic.stringNonAsciiBytes'); export const stringNotSubscriptable = () => getRawString('Diagnostic.stringNotSubscriptable'); export const stringUnsupportedEscape = () => getRawString('Diagnostic.stringUnsupportedEscape'); @@ -654,26 +734,27 @@ export namespace Localizer { new ParameterizedString<{ name: string }>(getRawString('Diagnostic.symbolIsPossiblyUnbound')); export const symbolOverridden = () => new ParameterizedString<{ name: string; className: string }>(getRawString('Diagnostic.symbolOverridden')); + export const totalOrderingMissingMethod = () => getRawString('Diagnostic.totalOrderingMissingMethod'); export const trailingCommaInFromImport = () => getRawString('Diagnostic.trailingCommaInFromImport'); export const tryWithoutExcept = () => 
getRawString('Diagnostic.tryWithoutExcept'); + export const tupleAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('Diagnostic.tupleAssignmentMismatch')); export const tupleInAnnotation = () => getRawString('Diagnostic.tupleInAnnotation'); export const tupleIndexOutOfRange = () => - new ParameterizedString<{ length: number; index: number }>(getRawString('Diagnostic.tupleIndexOutOfRange')); - export const tupleSizeMismatch = () => - new ParameterizedString<{ expected: number; received: number }>( - getRawString('Diagnostic.tupleSizeMismatch') - ); + new ParameterizedString<{ type: string; index: number }>(getRawString('Diagnostic.tupleIndexOutOfRange')); export const typeAliasIsRecursiveDirect = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeAliasIsRecursiveDirect')); export const typeAliasIsRecursiveIndirect = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeAliasIsRecursiveIndirect')); - export const typeAliasNotInModule = () => getRawString('Diagnostic.typeAliasNotInModule'); + export const typeAliasNotInModuleOrClass = () => getRawString('Diagnostic.typeAliasNotInModuleOrClass'); export const typeAliasRedeclared = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeAliasRedeclared')); export const typeAnnotationCall = () => getRawString('Diagnostic.typeAnnotationCall'); export const typeAnnotationVariable = () => getRawString('Diagnostic.typeAnnotationVariable'); + export const typeArgListExpected = () => getRawString('Diagnostic.typeArgListExpected'); export const typeArgListNotAllowed = () => getRawString('Diagnostic.typeArgListNotAllowed'); - export const typeArgsExpectingNone = () => getRawString('Diagnostic.typeArgsExpectingNone'); + export const typeArgsExpectingNone = () => + new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeArgsExpectingNone')); export const typeArgsMismatchOne = () => new ParameterizedString<{ 
received: number }>(getRawString('Diagnostic.typeArgsMismatchOne')); export const typeArgsMissingForAlias = () => @@ -729,10 +810,18 @@ export namespace Localizer { new ParameterizedString<{ leftType: string; rightType: string; operator: string }>( getRawString('Diagnostic.typeNotSupportBinaryOperator') ); + export const typeNotSupportBinaryOperatorBidirectional = () => + new ParameterizedString<{ leftType: string; rightType: string; expectedType: string; operator: string }>( + getRawString('Diagnostic.typeNotSupportBinaryOperatorBidirectional') + ); export const typeNotSupportUnaryOperator = () => new ParameterizedString<{ type: string; operator: string }>( getRawString('Diagnostic.typeNotSupportUnaryOperator') ); + export const typeNotSupportUnaryOperatorBidirectional = () => + new ParameterizedString<{ type: string; expectedType: string; operator: string }>( + getRawString('Diagnostic.typeNotSupportUnaryOperatorBidirectional') + ); export const typePartiallyUnknown = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typePartiallyUnknown')); export const typeUnknown = () => @@ -754,6 +843,7 @@ export namespace Localizer { new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarNotUsedByOuterScope')); export const typeVarSingleConstraint = () => getRawString('Diagnostic.typeVarSingleConstraint'); export const typeVarsNotInGeneric = () => getRawString('Diagnostic.typeVarsNotInGeneric'); + export const typeVarTupleMustBeUnpacked = () => getRawString('Diagnostic.typeVarTupleMustBeUnpacked'); export const typeVarTupleContext = () => getRawString('Diagnostic.typeVarTupleContext'); export const typeVarUnknownParam = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.typeVarUnknownParam')); @@ -777,6 +867,7 @@ export namespace Localizer { export const unexpectedIndent = () => getRawString('Diagnostic.unexpectedIndent'); export const unexpectedUnindent = () => getRawString('Diagnostic.unexpectedUnindent'); 
export const unionSyntaxIllegal = () => getRawString('Diagnostic.unionSyntaxIllegal'); + export const unionTypeArgCount = () => getRawString('Diagnostic.unionTypeArgCount'); export const uninitializedInstanceVariable = () => new ParameterizedString<{ name: string }>(getRawString('Diagnostic.uninitializedInstanceVariable')); export const unnecessaryCast = () => @@ -789,6 +880,7 @@ export namespace Localizer { new ParameterizedString<{ testType: string; classType: string }>( getRawString('Diagnostic.unnecessaryIsSubclassAlways') ); + export const unnecessaryTypeIgnore = () => getRawString('Diagnostic.unnecessaryTypeIgnore'); export const unpackArgCount = () => getRawString('Diagnostic.unpackArgCount'); export const unpackedArgInTypeArgument = () => getRawString('Diagnostic.unpackedArgInTypeArgument'); export const unpackedArgWithVariadicParam = () => getRawString('Diagnostic.unpackedArgWithVariadicParam'); @@ -799,13 +891,17 @@ export namespace Localizer { new ParameterizedString<{ name1: string; name2: string }>( getRawString('Diagnostic.unpackedTypeVarTupleExpected') ); + export const unpackExpectedTypedDict = () => getRawString('Diagnostic.unpackExpectedTypedDict'); export const unpackExpectedTypeVarTuple = () => getRawString('Diagnostic.unpackExpectedTypeVarTuple'); export const unpackIllegalInComprehension = () => getRawString('Diagnostic.unpackIllegalInComprehension'); + export const unpackInAnnotation = () => getRawString('Diagnostic.unpackInAnnotation'); export const unpackInDict = () => getRawString('Diagnostic.unpackInDict'); export const unpackInSet = () => getRawString('Diagnostic.unpackInSet'); export const unpackNotAllowed = () => getRawString('Diagnostic.unpackNotAllowed'); + export const unpackOperatorNotAllowed = () => getRawString('Diagnostic.unpackOperatorNotAllowed'); export const unpackTuplesIllegal = () => getRawString('Diagnostic.unpackTuplesIllegal'); export const unreachableCode = () => getRawString('Diagnostic.unreachableCode'); + export 
const unreachableExcept = () => getRawString('Diagnostic.unreachableExcept'); export const unsupportedDunderAllOperation = () => getRawString('Diagnostic.unsupportedDunderAllOperation'); export const unusedCallResult = () => new ParameterizedString<{ type: string }>(getRawString('Diagnostic.unusedCallResult')); @@ -841,17 +937,26 @@ export namespace Localizer { ); export const argsParamMissing = () => new ParameterizedString<{ paramName: string }>(getRawString('DiagnosticAddendum.argsParamMissing')); - export const argsParamWithVariadic = () => - new ParameterizedString<{ paramName: string }>(getRawString('DiagnosticAddendum.argsParamWithVariadic')); + export const argsPositionOnly = () => + new ParameterizedString<{ expected: number; received: number }>( + getRawString('DiagnosticAddendum.argsPositionOnly') + ); export const argumentType = () => new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.argumentType')); export const argumentTypes = () => new ParameterizedString<{ types: string }>(getRawString('DiagnosticAddendum.argumentTypes')); export const assignToNone = () => getRawString('DiagnosticAddendum.assignToNone'); export const asyncHelp = () => getRawString('DiagnosticAddendum.asyncHelp'); - export const dataclassFrozen = () => - new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.dataclassFrozen')); + export const baseClassProvidesType = () => + new ParameterizedString<{ baseClass: string; type: string }>( + getRawString('DiagnosticAddendum.baseClassProvidesType') + ); + export const dataClassFrozen = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.dataClassFrozen')); + export const dataClassFieldLocation = () => getRawString('DiagnosticAddendum.dataClassFieldLocation'); export const finalMethod = () => getRawString('DiagnosticAddendum.finalMethod'); + export const functionParamDefaultMissing = () => + new ParameterizedString<{ name: string 
}>(getRawString('DiagnosticAddendum.functionParamDefaultMissing')); export const functionParamName = () => new ParameterizedString<{ destName: string; srcName: string }>( getRawString('DiagnosticAddendum.functionParamName') @@ -867,6 +972,10 @@ export namespace Localizer { export const incompatibleGetter = () => getRawString('DiagnosticAddendum.incompatibleGetter'); export const incompatibleSetter = () => getRawString('DiagnosticAddendum.incompatibleSetter'); export const incompatibleDeleter = () => getRawString('DiagnosticAddendum.incompatibleDeleter'); + export const initMethodLocation = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.initMethodLocation')); + export const initMethodSignature = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.initMethodSignature')); export const functionTooManyParams = () => new ParameterizedString<{ expected: number; received: number }>( getRawString('DiagnosticAddendum.functionTooManyParams') @@ -879,10 +988,15 @@ export namespace Localizer { new ParameterizedString<{ name: string; type: string }>(getRawString('DiagnosticAddendum.keyUndefined')); export const kwargsParamMissing = () => new ParameterizedString<{ paramName: string }>(getRawString('DiagnosticAddendum.kwargsParamMissing')); + export const listAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.listAssignmentMismatch')); export const literalAssignmentMismatch = () => new ParameterizedString<{ sourceType: string; destType: string }>( getRawString('DiagnosticAddendum.literalAssignmentMismatch') ); + export const matchIsNotExhaustiveType = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.matchIsNotExhaustiveType')); + export const matchIsNotExhaustiveHint = () => getRawString('DiagnosticAddendum.matchIsNotExhaustiveHint'); export const memberAssignment = () => new ParameterizedString<{ type: string; name: string; 
classType: string }>( getRawString('DiagnosticAddendum.memberAssignment') @@ -895,6 +1009,8 @@ export namespace Localizer { new ParameterizedString<{ count: number }>(getRawString('DiagnosticAddendum.memberIsAbstractMore')); export const memberIsFinalInProtocol = () => new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsFinalInProtocol')); + export const memberIsInitVar = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsInitVar')); export const memberIsNotFinalInProtocol = () => new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberIsNotFinalInProtocol')); export const memberSetClassVar = () => @@ -903,6 +1019,10 @@ export namespace Localizer { new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberTypeMismatch')); export const memberUnknown = () => new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.memberUnknown')); + export const missingProtocolMember = () => + new ParameterizedString<{ name: string; classType: string }>( + getRawString('DiagnosticAddendum.missingProtocolMember') + ); export const missingGetter = () => getRawString('DiagnosticAddendum.missingGetter'); export const missingSetter = () => getRawString('DiagnosticAddendum.missingSetter'); export const missingDeleter = () => getRawString('DiagnosticAddendum.missingDeleter'); @@ -914,6 +1034,10 @@ export namespace Localizer { new ParameterizedString<{ name: string; sourceType: string; destType: string }>( getRawString('DiagnosticAddendum.namedParamTypeMismatch') ); + export const newMethodLocation = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.newMethodLocation')); + export const newMethodSignature = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.newMethodSignature')); export const noOverloadAssignable = () => new ParameterizedString<{ type: string 
}>(getRawString('DiagnosticAddendum.noOverloadAssignable')); export const orPatternMissingName = () => @@ -921,7 +1045,6 @@ export namespace Localizer { export const overloadMethod = () => getRawString('DiagnosticAddendum.overloadMethod'); export const overloadNotAssignable = () => new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.overloadNotAssignable')); - export const overloadTooManyUnions = () => getRawString('DiagnosticAddendum.overloadTooManyUnions'); export const overloadWithImplementation = () => getRawString('DiagnosticAddendum.overloadWithImplementation'); export const overriddenMethod = () => getRawString('DiagnosticAddendum.overriddenMethod'); export const overriddenSymbol = () => getRawString('DiagnosticAddendum.overriddenSymbol'); @@ -933,6 +1056,10 @@ export namespace Localizer { new ParameterizedString<{ index: number; baseName: string; overrideName: string }>( getRawString('DiagnosticAddendum.overrideParamName') ); + export const overrideParamNamePositionOnly = () => + new ParameterizedString<{ index: number; baseName: string }>( + getRawString('DiagnosticAddendum.overrideParamNamePositionOnly') + ); export const overrideParamType = () => new ParameterizedString<{ index: number; baseType: string; overrideType: string }>( getRawString('DiagnosticAddendum.overrideParamType') @@ -954,6 +1081,8 @@ export namespace Localizer { new ParameterizedString<{ module: string }>( getRawString('DiagnosticAddendum.privateImportFromPyTypedSource') ); + export const propertyAccessFromProtocolClass = () => + getRawString('DiagnosticAddendum.propertyAccessFromProtocolClass'); export const propertyMethodIncompatible = () => new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.propertyMethodIncompatible')); export const propertyMethodMissing = () => @@ -970,6 +1099,12 @@ export namespace Localizer { ); export const protocolMemberMissing = () => new ParameterizedString<{ name: string 
}>(getRawString('DiagnosticAddendum.protocolMemberMissing')); + export const protocolSourceIsNotConcrete = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.protocolSourceIsNotConcrete') + ); + export const readOnlyAttribute = () => + new ParameterizedString<{ name: string }>(getRawString('DiagnosticAddendum.readOnlyAttribute')); export const seeDeclaration = () => getRawString('DiagnosticAddendum.seeDeclaration'); export const seeClassDeclaration = () => getRawString('DiagnosticAddendum.seeClassDeclaration'); export const seeFunctionDeclaration = () => getRawString('DiagnosticAddendum.seeFunctionDeclaration'); @@ -978,6 +1113,8 @@ export namespace Localizer { export const seeVariableDeclaration = () => getRawString('DiagnosticAddendum.seeVariableDeclaration'); export const tupleEntryTypeMismatch = () => new ParameterizedString<{ entry: number }>(getRawString('DiagnosticAddendum.tupleEntryTypeMismatch')); + export const tupleAssignmentMismatch = () => + new ParameterizedString<{ type: string }>(getRawString('DiagnosticAddendum.tupleAssignmentMismatch')); export const tupleSizeMismatch = () => new ParameterizedString<{ expected: number; received: number }>( getRawString('DiagnosticAddendum.tupleSizeMismatch') @@ -1022,6 +1159,10 @@ export namespace Localizer { new ParameterizedString<{ name: string; type: string }>( getRawString('DiagnosticAddendum.typedDictFieldUndefined') ); + export const typedDictFinalMismatch = () => + new ParameterizedString<{ sourceType: string; destType: string }>( + getRawString('DiagnosticAddendum.typedDictFinalMismatch') + ); export const typeIncompatible = () => new ParameterizedString<{ sourceType: string; destType: string }>( getRawString('DiagnosticAddendum.typeIncompatible') @@ -1049,6 +1190,10 @@ export namespace Localizer { export const typeVarNotAllowed = () => getRawString('DiagnosticAddendum.typeVarNotAllowed'); export const typeVarsMissing = () => new 
ParameterizedString<{ names: string }>(getRawString('DiagnosticAddendum.typeVarsMissing')); + export const unreachableExcept = () => + new ParameterizedString<{ exceptionType: string; parentType: string }>( + getRawString('DiagnosticAddendum.unreachableExcept') + ); export const useDictInstead = () => getRawString('DiagnosticAddendum.useDictInstead'); export const useListInstead = () => getRawString('DiagnosticAddendum.useListInstead'); export const useTupleInstead = () => getRawString('DiagnosticAddendum.useTupleInstead'); @@ -1067,4 +1212,19 @@ export namespace Localizer { export const findingReferences = () => getRawString('CodeAction.findingReferences'); export const organizeImports = () => getRawString('CodeAction.organizeImports'); } + + export namespace Refactoring { + export const moveFile = () => + new ParameterizedString<{ oldModuleName: string; newModuleName: string }>( + getRawString('Refactoring.moveFile') + ); + export const moveFileLabel = () => + new ParameterizedString<{ oldModuleName: string; newModuleName: string }>( + getRawString('Refactoring.moveFileLabel') + ); + export const moveFileDescription = () => + new ParameterizedString<{ oldModuleName: string; newModuleName: string }>( + getRawString('Refactoring.moveFileDescription') + ); + } } diff --git a/packages/pyright-internal/src/localization/package.nls.en-us.json b/packages/pyright-internal/src/localization/package.nls.en-us.json index da21e68b8fc7..9ea0a4f45bad 100644 --- a/packages/pyright-internal/src/localization/package.nls.en-us.json +++ b/packages/pyright-internal/src/localization/package.nls.en-us.json @@ -23,6 +23,7 @@ "assignmentExprContext": "Assignment expression must be within module, function or lambda", "assignmentExprComprehension": "Assignment expression target \"{name}\" cannot use same name as comprehension for target", "assignmentInProtocol": "Instance or class variables within a Protocol class must be explicitly declared within the class body", + 
"assignmentExprInSubscript": "Assignment expressions within a subscript are supported only in Python 3.10 and newer", "assignmentTargetExpr": "Expression cannot be assignment target", "asyncNotInAsyncFunction": "Use of \"async\" not allowed outside of async function", "awaitIllegal": "Use of \"await\" requires Python 3.5 or newer", @@ -31,6 +32,7 @@ "baseClassCircular": "Class cannot derive from itself", "baseClassFinal": "Base class \"{type}\" is marked final and cannot be subclassed", "baseClassInvalid": "Argument to class must be a base class", + "baseClassMethodTypeIncompatible": "Base classes for class \"{classType}\" define method \"{name}\" in incompatible way", "baseClassUnknown": "Base class type is unknown, obscuring type of derived class", "bindTypeMismatch": "Could not bind method \"{methodName}\" because \"{type}\" is not assignable to parameter \"{paramName}\"", "breakOutsideLoop": "\"break\" can be used only within a loop", @@ -38,15 +40,20 @@ "callableFirstArg": "Expected parameter type list or \"...\"", "callableSecondArg": "Expected return type as second type argument for \"Callable\"", "casePatternIsIrrefutable": "Irrefutable pattern is allowed only for the last case statement", + "classAlreadySpecialized": "Type \"{type}\" is already specialized", "classDecoratorTypeUnknown": "Untyped class decorator obscures type of class; ignoring decorator", "classDefinitionCycle": "Class definition for \"{name}\" depends on itself", "classGetItemClsParam": "__class_getitem__ override should take a \"cls\" parameter", "classMethodClsParam": "Class methods should take a \"cls\" parameter", "classNotRuntimeSubscriptable": "Subscript for class \"{name}\" will generate runtime exception; enclose type annotation in quotes", + "classPatternBuiltInArgCount": "Class pattern accepts at most 1 positional sub-pattern", + "classPatternBuiltInArgPositional": "Class pattern accepts only positional sub-pattern", + "classPatternTypeAlias": "\"{type}\" cannot be used in a 
class pattern because it is a specialized type alias", "classVarNotAllowed": "\"ClassVar\" is not allowed in this context", "classVarFirstArgMissing": "Expected a type argument after \"ClassVar\"", "classVarOverridesInstanceVar": "Class variable \"{name}\" overrides instance variable of same name in class \"{className}\"", "classVarTooManyArgs": "Expected only one type argument after \"ClassVar\"", + "classVarWithTypeVar": "\"ClassVar\" type cannot include type variables", "clsSelfParamTypeMismatch": "Type of parameter \"{name}\" must be a supertype of its class \"{classType}\"", "comparisonAlwaysFalse": "Condition will always evaluate to False since the types \"{leftType}\" and \"{rightType}\" have no overlap", "comparisonAlwaysTrue": "Condition will always evaluate to True since the types \"{leftType}\" and \"{rightType}\" have no overlap", @@ -55,16 +62,25 @@ "concatenateParamSpecMissing": "Last type argument for \"Concatenate\" must be a ParamSpec", "concatenateTypeArgsMissing": "\"Concatenate\" requires at least two type arguments", "constantRedefinition": "\"{name}\" is constant (because it is uppercase) and cannot be redefined", + "constructorParametersMismatch": "Mismatch between signature of __new__ and __init__ in class \"{classType}\"", "continueInFinally": "\"continue\" cannot be used within a finally clause", "continueOutsideLoop": "\"continue\" can be used only within a loop", "constructorNoArgs": "Expected no arguments to \"{type}\" constructor", "dataClassBaseClassNotFrozen": "A frozen class cannot inherit from a class that is not frozen", "dataClassFieldWithDefault": "Fields without default values cannot appear after fields with default values", + "dataClassPostInitParamCount": "Dataclass __post_init__ incorrect parameter count; number of InitVar fields is {expected}", + "dataClassPostInitType": "Dataclass __post_init__ method parameter type mismatch for field \"{fieldName}\"", + "dataClassSlotsOverwrite": "__slots__ is already defined in class", + 
"dataClassTransformExpectedBoolLiteral": "Expected expression that statically evaluates to True or False", + "dataClassTransformFieldDescriptor": "Expected tuple of classes or functions but received type \"{type}\"", + "dataClassTransformPositionalParam": "All arguments to \"dataclass_transform\" must be keyword arguments", + "dataClassTransformUnknownArgument": "Argument \"{name}\" is not supported by dataclass_transform", "declaredReturnTypePartiallyUnknown": "Declared return type, \"{returnType}\", is partially unknown", "declaredReturnTypeUnknown": "Declared return type is unknown", "defaultValueContainsCall": "Function calls and mutable objects not allowed within parameter default value expression", "defaultValueNotAllowed": "Parameter with \"*\" or \"**\" cannot have default value", "defaultValueNotEllipsis": "Default values in stub files should be specified as \"...\"", + "deprecatedType": "This type is deprecated as of Python {version}; use \"{replacement}\" instead", "delTargetExpr": "Expression cannot be deleted", "dictExpandIllegalInComprehension": "Dictionary expansion not allowed in comprehension", "dictInAnnotation": "Dictionary expression not allowed in type annotation", @@ -74,6 +90,7 @@ "duplicateArgsParam": "Only one \"*\" parameter allowed", "duplicateBaseClass": "Duplicate base class not allowed", "duplicateCatchAll": "Only one catch-all except clause allowed", + "duplicateEnumMember": "Enum member \"{name}\" is already declared", "duplicateImport": "\"{importName}\" is imported more than once", "duplicateKwargsParam": "Only one \"**\" parameter allowed", "duplicateKeywordOnly": "Only one \"*\" separator allowed", @@ -85,6 +102,8 @@ "duplicateUnpack": "Only one unpack operation allowed in list", "ellipsisContext": "\"...\" not allowed in this context", "ellipsisSecondArg": "\"...\" allowed only as the second of two arguments", + "enumClassOverride": "Enum class \"{name}\" is final and cannot be subclassed", + "exceptionGroupIncompatible": 
"Exception group syntax (\"except*\") requires Python 3.11 or newer", "exceptionTypeIncorrect": "\"{type}\" does not derive from BaseException", "exceptionTypeNotClass": "\"{type}\" is not valid exception class", "exceptionTypeNotInstantiable": "Constructor for exception type \"{type}\" requires one or more arguments", @@ -96,9 +115,9 @@ "expectedBoolLiteral": "Expected True or False", "expectedCase": "Expected \"case\" statement", "expectedClassName": "Expected class name", - "expectedCloseBrace": "Expected \"}\"", - "expectedCloseBracket": "Expected \"]\"", - "expectedCloseParen": "Expected \")\"", + "expectedCloseBrace": "\"{\" was not closed", + "expectedCloseBracket": "\"[\" was not closed", + "expectedCloseParen": "\"(\" was not closed", "expectedColon": "Expected \":\"", "expectedComplexNumberLiteral": "Expected complex number literal for pattern matching", "expectedDecoratorExpr": "Expression form not supported for decorator prior to Python 3.9", @@ -150,10 +169,12 @@ "formatStringUnterminated": "Unterminated expression in f-string; missing close brace", "functionDecoratorTypeUnknown": "Untyped function decorator obscures type of function; ignoring decorator", "generatorAsyncReturnType": "Return type of async generator function must be \"AsyncGenerator\" or \"AsyncIterable\"", + "generatorNotParenthesized": "Generator expressions must be parenthesized if not sole argument", "generatorSyncReturnType": "Return type of generator function must be \"Generator\" or \"Iterable\"", "genericClassAssigned": "Generic class type cannot be assigned", "genericClassDeleted": "Generic class type cannot be deleted", "genericNotAllowed": "\"Generic\" is not valid in this context", + "genericTypeAliasBoundTypeVar": "Generic type alias within class cannot use bound type variables {names}", "genericTypeArgMissing": "\"Generic\" requires at least one type argument", "genericTypeArgTypeVar": "Type argument for \"Generic\" must be a type variable", "genericTypeArgUnique": "Type 
arguments for \"Generic\" must be unique", @@ -188,22 +209,29 @@ "keywordSubscriptIllegal": "Keyword arguments within subscripts are not supported", "lambdaReturnTypeUnknown": "Return type of lambda is unknown", "lambdaReturnTypePartiallyUnknown": "Return type of lambda, \"{returnType}\", is partially unknown", + "listAssignmentMismatch": "Expression with type \"{type}\" cannot be assigned to target list", "listInAnnotation": "List expression not allowed in type annotation", "literalUnsupportedType": "Type arguments for \"Literal\" must be None, a literal value (int, bool, str, or bytes), or an enum value", "literalEmptyArgs": "Expected one or more type arguments after \"Literal\"", "literalNotCallable": "Literal type cannot be instantiated", "matchIncompatible": "Match statements require Python 3.10 or newer", + "matchIsNotExhaustive": "Cases within match statement do not exhaustively handle all values", + "maxParseDepthExceeded": "Maximum parse depth exceeded; break expression into smaller sub-expressions", "memberAccess": "Cannot access member \"{name}\" for type \"{type}\"", "memberDelete": "Cannot delete member \"{name}\" for type \"{type}\"", "memberSet": "Cannot assign member \"{name}\" for type \"{type}\"", "metaclassConflict": "The metaclass of a derived class must be a subclass of the metaclasses of all its base classes", "metaclassDuplicate": "Only one metaclass can be provided", + "metaclassIsGeneric": "Metaclass cannot be generic", "methodNotDefined": "\"{name}\" method not defined", "methodNotDefinedOnType": "\"{name}\" method not defined on type \"{type}\"", "methodOrdering": "Cannot create consistent method ordering", "methodOverridden": "\"{name}\" overrides method of same name in class \"{className}\" with incompatible type \"{type}\"", "methodReturnsNonObject": "\"{name}\" method does not return an object", - "moduleContext": "Module not allowed in this context", + "missingProtocolMembers": "Class derives from one or more protocol classes but 
does not implement all required members", + "missingSuperCall": "Method \"{methodName}\" does not call the method of the same name in parent class", + "moduleAsType": "Module cannot be used as a type", + "moduleNotCallable": "Module is not callable", "moduleUnknownMember": "\"{name}\" is not a known member of module", "namedExceptAfterCatchAll": "A named except clause cannot appear after catch-all except clause", "namedParamAfterParamSpecArgs": "Keyword parameter \"{name}\" cannot appear in signature after ParamSpec args parameter", @@ -234,7 +262,7 @@ "noReturnContainsYield": "Function with declared return type \"NoReturn\" cannot include a yield statement", "noReturnReturnsNone": "Function with declared type of \"NoReturn\" cannot return \"None\"", "notRequiredArgCount": "Expected a single type argument after \"NotRequired\"", - "notRequiredNotInTypedDict": "\"NotRequired\" is allowed only within TypedDict", + "notRequiredNotInTypedDict": "\"NotRequired\" is not allowed in this context", "objectNotCallable": "Object of type \"{type}\" is not callable", "obscuredClassDeclaration": "Class declaration \"{name}\" is obscured by a declaration of the same name", "obscuredFunctionDeclaration": "Function declaration \"{name}\" is obscured by a declaration of the same name", @@ -253,6 +281,7 @@ "overloadWithoutImplementation": "\"{name}\" is marked as overload, but no implementation is provided", "paramAfterKwargsParam": "Parameter cannot follow \"**\" parameter", "paramAlreadyAssigned": "Parameter \"{name}\" is already assigned", + "paramAnnotationMissing": "Type annotation is missing for parameter \"{name}\"", "paramNameMissing": "No parameter named \"{name}\"", "paramSpecArgsKwargsUsage": "\"args\" and \"kwargs\" members of ParamSpec must both appear within a function signature", "paramSpecArgsUsage": "\"args\" member of ParamSpec is valid only when used with *args parameter", @@ -283,6 +312,7 @@ "protectedUsedOutsideOfClass": "\"{name}\" is protected and used outside 
of the class in which it is declared", "protocolBaseClass": "Protocol class \"{classType}\" cannot derive from non-protocol class \"{baseType}\"", "protocolIllegal": "Use of \"Protocol\" requires Python 3.7 or newer", + "protocolMemberNotClassVar": "Protocol class \"{className}\" does not define \"{memberName}\" as a ClassVar", "protocolNotAllowedInTypeArgument": "\"Protocol\" cannot be used as a type argument", "protocolVarianceContravariant": "Type variable \"{variable}\" used in generic protocol \"{class}\" should be contravariant", "protocolVarianceCovariant": "Type variable \"{variable}\" used in generic protocol \"{class}\" should be covariant", @@ -291,7 +321,7 @@ "relativeImportNotAllowed": "Relative imports cannot be used with \"import .a\" form; use \"from . import a\" instead", "recursiveDefinition": "Type of \"{name}\" could not be determined because it refers to itself", "requiredArgCount": "Expected a single type argument after \"Required\"", - "requiredNotInTypedDict": "\"Required\" is allowed only within TypedDict", + "requiredNotInTypedDict": "\"Required\" is not allowed in this context", "returnOutsideFunction": "\"return\" can be used only within a function", "returnMissing": "Function with declared type of \"{returnType}\" must return value", "returnTypeContravariant": "Contravariant type variable cannot be used in return type", @@ -301,6 +331,11 @@ "revealLocalsArgs": "Expected no arguments for \"reveal_locals\" call", "revealLocalsNone": "No locals in this scope", "revealTypeArgs": "Expected a single positional argument for \"reveal_type\" call", + "revealTypeExpectedTextArg": "The \"expected_text\" argument for function \"reveal_type\" must be a str literal value", + "revealTypeExpectedTextMismatch": "Type text mismatch; expected \"{expected}\" but received \"{received}\"", + "revealTypeExpectedTypeMismatch": "Type mismatch; expected \"{expected}\" but received \"{received}\"", + "selfTypeContext": "\"Self\" is not valid in this context", + 
"selfTypeWithTypedSelfOrCls": "\"Self\" cannot be used in a function with a `self` or `cls` parameter that has a type annotation other than \"Self\"", "setterGetterTypeMismatch": "Property setter value type is not assignable to the getter return type", "singleOverload": "\"{name}\" is marked as overload, but additional overloads are missing", "slotsAttributeError": "\"{name}\" is not specified in __slots__", @@ -309,6 +344,7 @@ "starPatternInOrPattern": "Star pattern cannot be ORed within other patterns", "starStarWildcardNotAllowed": "** cannot be used with wildcard \"_\"", "staticClsSelfParam": "Static methods should not take a \"self\" or \"cls\" parameter", + "strictTypeGuardReturnType": "Return type of StrictTypeGuard (\"{returnType}\") is not assignable to value parameter type (\"{type}\")", "stringNonAsciiBytes": "Non-ASCII character not allowed in bytes string literal", "stringNotSubscriptable": "String expression cannot be subscripted in type annotation; enclose entire annotation in quotes", "stringUnsupportedEscape": "Unsupported escape sequence in string literal", @@ -324,19 +360,21 @@ "symbolIsUndefined": "\"{name}\" is not defined", "symbolIsPossiblyUnbound": "\"{name}\" is possibly unbound", "symbolOverridden": "\"{name}\" overrides symbol of same name in class \"{className}\"", + "totalOrderingMissingMethod": "Class must define one of \"__lt__\", \"__le__\", \"__gt__\", or \"__ge__\" to use total_ordering", "trailingCommaInFromImport": "Trailing comma not allowed without surrounding parentheses", "tryWithoutExcept": "Try statement must have at least one except or finally clause", + "tupleAssignmentMismatch": "Expression with type \"{type}\" cannot be assigned to target tuple", "tupleInAnnotation": "Tuple expression not allowed in type annotation", - "tupleIndexOutOfRange": "Index {index} is out of range for tuple with length {length}", - "tupleSizeMismatch": "Tuple size mismatch: expected {expected} but received {received}", + "tupleIndexOutOfRange": 
"Index {index} is out of range for type {type}", "typeAliasIsRecursiveDirect": "Type alias \"{name}\" cannot use itself in its definition", "typeAliasIsRecursiveIndirect": "Type alias \"{name}\" cannot refer to itself indirectly in its definition", - "typeAliasNotInModule": "A TypeAlias can be defined only within a module scope", + "typeAliasNotInModuleOrClass": "A TypeAlias can be defined only within a module or class scope", "typeAliasRedeclared": "\"{name}\" is declared as a TypeAlias and can be assigned only once", + "typeArgListExpected": "Expected ParamSpec, ellipsis, or list of types", "typeArgListNotAllowed": "List expression not allowed for this type argument", "typeAnnotationCall": "Illegal type annotation: call expression not allowed", "typeAnnotationVariable": "Illegal type annotation: variable not allowed unless it is a type alias", - "typeArgsExpectingNone": "Expected no type arguments", + "typeArgsExpectingNone": "Expected no type arguments for class \"{name}\"", "typeArgsMismatchOne": "Expected one type argument but received {received}", "typeArgsMissingForAlias": "Expected type arguments for generic type alias \"{name}\"", "typeArgsMissingForClass": "Expected type arguments for generic class \"{name}\"", @@ -368,7 +406,9 @@ "typeNotSubscriptable": "Object of type \"{type}\" is not subscriptable", "typeNotUsableWith": "Object of type \"{type}\" cannot be used with \"with\" because it does not implement {method}", "typeNotSupportBinaryOperator": "Operator \"{operator}\" not supported for types \"{leftType}\" and \"{rightType}\"", + "typeNotSupportBinaryOperatorBidirectional": "Operator \"{operator}\" not supported for types \"{leftType}\" and \"{rightType}\" when expected type is \"{expectedType}\"", "typeNotSupportUnaryOperator": "Operator \"{operator}\" not supported for type \"{type}\"", + "typeNotSupportUnaryOperatorBidirectional": "Operator \"{operator}\" not supported for type \"{type}\" when expected type is \"{expectedType}\"", 
"typePartiallyUnknown": "Type of \"{name}\" is partially unknown", "typeUnknown": "Type of \"{name}\" is unknown", "typeVarAssignedName": "TypeVar must be assigned to a variable named \"{name}\"", @@ -382,6 +422,7 @@ "typeVarSingleConstraint": "TypeVar cannot have only a single constraint", "typeVarsNotInGeneric": "Generic[] must include all type variables", "typeVarTupleContext": "TypeVarTuple not allowed in this context", + "typeVarTupleMustBeUnpacked": "Unpack operator is required for TypeVarTuple value", "typeVarUnknownParam": "\"{name}\" is unknown parameter to TypeVar", "typeVarUsedByOuterScope": "TypeVar \"{name}\" is already in use by an outer scope", "typeVarUsedOnlyOnce": "TypeVar \"{name}\" appears only once in generic function signature", @@ -396,29 +437,35 @@ "unexpectedIndent": "Unexpected indentation", "unexpectedUnindent": "Unindent not expected", "unionSyntaxIllegal": "Alternative syntax for unions requires Python 3.10 or newer", + "unionTypeArgCount": "Union requires two or more type arguments", "uninitializedInstanceVariable": "Instance variable \"{name}\" is not initialized in the class body or __init__ method", "unnecessaryCast": "Unnecessary \"cast\" call; type is already \"{type}\"", "unnecessaryIsInstanceAlways": "Unnecessary isinstance call; \"{testType}\" is always an instance of \"{classType}\"", "unnecessaryIsSubclassAlways": "Unnecessary issubclass call; \"{testType}\" is always a subclass of \"{classType}\"", + "unnecessaryTypeIgnore": "Unnecessary '# type: ignore' comment", "unpackArgCount": "Expected a single type argument after \"Unpack\"", "unpackedArgInTypeArgument": "Unpacked arguments cannot be used in type argument lists", "unpackedArgWithVariadicParam": "Unpacked argument cannot be used for TupleTypeVar parameter", "unpackedDictArgumentNotMapping": "Argument expression after ** must be a mapping with a \"str\" key type", - "unpackedSubscriptIllegal": "Unpack operator in subscript is not supported", + "unpackedSubscriptIllegal": 
"Unpack operator in subscript requires Python 3.11 or newer", "unpackedTypedDictArgument": "Unable to match unpacked TypedDict argument to parameters", "unpackedTypeVarTupleExpected": "Expected unpacked TypeVarTuple; use Unpack[{name1}] or *{name2}", - "unpackExpectedTypeVarTuple": "Expected TypeVarTuple as type argument for Unpack", + "unpackExpectedTypedDict": "Expected TypedDict type argument for Unpack", + "unpackExpectedTypeVarTuple": "Expected TypeVarTuple or Tuple as type argument for Unpack", "unpackIllegalInComprehension": "Unpack operation not allowed in comprehension", + "unpackInAnnotation": "Unpack operator not allowed in type annotation", "unpackInDict": "Unpack operation not allowed in dictionaries", "unpackInSet": "Unpack operator not allowed within a set", - "unpackNotAllowed": "Unpack operation not allowed in this context", + "unpackNotAllowed": "Unpack not allowed in this context", + "unpackOperatorNotAllowed": "Unpack operation not allowed in this context", "unpackTuplesIllegal": "Unpack operation not allowed in tuples prior to Python 3.8", "unreachableCode": "Code is unreachable", + "unreachableExcept": "Except clause is unreachable because exception is already handled", "unsupportedDunderAllOperation": "Operation on \"__all__\" is not supported, so exported symbol list may be incorrect", "unusedCallResult": "Result of call expression is of type \"{type}\" and is not used; assign to variable \"_\" if this is intentional", "unusedCoroutine": "Result of async function call is not used; use \"await\" or assign result to variable", "varAnnotationIllegal": "Type annotations for variables requires Python 3.6 or newer; use type comment for compatibility with previous versions", - "variadicTypeArgsTooMany": "Type argument list can have at most one unpacked TypeVarTuple", + "variadicTypeArgsTooMany": "Type argument list can have at most one unpacked TypeVarTuple or Tuple", "variadicTypeParamTooManyAlias": "Type alias can have at most one TypeVarTuple 
type parameter but received multiple ({names})", "variadicTypeParamTooManyClass": "Generic class can have at most one TypeVarTuple type parameter but received multiple ({names})", "walrusIllegal": "Operator \":=\" requires Python 3.8 or newer", @@ -437,49 +484,61 @@ "argParam": "Argument corresponds to parameter \"{paramName}\"", "argParamFunction": "Argument corresponds to parameter \"{paramName}\" in function \"{functionName}\"", "argsParamMissing": "Parameter \"*{paramName}\" has no corresponding parameter", - "argsParamWithVariadic": "Parameter \"*{paramName}\" is incompatible with TypeVarTuple", + "argsPositionOnly": "Position-only parameter mismatch; expected {expected} but received {received}", "argumentType": "Argument type is \"{type}\"", "argumentTypes": "Argument types: ({types})", "assignToNone": "Type cannot be assigned to type \"None\"", "asyncHelp": "Did you mean \"async with\"?", - "dataclassFrozen": "\"{name}\" is frozen", + "baseClassProvidesType": "Base class \"{baseClass}\" provides type \"{type}\"", + "dataClassFieldLocation": "Field declaration", + "dataClassFrozen": "\"{name}\" is frozen", "finalMethod": "Final method", "keyNotRequired": "\"{name}\" is not a required key in \"{type}\", so access may result in runtime exception", "keyRequiredDeleted": "\"{name}\" is a required key and cannot be deleted", "keyUndefined": "\"{name}\" is not a defined key in \"{type}\"", + "functionParamDefaultMissing": "Parameter \"{name}\" is missing default argument", "functionParamName": "Parameter name mismatch: \"{destName}\" versus \"{srcName}\"", "functionReturnTypeMismatch": "Function return type \"{sourceType}\" is incompatible with type \"{destType}\"", "functionTooFewParams": "Function accepts too few positional parameters; expected {expected} but received {received}", "functionTooManyParams": "Function accepts too many positional parameters; expected {expected} but received {received}", "incompatibleGetter": "Property getter method is incompatible", 
"incompatibleSetter": "Property setter method is incompatible", + "initMethodLocation": "The __init__ method is defined in class \"{type}\"", "incompatibleDeleter": "Property deleter method is incompatible", + "initMethodSignature": "Signature of __init__ is \"{type}\"", "kwargsParamMissing": "Parameter \"**{paramName}\" has no corresponding parameter", + "listAssignmentMismatch": "Type \"{type}\" is incompatible with target list", "literalAssignmentMismatch": "\"{sourceType}\" cannot be assigned to type \"{destType}\"", + "matchIsNotExhaustiveType": "Unhandled type: \"{type}\"", + "matchIsNotExhaustiveHint": "If exhaustive handling is not intended, add \"case _: pass\"", "memberSetClassVar": "Member \"{name}\" cannot be assigned through a class instance because it is a ClassVar", "memberAssignment": "Expression of type \"{type}\" cannot be assigned to member \"{name}\" of class \"{classType}\"", "memberIsAbstract": "\"{type}.{name}\" is abstract", "memberIsAbstractMore": "and {count} more...", "memberIsFinalInProtocol": "\"{name}\" is marked Final in protocol", + "memberIsInitVar": "Member \"{name}\" is an init-only field", "memberIsNotFinalInProtocol": "\"{name}\" is not marked Final in protocol", "memberTypeMismatch": "\"{name}\" is an incompatible type", "memberUnknown": "Member \"{name}\" is unknown", + "missingProtocolMember": "Member \"{name}\" is declared in protocol class \"{classType}\"", "missingGetter": "Property getter method is missing", "missingSetter": "Property setter method is missing", "missingDeleter": "Property deleter method is missing", "namedParamMissingInDest": "Keyword parameter \"{name}\" is missing in destination", "namedParamMissingInSource": "Keyword parameter \"{name}\" is missing in source", "namedParamTypeMismatch": "Keyword parameter \"{name}\" of type \"{sourceType}\" cannot be assigned to type \"{destType}\"", + "newMethodLocation": "The __new__ method is defined in class \"{type}\"", + "newMethodSignature": "Signature of __new__ 
is \"{type}\"", "noOverloadAssignable": "No overloaded function matches type \"{type}\"", "orPatternMissingName": "Missing names: {name}", "overloadMethod": "Overload method is defined here", "overloadNotAssignable": "One or more overloads of \"{name}\" is not assignable", "overloadWithImplementation": "The body of a function overload should be \"...\"", "overriddenMethod": "Overridden method", - "overloadTooManyUnions": "Argument union types have too many subtypes to evaluate overload", "overriddenSymbol": "Overridden symbol", "overrideParamCount": "Parameter count mismatch; base method has {baseCount}, but override has {overrideCount}", "overrideParamName": "Parameter {index} name mismatch: base parameter is named \"{baseName}\", override parameter is named \"{overrideName}\"", + "overrideParamNamePositionOnly": "Parameter {index} name mismatch: base parameter is named \"{baseName}\", override parameter is position-only", "overrideParamType": "Parameter {index} type mismatch: base parameter is type \"{baseType}\", override parameter is type \"{overrideType}\"", "overrideReturnType": "Return type mismatch: base method returns type \"{baseType}\", override returns type \"{overrideType}\"", "overrideType": "Base class defines type as \"{type}\"", @@ -487,13 +546,16 @@ "paramSpecOverload": "ParamSpec cannot be used with overloaded function", "paramType": "Parameter type is \"{paramType}\"", "privateImportFromPyTypedSource": "Import from \"{module}\" instead", + "propertyAccessFromProtocolClass": "A property defined within a protocol class cannot be accessed as a class variable", "propertyMethodIncompatible": "Property method \"{name}\" is incompatible", "propertyMethodMissing": "Property method \"{name}\" is missing in override", "propertyMissingDeleter": "Property \"{name}\" has no defined deleter", "propertyMissingSetter": "Property \"{name}\" has no defined setter", "protocolIncompatible": "\"{sourceType}\" is incompatible with protocol \"{destType}\"", - 
"protocolMemberMissing": "\"{name}\" is not present", "protocolMemberClassVar": "\"{name}\" is not a class variable", + "protocolMemberMissing": "\"{name}\" is not present", + "protocolSourceIsNotConcrete": "\"{sourceType}\" is not a concrete class type and cannot be assigned to type \"{destType}\"", + "readOnlyAttribute": "Attribute \"{name}\" is read-only", "seeDeclaration": "See declaration", "seeClassDeclaration": "See class declaration", "seeFunctionDeclaration": "See function declaration", @@ -501,7 +563,8 @@ "seeParameterDeclaration": "See parameter declaration", "seeVariableDeclaration": "See variable declaration", "tupleEntryTypeMismatch": "Tuple entry {entry} is incorrect type", - "tupleSizeMismatch": "Tuple size mismatch; expected {expected} but received {received}", + "tupleAssignmentMismatch": "Type \"{type}\" is incompatible with target tuple", + "tupleSizeMismatch": "Element size mismatch; expected {expected} but received {received}", "tupleSizeMismatchIndeterminate": "Tuple size mismatch; expected {expected} but received indeterminate number", "typeAssignmentMismatch": "Type \"{sourceType}\" cannot be assigned to type \"{destType}\"", "typeBound": "Type \"{sourceType}\" is incompatible with bound type \"{destType}\" for type variable \"{name}\"", @@ -512,6 +575,7 @@ "typedDictFieldTypeMismatch": "Type \"{type}\" is not assignable to field \"{name}\"", "typedDictFieldRedefinition": "Type in parent class is \"{parentType}\" and type in child class is \"{childType}\"", "typedDictFieldUndefined": "\"{name}\" is an undefined field in type \"{type}\"", + "typedDictFinalMismatch": "\"{sourceType}\" is incompatible with \"{destType}\" because of a @final mismatch", "typeIncompatible": "\"{sourceType}\" is incompatible with \"{destType}\"", "typeNotCallable": "Type \"{type}\" is not callable", "typeNotClass": "\"{type}\" is not a class", @@ -525,6 +589,7 @@ "typeVarNotAllowed": "TypeVar or generic type with type arguments not allowed", "typeVarsMissing": 
"Missing type variables: {names}", "typeVarTupleRequiresKnownLength": "TypeVarTuple cannot be bound to a tuple of unknown length", + "unreachableExcept": "\"{exceptionType}\" is a subclass of \"{parentType}\"", "useDictInstead": "Use Dict[T1, T2] to indicate a dictionary type", "useListInstead": "Use List[T] to indicate a list type or Union[T1, T2] to indicate a union type", "useTupleInstead": "Use Tuple[T1, ..., Tn] to indicate a tuple type or Union[T1, T2] to indicate a union type", @@ -539,5 +604,10 @@ "filesToAnalyzeCount": "{count} files to analyze", "findingReferences": "Finding references", "organizeImports": "Organize Imports" + }, + "Refactoring": { + "moveFile": "Do you want to update all import references for \"{oldModuleName}\" to \"{newModuleName}\"?", + "moveFileLabel": "Update all import references for \"{oldModuleName}\" to \"{newModuleName}\"?", + "moveFileDescription": "Update all import references for \"{oldModuleName}\" to \"{newModuleName}\"?" } } diff --git a/packages/pyright-internal/src/parser/parseNodes.ts b/packages/pyright-internal/src/parser/parseNodes.ts index bda2fdec8fc6..8bf3781c3acd 100644 --- a/packages/pyright-internal/src/parser/parseNodes.ts +++ b/packages/pyright-internal/src/parser/parseNodes.ts @@ -120,6 +120,8 @@ export const enum ErrorExpressionCategory { MissingFunctionParameterList, MissingPattern, MissingPatternSubject, + MissingDictValue, + MaxDepthExceeded, } export interface ParseNodeBase extends TextRange { @@ -129,6 +131,12 @@ export interface ParseNodeBase extends TextRange { id: number; parent?: ParseNode | undefined; + + // For some parse nodes, each child's depth is calculated, + // and the max child depth is recorded here. This is used + // to detect long chains of operations that can result in + // stack overflows during evaluation. 
+ maxChildDepth?: number; } let _nextNodeId = 1; @@ -289,7 +297,7 @@ export namespace ForNode { } } -export type ListComprehensionIterNode = ListComprehensionForNode | ListComprehensionIfNode; +export type ListComprehensionForIfNode = ListComprehensionForNode | ListComprehensionIfNode; export interface ListComprehensionForNode extends ParseNodeBase { readonly nodeType: ParseNodeType.ListComprehensionFor; @@ -375,16 +383,18 @@ export interface ExceptNode extends ParseNodeBase { typeExpression?: ExpressionNode | undefined; name?: NameNode | undefined; exceptSuite: SuiteNode; + isExceptGroup: boolean; } export namespace ExceptNode { - export function create(exceptToken: Token, exceptSuite: SuiteNode) { + export function create(exceptToken: Token, exceptSuite: SuiteNode, isExceptGroup: boolean) { const node: ExceptNode = { start: exceptToken.start, length: exceptToken.length, nodeType: ParseNodeType.Except, id: _nextNodeId++, exceptSuite, + isExceptGroup, }; exceptSuite.parent = node; @@ -491,13 +501,13 @@ export namespace ClassNode { // function or class declaration. export function createDummyForDecorators(decorators: DecoratorNode[]) { const node: ClassNode = { - start: 0, + start: decorators[0].start, length: 0, nodeType: ParseNodeType.Class, id: _nextNodeId++, decorators, name: { - start: 0, + start: decorators[0].start, length: 0, id: 0, nodeType: ParseNodeType.Name, @@ -512,7 +522,7 @@ export namespace ClassNode { }, arguments: [], suite: { - start: 0, + start: decorators[0].start, length: 0, id: 0, nodeType: ParseNodeType.Suite, @@ -778,6 +788,7 @@ export namespace UnaryOperationNode { }; expression.parent = node; + node.maxChildDepth = 1 + (expression.maxChildDepth ?? 0); extendRange(node, expression); @@ -815,6 +826,8 @@ export namespace BinaryOperationNode { leftExpression.parent = node; rightExpression.parent = node; + node.maxChildDepth = 1 + Math.max(leftExpression.maxChildDepth ?? 0, rightExpression.maxChildDepth ?? 
0); + extendRange(node, rightExpression); return node; @@ -1079,21 +1092,32 @@ export interface CallNode extends ParseNodeBase { readonly nodeType: ParseNodeType.Call; leftExpression: ExpressionNode; arguments: ArgumentNode[]; + trailingComma: boolean; } export namespace CallNode { - export function create(leftExpression: ExpressionNode) { + export function create(leftExpression: ExpressionNode, argList: ArgumentNode[], trailingComma: boolean) { const node: CallNode = { start: leftExpression.start, length: leftExpression.length, nodeType: ParseNodeType.Call, id: _nextNodeId++, leftExpression, - arguments: [], + arguments: argList, + trailingComma, }; leftExpression.parent = node; + node.maxChildDepth = 1 + (leftExpression.maxChildDepth ?? 0); + + if (argList.length > 0) { + argList.forEach((arg) => { + arg.parent = node; + }); + extendRange(node, argList[argList.length - 1]); + } + return node; } } @@ -1101,7 +1125,8 @@ export namespace CallNode { export interface ListComprehensionNode extends ParseNodeBase { readonly nodeType: ParseNodeType.ListComprehension; expression: ParseNode; - comprehensions: ListComprehensionIterNode[]; + forIfNodes: ListComprehensionForIfNode[]; + isParenthesized?: boolean; } export namespace ListComprehensionNode { @@ -1112,7 +1137,7 @@ export namespace ListComprehensionNode { nodeType: ParseNodeType.ListComprehension, id: _nextNodeId++, expression, - comprehensions: [], + forIfNodes: [], }; expression.parent = node; @@ -1152,6 +1177,8 @@ export namespace IndexNode { extendRange(node, closeBracketToken); + node.maxChildDepth = 1 + (baseExpression.maxChildDepth ?? 0); + return node; } } @@ -1245,6 +1272,8 @@ export namespace MemberAccessNode { extendRange(node, memberName); + node.maxChildDepth = 1 + (leftExpression.maxChildDepth ?? 
0); + return node; } } @@ -1333,7 +1362,7 @@ export namespace EllipsisNode { export interface NumberNode extends ParseNodeBase { readonly nodeType: ParseNodeType.Number; - value: number; + value: number | bigint; isInteger: boolean; isImaginary: boolean; } @@ -1419,6 +1448,9 @@ export interface StringListNode extends ParseNodeBase { // a type annotation, they are further parsed // into an expression. typeAnnotation?: ExpressionNode; + + // Indicates that the string list is enclosed in parens. + isParenthesized?: boolean; } export namespace StringListNode { @@ -1445,6 +1477,7 @@ export namespace StringListNode { export interface DictionaryNode extends ParseNodeBase { readonly nodeType: ParseNodeType.Dictionary; entries: DictionaryEntryNode[]; + trailingCommaToken?: Token; } export namespace DictionaryNode { diff --git a/packages/pyright-internal/src/parser/parser.ts b/packages/pyright-internal/src/parser/parser.ts index 4d1b3f1aace3..98506540412c 100644 --- a/packages/pyright-internal/src/parser/parser.ts +++ b/packages/pyright-internal/src/parser/parser.ts @@ -62,9 +62,9 @@ import { ImportNode, IndexNode, LambdaNode, + ListComprehensionForIfNode, ListComprehensionForNode, ListComprehensionIfNode, - ListComprehensionIterNode, ListComprehensionNode, ListNode, MatchNode, @@ -146,12 +146,14 @@ export class ParseOptions { this.pythonVersion = latestStablePythonVersion; this.reportInvalidStringEscapeSequence = false; this.skipFunctionAndClassBody = false; + this.ipythonMode = false; } isStubFile: boolean; pythonVersion: PythonVersion; reportInvalidStringEscapeSequence: boolean; skipFunctionAndClassBody: boolean; + ipythonMode: boolean; } export interface ParseResults { @@ -161,6 +163,7 @@ export interface ParseResults { futureImports: Map; tokenizerOutput: TokenizerOutput; containsWildcardImport: boolean; + typingSymbolAliases: Map; } export interface ParseExpressionTextResults { @@ -179,15 +182,19 @@ export interface ModuleImport { importedSymbols: string[] | undefined; 
} +export interface ArgListResult { + args: ArgumentNode[]; + trailingComma: boolean; +} + const enum ParseTextMode { Expression, VariableAnnotation, FunctionAnnotation, } -// PEP 637 proposed support for keyword arguments in subscript -// expressions, but it was rejected. -const supportPEP637 = false; +// Limit the max child node depth to prevent stack overflows. +const maxChildNodeDepth = 256; export class Parser { private _fileContents?: string; @@ -250,6 +257,7 @@ export class Parser { futureImports: this._futureImportMap, tokenizerOutput: this._tokenizerOutput!, containsWildcardImport: this._containsWildcardImport, + typingSymbolAliases: this._typingSymbolAliases, }; } @@ -259,18 +267,26 @@ export class Parser { textLength: number, parseOptions: ParseOptions, parseTextMode = ParseTextMode.Expression, - initialParenDepth = 0 + initialParenDepth = 0, + typingSymbolAliases?: Map ): ParseExpressionTextResults { const diagSink = new DiagnosticSink(); this._startNewParse(fileContents, textOffset, textLength, parseOptions, diagSink, initialParenDepth); + if (typingSymbolAliases) { + this._typingSymbolAliases = new Map(typingSymbolAliases); + } + let parseTree: ExpressionNode | undefined; if (parseTextMode === ParseTextMode.VariableAnnotation) { parseTree = this._parseTypeAnnotation(); } else if (parseTextMode === ParseTextMode.FunctionAnnotation) { parseTree = this._parseFunctionTypeAnnotation(); } else { - const exprListResult = this._parseTestOrStarExpressionList(/* allowAssignmentExpression */ false); + const exprListResult = this._parseTestOrStarExpressionList( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true + ); if (exprListResult.parseError) { parseTree = exprListResult.parseError; } else { @@ -310,7 +326,13 @@ export class Parser { // Tokenize the file contents. 
const tokenizer = new Tokenizer(); - this._tokenizerOutput = tokenizer.tokenize(fileContents, textOffset, textLength, initialParenDepth); + this._tokenizerOutput = tokenizer.tokenize( + fileContents, + textOffset, + textLength, + initialParenDepth, + this._parseOptions.ipythonMode + ); this._tokenIndex = 0; } @@ -418,8 +440,6 @@ export class Parser { // | star_named_expression ',' star_named_expressions? // | named_expression private _parseMatchStatement(): MatchNode | undefined { - const matchToken = this._getKeywordToken(KeywordType.Match); - // Parse the subject expression with errors suppressed. If it's not // followed by a colon, we'll assume this is not a match statement. // We need to do this because "match" is considered a soft keyword, @@ -428,12 +448,16 @@ export class Parser { let smellsLikeMatchStatement = false; this._suppressErrors(() => { const curTokenIndex = this._tokenIndex; - this._parseTestOrStarListAsExpression( + + this._getKeywordToken(KeywordType.Match); + const expression = this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, ErrorExpressionCategory.MissingPatternSubject, Localizer.Diagnostic.expectedReturnExpr() ); - smellsLikeMatchStatement = this._peekToken().type === TokenType.Colon; + smellsLikeMatchStatement = + expression.nodeType !== ParseNodeType.Error && this._peekToken().type === TokenType.Colon; // Set the token index back to the start. 
this._tokenIndex = curTokenIndex; @@ -443,8 +467,11 @@ export class Parser { return undefined; } + const matchToken = this._getKeywordToken(KeywordType.Match); + const subjectExpression = this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, ErrorExpressionCategory.MissingPatternSubject, Localizer.Diagnostic.expectedReturnExpr() ); @@ -502,6 +529,9 @@ export class Parser { if (!dedentToken.matchesIndent) { this._addError(Localizer.Diagnostic.inconsistentIndent(), dedentToken); } + if (dedentToken.isDedentAmbiguous) { + this._addError(Localizer.Diagnostic.inconsistentTabs(), dedentToken); + } break; } @@ -776,6 +806,7 @@ export class Parser { const patternCaptureOrValue = this._parsePatternCaptureOrValue(); if (patternCaptureOrValue) { + const openParenToken = this._peekToken(); if ( patternCaptureOrValue.nodeType === ParseNodeType.Error || !this._consumeTokenIfType(TokenType.OpenParenthesis) @@ -792,7 +823,7 @@ export class Parser { const classPattern = PatternClassNode.create(classNameExpr, args); if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), openParenToken); // Consume the remainder of tokens on the line for error // recovery. @@ -857,7 +888,7 @@ export class Parser { nextToken.type === TokenType.OpenParenthesis ? 
Localizer.Diagnostic.expectedCloseParen() : Localizer.Diagnostic.expectedCloseBracket(), - this._peekToken() + nextToken ); this._consumeTokensUntilType([ TokenType.Colon, @@ -874,7 +905,7 @@ export class Parser { if (this._consumeTokenIfType(TokenType.CloseCurlyBrace)) { extendRange(mappingPattern, lastToken); } else { - this._addError(Localizer.Diagnostic.expectedCloseBrace(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseBrace(), nextToken); this._consumeTokensUntilType([TokenType.Colon, TokenType.CloseCurlyBrace]); } @@ -1203,6 +1234,10 @@ export class Parser { } if (nextToken.type === TokenType.Dedent) { + if ((nextToken as DedentToken).isDedentAmbiguous) { + this._addError(Localizer.Diagnostic.inconsistentTabs(), nextToken); + } + indent--; if (indent === 0) { @@ -1275,6 +1310,9 @@ export class Parser { if (!dedentToken.matchesIndent) { this._addError(Localizer.Diagnostic.inconsistentIndent(), dedentToken); } + if (dedentToken.isDedentAmbiguous) { + this._addError(Localizer.Diagnostic.inconsistentTabs(), dedentToken); + } break; } @@ -1318,12 +1356,28 @@ export class Parser { ); forSuite = SuiteNode.create(this._peekToken()); } else { - seqExpr = this._parseTestListAsExpression( + seqExpr = this._parseTestOrStarListAsExpression( + /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, ErrorExpressionCategory.MissingExpression, Localizer.Diagnostic.expectedInExpr() ); forSuite = this._parseLoopSuite(); + // Versions of Python earlier than 3.9 didn't allow unpack operators if the + // tuple wasn't enclosed in parentheses. 
+ if (this._getLanguageVersion() < PythonVersion.V3_9 && !this._parseOptions.isStubFile) { + if (seqExpr.nodeType === ParseNodeType.Tuple && !seqExpr.enclosedInParens) { + let sawStar = false; + seqExpr.expressions.forEach((expr) => { + if (expr.nodeType === ParseNodeType.Unpack && !sawStar) { + this._addError(Localizer.Diagnostic.unpackOperatorNotAllowed(), expr); + sawStar = true; + } + }); + } + } + if (this._consumeTokenIfKeyword(KeywordType.Else)) { elseSuite = this._parseSuite(this._isInFunction); } @@ -1361,22 +1415,22 @@ export class Parser { const listCompNode = ListComprehensionNode.create(target); - const compList: ListComprehensionIterNode[] = [compFor]; + const forIfList: ListComprehensionForIfNode[] = [compFor]; while (true) { const compIter = this._tryParseCompForStatement() || this._tryParseCompIfStatement(); if (!compIter) { break; } compIter.parent = listCompNode; - compList.push(compIter); + forIfList.push(compIter); } - listCompNode.comprehensions = compList; - if (compList.length > 0) { - compList.forEach((comp) => { + listCompNode.forIfNodes = forIfList; + if (forIfList.length > 0) { + forIfList.forEach((comp) => { comp.parent = listCompNode; }); - extendRange(listCompNode, compList[compList.length - 1]); + extendRange(listCompNode, forIfList[forIfList.length - 1]); } return listCompNode; } @@ -1437,7 +1491,9 @@ export class Parser { } const ifToken = this._getKeywordToken(KeywordType.If); - const ifExpr = this._tryParseLambdaExpression() || this._parseAssignmentExpression(); + const ifExpr = + this._tryParseLambdaExpression() || + this._parseAssignmentExpression(/* disallowAssignmentExpression */ true); const compIfNode = ListComprehensionIfNode.create(ifToken, ifExpr); @@ -1481,6 +1537,16 @@ export class Parser { break; } + // See if this is a Python 3.11 exception group. 
+ const possibleStarToken = this._peekToken(); + let isExceptGroup = false; + if (this._consumeTokenIfOperator(OperatorType.Multiply)) { + if (this._getLanguageVersion() < PythonVersion.V3_11 && !this._parseOptions.isStubFile) { + this._addError(Localizer.Diagnostic.exceptionGroupIncompatible(), possibleStarToken); + } + isExceptGroup = true; + } + let typeExpr: ExpressionNode | undefined; let symbolName: IdentifierToken | undefined; if (this._peekTokenType() !== TokenType.Colon) { @@ -1515,7 +1581,7 @@ export class Parser { } const exceptSuite = this._parseSuite(this._isInFunction); - const exceptNode = ExceptNode.create(exceptToken, exceptSuite); + const exceptNode = ExceptNode.create(exceptToken, exceptSuite, isExceptGroup); if (typeExpr) { exceptNode.typeExpression = typeExpr; exceptNode.typeExpression.parent = exceptNode; @@ -1569,6 +1635,7 @@ export class Parser { ); } + const openParenToken = this._peekToken(); if (!this._consumeTokenIfType(TokenType.OpenParenthesis)) { this._addError(Localizer.Diagnostic.expectedOpenParen(), this._peekToken()); return ErrorNode.create( @@ -1582,7 +1649,7 @@ export class Parser { const paramList = this._parseVarArgsList(TokenType.CloseParenthesis, /* allowAnnotations */ true); if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), openParenToken); this._consumeTokensUntilType([TokenType.Colon]); } @@ -1715,7 +1782,7 @@ export class Parser { } sawKeywordOnlySeparator = true; } else { - if (sawArgs) { + if (sawKeywordOnlySeparator || sawArgs) { this._addError(Localizer.Diagnostic.duplicateArgsParam(), param); } sawArgs = true; @@ -1768,7 +1835,7 @@ export class Parser { } else if (this._consumeTokenIfOperator(OperatorType.Power)) { starCount = 2; } else if (this._consumeTokenIfOperator(OperatorType.Divide)) { - if (this._getLanguageVersion() < PythonVersion.V3_8) { + if 
(this._getLanguageVersion() < PythonVersion.V3_8 && !this._parseOptions.isStubFile) { this._addError(Localizer.Diagnostic.positionOnlyIncompatible(), firstToken); } slashCount = 1; @@ -1810,7 +1877,7 @@ export class Parser { } if (allowAnnotations && this._consumeTokenIfType(TokenType.Colon)) { - paramNode.typeAnnotation = this._parseTypeAnnotation(); + paramNode.typeAnnotation = this._parseTypeAnnotation(paramType === ParameterCategory.VarArgList); paramNode.typeAnnotation.parent = paramNode; extendRange(paramNode, paramNode.typeAnnotation); } @@ -1891,7 +1958,7 @@ export class Parser { if (isParenthesizedWithItemList) { if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), possibleParen); } } @@ -2018,11 +2085,12 @@ export class Parser { } let argList: ArgumentNode[] = []; + const openParenToken = this._peekToken(); if (this._consumeTokenIfType(TokenType.OpenParenthesis)) { - argList = this._parseArgList(); + argList = this._parseArgList().args; if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), openParenToken); } } @@ -2086,6 +2154,7 @@ export class Parser { if (!this._isNextTokenNeverExpression()) { const returnExpr = this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, ErrorExpressionCategory.MissingExpression, Localizer.Diagnostic.expectedReturnExpr() ); @@ -2130,6 +2199,7 @@ export class Parser { importFromNode.wildcardToken = possibleStarToken; this._containsWildcardImport = true; } else { + const openParenToken = this._peekToken(); const inParen = this._consumeTokenIfType(TokenType.OpenParenthesis); let trailingCommaToken: Token | undefined; @@ -2179,7 +2249,7 @@ export class Parser { const nextToken = 
this._peekToken(); if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), openParenToken); } else { extendRange(importFromNode, nextToken); } @@ -2286,10 +2356,15 @@ export class Parser { const moduleNameNode = ModuleNameNode.create(this._peekToken()); while (true) { - if (this._consumeTokenIfType(TokenType.Ellipsis)) { - moduleNameNode.leadingDots += 3; - } else if (this._consumeTokenIfType(TokenType.Dot)) { - moduleNameNode.leadingDots++; + const token = this._getTokenIfType(TokenType.Ellipsis) ?? this._getTokenIfType(TokenType.Dot); + if (token) { + if (token.type === TokenType.Ellipsis) { + moduleNameNode.leadingDots += 3; + } else { + moduleNameNode.leadingDots++; + } + + extendRange(moduleNameNode, token); } else { break; } @@ -2457,6 +2532,7 @@ export class Parser { if (!this._isNextTokenNeverExpression()) { exprList = this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ true, + /* allowMultipleUnpack */ true, ErrorExpressionCategory.MissingExpression, Localizer.Diagnostic.expectedYieldExpr() ); @@ -2578,7 +2654,7 @@ export class Parser { // that's surrounded by parens. 
if (exprListResult.list.length === 1 && !exprListResult.trailingComma) { if (exprListResult.list[0].nodeType === ParseNodeType.Unpack) { - this._addError(Localizer.Diagnostic.unpackNotAllowed(), exprListResult.list[0]); + this._addError(Localizer.Diagnostic.unpackOperatorNotAllowed(), exprListResult.list[0]); } return exprListResult.list[0]; } @@ -2632,6 +2708,7 @@ export class Parser { private _parseTestOrStarListAsExpression( allowAssignmentExpression: boolean, + allowMultipleUnpack: boolean, errorCategory: ErrorExpressionCategory, errorString: string ): ExpressionNode { @@ -2639,7 +2716,7 @@ export class Parser { return this._handleExpressionParseError(errorCategory, errorString); } - const exprListResult = this._parseTestOrStarExpressionList(allowAssignmentExpression); + const exprListResult = this._parseTestOrStarExpressionList(allowAssignmentExpression, allowMultipleUnpack); if (exprListResult.parseError) { return exprListResult.parseError; } @@ -2655,13 +2732,15 @@ export class Parser { return this._parseExpressionListGeneric(() => this._parseTestExpression(/* allowAssignmentExpression */ false)); } - private _parseTestOrStarExpressionList(allowAssignmentExpression: boolean): ListResult { + private _parseTestOrStarExpressionList( + allowAssignmentExpression: boolean, + allowMultipleUnpack: boolean + ): ListResult { const exprListResult = this._parseExpressionListGeneric(() => this._parseTestOrStarExpression(allowAssignmentExpression) ); - if (!exprListResult.parseError) { - // Make sure that we don't have more than one star expression in the list. 
+ if (!allowMultipleUnpack && !exprListResult.parseError) { let sawStar = false; for (const expr of exprListResult.list) { if (expr.nodeType === ParseNodeType.Unpack) { @@ -2735,7 +2814,7 @@ export class Parser { } // assign_expr: NAME := test - private _parseAssignmentExpression() { + private _parseAssignmentExpression(disallowAssignmentExpression = false) { const leftExpr = this._parseOrTest(); if (leftExpr.nodeType === ParseNodeType.Error) { return leftExpr; @@ -2750,7 +2829,7 @@ export class Parser { return leftExpr; } - if (!this._assignmentExpressionsAllowed) { + if (!this._assignmentExpressionsAllowed || this._isParsingTypeAnnotation || disallowAssignmentExpression) { this._addError(Localizer.Diagnostic.walrusNotAllowed(), walrusToken); } @@ -2776,7 +2855,7 @@ export class Parser { break; } const rightExpr = this._parseAndTest(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, OperatorType.Or); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.Or); } return leftExpr; @@ -2795,7 +2874,7 @@ export class Parser { break; } const rightExpr = this._parseNotTest(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, OperatorType.And); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.And); } return leftExpr; @@ -2806,7 +2885,7 @@ export class Parser { const notToken = this._peekToken(); if (this._consumeTokenIfKeyword(KeywordType.Not)) { const notExpr = this._parseNotTest(); - return UnaryOperationNode.create(notToken, notExpr, OperatorType.Not); + return this._createUnaryOperationNode(notToken, notExpr, OperatorType.Not); } return this._parseComparison(); @@ -2856,7 +2935,7 @@ export class Parser { } const rightExpr = this._parseComparison(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, comparisonOperator); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, comparisonOperator); } return 
leftExpr; @@ -2875,7 +2954,7 @@ export class Parser { break; } const rightExpr = this._parseBitwiseXorExpression(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, OperatorType.BitwiseOr); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.BitwiseOr); } return leftExpr; @@ -2894,7 +2973,7 @@ export class Parser { break; } const rightExpr = this._parseBitwiseAndExpression(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, OperatorType.BitwiseXor); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.BitwiseXor); } return leftExpr; @@ -2913,7 +2992,7 @@ export class Parser { break; } const rightExpr = this._parseShiftExpression(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, OperatorType.BitwiseAnd); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.BitwiseAnd); } return leftExpr; @@ -2931,7 +3010,7 @@ export class Parser { while (nextOperator === OperatorType.LeftShift || nextOperator === OperatorType.RightShift) { this._getNextToken(); const rightExpr = this._parseArithmeticExpression(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, nextOperator); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, nextOperator); peekToken = this._peekToken(); nextOperator = this._peekOperatorType(); } @@ -2955,7 +3034,7 @@ export class Parser { return rightExpr; } - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, nextOperator); + leftExpr = this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, nextOperator); peekToken = this._peekToken(); nextOperator = this._peekOperatorType(); } @@ -2981,7 +3060,7 @@ export class Parser { ) { this._getNextToken(); const rightExpr = this._parseArithmeticFactor(); - leftExpr = BinaryOperationNode.create(leftExpr, rightExpr, peekToken, nextOperator); + leftExpr = 
this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, nextOperator); peekToken = this._peekToken(); nextOperator = this._peekOperatorType(); } @@ -3001,7 +3080,7 @@ export class Parser { ) { this._getNextToken(); const expression = this._parseArithmeticFactor(); - return UnaryOperationNode.create(nextToken, expression, nextOperator); + return this._createUnaryOperationNode(nextToken, expression, nextOperator); } const leftExpr = this._parseAtomExpression(); @@ -3012,7 +3091,7 @@ export class Parser { const peekToken = this._peekToken(); if (this._consumeTokenIfOperator(OperatorType.Power)) { const rightExpr = this._parseArithmeticFactor(); - return BinaryOperationNode.create(leftExpr, rightExpr, peekToken, OperatorType.Power); + return this._createBinaryOperationNode(leftExpr, rightExpr, peekToken, OperatorType.Power); } return leftExpr; @@ -3060,26 +3139,30 @@ export class Parser { // Consume trailers. while (true) { // Is it a function call? + const startOfTrailerToken = this._peekToken(); if (this._consumeTokenIfType(TokenType.OpenParenthesis)) { // Generally, function calls are not allowed within type annotations, // but they are permitted in "Annotated" annotations. 
const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; this._isParsingTypeAnnotation = false; - const argList = this._parseArgList(); - const callNode = CallNode.create(atomExpression); - callNode.arguments = argList; - if (argList.length > 0) { - argList.forEach((arg) => { - arg.parent = callNode; + const argListResult = this._parseArgList(); + const callNode = CallNode.create(atomExpression, argListResult.args, argListResult.trailingComma); + + if (argListResult.args.length > 1 || argListResult.trailingComma) { + argListResult.args.forEach((arg) => { + if (arg.valueExpression.nodeType === ParseNodeType.ListComprehension) { + if (!arg.valueExpression.isParenthesized) { + this._addError(Localizer.Diagnostic.generatorNotParenthesized(), arg.valueExpression); + } + } }); - extendRange(callNode, argList[argList.length - 1]); } const nextToken = this._peekToken(); let isArgListTerminated = false; if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), startOfTrailerToken); // Consume the remainder of tokens on the line for error // recovery. 
@@ -3105,6 +3188,11 @@ export class Parser { atomExpression = callNode; + if (atomExpression.maxChildDepth !== undefined && atomExpression.maxChildDepth >= maxChildNodeDepth) { + atomExpression = ErrorNode.create(atomExpression, ErrorExpressionCategory.MaxDepthExceeded); + this._addError(Localizer.Diagnostic.maxParseDepthExceeded(), atomExpression); + } + // If the argument list wasn't terminated, break out of the loop if (!isArgListTerminated) { break; @@ -3146,11 +3234,17 @@ export class Parser { this._handleExpressionParseError( ErrorExpressionCategory.MissingIndexCloseBracket, Localizer.Diagnostic.expectedCloseBracket(), + startOfTrailerToken, indexNode ); } atomExpression = indexNode; + + if (atomExpression.maxChildDepth !== undefined && atomExpression.maxChildDepth >= maxChildNodeDepth) { + atomExpression = ErrorNode.create(atomExpression, ErrorExpressionCategory.MaxDepthExceeded); + this._addError(Localizer.Diagnostic.maxParseDepthExceeded(), atomExpression); + } } else if (this._consumeTokenIfType(TokenType.Dot)) { // Is it a member access? 
const memberName = this._getTokenIfIdentifier(); @@ -3158,10 +3252,16 @@ export class Parser { return this._handleExpressionParseError( ErrorExpressionCategory.MissingMemberAccessName, Localizer.Diagnostic.expectedMemberName(), + startOfTrailerToken, atomExpression ); } atomExpression = MemberAccessNode.create(atomExpression, NameNode.create(memberName)); + + if (atomExpression.maxChildDepth !== undefined && atomExpression.maxChildDepth >= maxChildNodeDepth) { + atomExpression = ErrorNode.create(atomExpression, ErrorExpressionCategory.MaxDepthExceeded); + this._addError(Localizer.Diagnostic.maxParseDepthExceeded(), atomExpression); + } } else { break; } @@ -3194,6 +3294,7 @@ export class Parser { argType = ArgumentCategory.UnpackedDictionary; } + const startOfSubscriptIndex = this._tokenIndex; let valueExpr = this._parsePossibleSlice(); let nameIdentifier: IdentifierToken | undefined; @@ -3208,6 +3309,17 @@ export class Parser { } else { this._addError(Localizer.Diagnostic.expectedParamName(), nameExpr); } + } else if ( + valueExpr.nodeType === ParseNodeType.Name && + this._peekOperatorType() === OperatorType.Walrus + ) { + this._tokenIndex = startOfSubscriptIndex; + valueExpr = this._parseTestExpression(/* allowAssignmentExpression */ true); + + // Python 3.10 and newer allow assignment expressions to be used inside of a subscript. 
+ if (!this._parseOptions.isStubFile && this._getLanguageVersion() < PythonVersion.V3_10) { + this._addError(Localizer.Diagnostic.assignmentExprInSubscript(), valueExpr); + } } } @@ -3224,20 +3336,15 @@ export class Parser { } argList.push(argNode); - if (supportPEP637) { - if (!this._parseOptions.isStubFile && this._getLanguageVersion() < PythonVersion.V3_10) { - if (argNode.name) { - this._addError(Localizer.Diagnostic.keywordSubscriptIllegal(), argNode.name); - } - if (argType !== ArgumentCategory.Simple) { - this._addError(Localizer.Diagnostic.unpackedSubscriptIllegal(), argNode); - } - } - } else { - if (argNode.name) { - this._addError(Localizer.Diagnostic.keywordSubscriptIllegal(), argNode.name); - } - if (argType !== ArgumentCategory.Simple) { + if (argNode.name) { + this._addError(Localizer.Diagnostic.keywordSubscriptIllegal(), argNode.name); + } + + if (argType !== ArgumentCategory.Simple) { + const unpackAllowed = + this._parseOptions.isStubFile || this._getLanguageVersion() >= PythonVersion.V3_11; + + if (argType === ArgumentCategory.UnpackedDictionary || !unpackAllowed) { this._addError(Localizer.Diagnostic.unpackedSubscriptIllegal(), argNode); } } @@ -3255,6 +3362,7 @@ export class Parser { const errorNode = this._handleExpressionParseError( ErrorExpressionCategory.MissingIndexOrSlice, Localizer.Diagnostic.expectedSliceIndex(), + /* targetToken */ undefined, /* childNode */ undefined, [TokenType.CloseBracket] ); @@ -3323,9 +3431,10 @@ export class Parser { } // arglist: argument (',' argument)* [','] - private _parseArgList(): ArgumentNode[] { + private _parseArgList(): ArgListResult { const argList: ArgumentNode[] = []; let sawKeywordArg = false; + let trailingComma = false; while (true) { const nextTokenType = this._peekTokenType(); @@ -3337,6 +3446,7 @@ export class Parser { break; } + trailingComma = false; const arg = this._parseArgument(); if (arg.name) { sawKeywordArg = true; @@ -3348,9 +3458,11 @@ export class Parser { if 
(!this._consumeTokenIfType(TokenType.Comma)) { break; } + + trailingComma = true; } - return argList; + return { args: argList, trailingComma }; } // argument: ( test [comp_for] | @@ -3458,6 +3570,15 @@ export class Parser { // expression is parenthesized. possibleTupleNode.parenthesized = true; } + + if (possibleTupleNode.nodeType === ParseNodeType.StringList) { + possibleTupleNode.isParenthesized = true; + } + + if (possibleTupleNode.nodeType === ParseNodeType.ListComprehension) { + possibleTupleNode.isParenthesized = true; + } + return possibleTupleNode; } else if (nextToken.type === TokenType.OpenBracket) { const listNode = this._parseListAtom(); @@ -3508,10 +3629,11 @@ export class Parser { private _handleExpressionParseError( category: ErrorExpressionCategory, errorMsg: string, + targetToken?: Token, childNode?: ExpressionNode, additionalStopTokens?: TokenType[] ): ErrorNode { - this._addError(errorMsg, this._peekToken()); + this._addError(errorMsg, targetToken ?? this._peekToken()); const expr = ErrorNode.create(this._peekToken(), category, childNode); const stopTokens = [TokenType.NewLine]; if (additionalStopTokens) { @@ -3565,7 +3687,9 @@ export class Parser { if (this._peekTokenType() !== TokenType.CloseParenthesis) { return this._handleExpressionParseError( ErrorExpressionCategory.MissingTupleCloseParen, - Localizer.Diagnostic.expectedCloseParen() + Localizer.Diagnostic.expectedCloseParen(), + startParen, + yieldExpr ); } else { extendRange(yieldExpr, this._getNextToken()); @@ -3585,7 +3709,9 @@ export class Parser { if (this._peekTokenType() !== TokenType.CloseParenthesis) { return this._handleExpressionParseError( ErrorExpressionCategory.MissingTupleCloseParen, - Localizer.Diagnostic.expectedCloseParen() + Localizer.Diagnostic.expectedCloseParen(), + startParen, + exprListResult.parseError ?? 
tupleOrExpression ); } else { const nextToken = this._getNextToken(); @@ -3608,20 +3734,31 @@ export class Parser { if (!this._consumeTokenIfType(TokenType.CloseBracket)) { return this._handleExpressionParseError( ErrorExpressionCategory.MissingListCloseBracket, - Localizer.Diagnostic.expectedCloseBracket() + Localizer.Diagnostic.expectedCloseBracket(), + startBracket, + exprListResult.parseError ?? _createList() ); } - const listAtom = ListNode.create(startBracket); - extendRange(listAtom, closeBracket); - if (exprListResult.list.length > 0) { - exprListResult.list.forEach((expr) => { - expr.parent = listAtom; - }); - extendRange(listAtom, exprListResult.list[exprListResult.list.length - 1]); + return _createList(); + + function _createList() { + const listAtom = ListNode.create(startBracket); + + if (closeBracket) { + extendRange(listAtom, closeBracket); + } + + if (exprListResult.list.length > 0) { + exprListResult.list.forEach((expr) => { + expr.parent = listAtom; + }); + extendRange(listAtom, exprListResult.list[exprListResult.list.length - 1]); + } + + listAtom.entries = exprListResult.list; + return listAtom; } - listAtom.entries = exprListResult.list; - return listAtom; } private _parseTestListWithComprehension(): ListResult { @@ -3659,12 +3796,15 @@ export class Parser { let isSet = false; let sawListComprehension = false; let isFirstEntry = true; + let trailingCommaToken: Token | undefined; while (true) { if (this._peekTokenType() === TokenType.CloseCurlyBrace) { break; } + trailingCommaToken = undefined; + let doubleStarExpression: ExpressionNode | undefined; let keyExpression: ExpressionNode | undefined; let valueExpression: ExpressionNode | undefined; @@ -3725,6 +3865,12 @@ export class Parser { assert(keyExpression !== undefined); if (keyExpression) { if (isDictionary) { + const missingValueErrorNode = ErrorNode.create( + this._peekToken(), + ErrorExpressionCategory.MissingDictValue + ); + const keyEntryNode = 
DictionaryKeyEntryNode.create(keyExpression, missingValueErrorNode); + dictionaryEntries.push(keyEntryNode); this._addError(Localizer.Diagnostic.dictKeyValuePairs(), keyExpression); } else { const listComp = this._tryParseListComprehension(keyExpression); @@ -3747,16 +3893,18 @@ export class Parser { break; } - if (!this._consumeTokenIfType(TokenType.Comma)) { + if (this._peekTokenType() !== TokenType.Comma) { break; } + trailingCommaToken = this._getNextToken(); + isFirstEntry = false; } let closeCurlyBrace: Token | undefined = this._peekToken(); if (!this._consumeTokenIfType(TokenType.CloseCurlyBrace)) { - this._addError(Localizer.Diagnostic.expectedCloseBrace(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseBrace(), startBrace); closeCurlyBrace = undefined; } @@ -3765,20 +3913,30 @@ export class Parser { if (closeCurlyBrace) { extendRange(setAtom, closeCurlyBrace); } + if (setEntries.length > 0) { extendRange(setAtom, setEntries[setEntries.length - 1]); } + setEntries.forEach((entry) => { entry.parent = setAtom; }); + setAtom.entries = setEntries; return setAtom; } const dictionaryAtom = DictionaryNode.create(startBrace); + + if (trailingCommaToken) { + dictionaryAtom.trailingCommaToken = trailingCommaToken; + extendRange(dictionaryAtom, trailingCommaToken); + } + if (closeCurlyBrace) { extendRange(dictionaryAtom, closeCurlyBrace); } + if (dictionaryEntries.length > 0) { dictionaryEntries.forEach((entry) => { entry.parent = dictionaryAtom; @@ -3835,6 +3993,7 @@ export class Parser { private _parseExpressionStatement(): ExpressionNode { let leftExpr = this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ false, ErrorExpressionCategory.MissingExpression, Localizer.Diagnostic.expectedExpr() ); @@ -3872,6 +4031,7 @@ export class Parser { this._tryParseYieldExpression() || this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, 
ErrorExpressionCategory.MissingExpression, Localizer.Diagnostic.expectedAssignRightHandExpr() ); @@ -3911,6 +4071,7 @@ export class Parser { this._tryParseYieldExpression() || this._parseTestOrStarListAsExpression( /* allowAssignmentExpression */ false, + /* allowMultipleUnpack */ true, ErrorExpressionCategory.MissingExpression, Localizer.Diagnostic.expectedAssignRightHandExpr() ); @@ -3973,7 +4134,7 @@ export class Parser { } if (!this._consumeTokenIfType(TokenType.CloseParenthesis)) { - this._addError(Localizer.Diagnostic.expectedCloseParen(), this._peekToken()); + this._addError(Localizer.Diagnostic.expectedCloseParen(), openParenToken); this._consumeTokensUntilType([TokenType.Colon]); } @@ -3993,7 +4154,7 @@ export class Parser { return FunctionAnnotationNode.create(openParenToken, isParamListEllipsis, paramAnnotations, returnType); } - private _parseTypeAnnotation(): ExpressionNode { + private _parseTypeAnnotation(allowUnpack = false): ExpressionNode { // Temporary set a flag that indicates we're parsing a type annotation. const wasParsingTypeAnnotation = this._isParsingTypeAnnotation; this._isParsingTypeAnnotation = true; @@ -4001,8 +4162,17 @@ export class Parser { // Allow unpack operators. 
const startToken = this._peekToken(); const isUnpack = this._consumeTokenIfOperator(OperatorType.Multiply); - let result = this._parseTestExpression(/* allowAssignmentExpression */ false); + if (isUnpack) { + if (!allowUnpack) { + this._addError(Localizer.Diagnostic.unpackInAnnotation(), startToken); + } else if (!this._parseOptions.isStubFile && this._getLanguageVersion() < PythonVersion.V3_11) { + this._addError(Localizer.Diagnostic.unpackedSubscriptIllegal(), startToken); + } + } + + let result = this._parseTestExpression(/* allowAssignmentExpression */ false); + if (isUnpack && allowUnpack) { result = UnpackNode.create(startToken, result); } @@ -4089,7 +4259,9 @@ export class Parser { stringToken.start, stringToken.length, this._parseOptions, - ParseTextMode.VariableAnnotation + ParseTextMode.VariableAnnotation, + /* initialParenDepth */ undefined, + this._typingSymbolAliases ); parseResults.diagnostics.forEach((diag) => { @@ -4112,7 +4284,9 @@ export class Parser { stringToken.start, stringToken.length, this._parseOptions, - ParseTextMode.FunctionAnnotation + ParseTextMode.FunctionAnnotation, + /* initialParenDepth */ undefined, + this._typingSymbolAliases ); parseResults.diagnostics.forEach((diag) => { @@ -4144,7 +4318,8 @@ export class Parser { segmentLength, this._parseOptions, ParseTextMode.Expression, - /* initialParenDepth */ 1 + /* initialParenDepth */ 1, + this._typingSymbolAliases ); parseResults.diagnostics.forEach((diag) => { @@ -4312,6 +4487,40 @@ export class Parser { return segmentExprLength; } + private _createBinaryOperationNode( + leftExpression: ExpressionNode, + rightExpression: ExpressionNode, + operatorToken: Token, + operator: OperatorType + ) { + // Determine if we're exceeding the max parse depth. If so, replace + // the subnode with an error node. Otherwise we risk crashing in the binder + // or type evaluator. 
+ if (leftExpression.maxChildDepth !== undefined && leftExpression.maxChildDepth >= maxChildNodeDepth) { + leftExpression = ErrorNode.create(leftExpression, ErrorExpressionCategory.MaxDepthExceeded); + this._addError(Localizer.Diagnostic.maxParseDepthExceeded(), leftExpression); + } + + if (rightExpression.maxChildDepth !== undefined && rightExpression.maxChildDepth >= maxChildNodeDepth) { + rightExpression = ErrorNode.create(rightExpression, ErrorExpressionCategory.MaxDepthExceeded); + this._addError(Localizer.Diagnostic.maxParseDepthExceeded(), rightExpression); + } + + return BinaryOperationNode.create(leftExpression, rightExpression, operatorToken, operator); + } + + private _createUnaryOperationNode(operatorToken: Token, expression: ExpressionNode, operator: OperatorType) { + // Determine if we're exceeding the max parse depth. If so, replace + // the subnode with an error node. Otherwise we risk crashing in the binder + // or type evaluator. + if (expression.maxChildDepth !== undefined && expression.maxChildDepth >= maxChildNodeDepth) { + expression = ErrorNode.create(expression, ErrorExpressionCategory.MaxDepthExceeded); + this._addError(Localizer.Diagnostic.maxParseDepthExceeded(), expression); + } + + return UnaryOperationNode.create(operatorToken, expression, operator); + } + private _parseStringList(): StringListNode { const stringList: (StringNode | FormatStringNode)[] = []; @@ -4353,7 +4562,8 @@ export class Parser { unescapedString.length, this._parseOptions, ParseTextMode.VariableAnnotation, - (stringNode.strings[0].token.flags & StringTokenFlags.Triplicate) !== 0 ? 1 : 0 + (stringNode.strings[0].token.flags & StringTokenFlags.Triplicate) !== 0 ? 
1 : 0, + this._typingSymbolAliases ); parseResults.diagnostics.forEach((diag) => { @@ -4549,13 +4759,16 @@ export class Parser { } } - private _consumeTokenIfType(tokenType: TokenType): boolean { + private _getTokenIfType(tokenType: TokenType): Token | undefined { if (this._peekTokenType() === tokenType) { - this._getNextToken(); - return true; + return this._getNextToken(); } - return false; + return undefined; + } + + private _consumeTokenIfType(tokenType: TokenType): boolean { + return !!this._getTokenIfType(tokenType); } private _consumeTokenIfKeyword(keywordType: KeywordType): boolean { @@ -4588,11 +4801,12 @@ export class Parser { } private _suppressErrors(callback: () => void) { + const errorsWereSuppressed = this._areErrorsSuppressed; try { this._areErrorsSuppressed = true; callback(); } finally { - this._areErrorsSuppressed = false; + this._areErrorsSuppressed = errorsWereSuppressed; } } diff --git a/packages/pyright-internal/src/parser/tokenizer.ts b/packages/pyright-internal/src/parser/tokenizer.ts index d798fbd0218e..d2831b288cad 100644 --- a/packages/pyright-internal/src/parser/tokenizer.ts +++ b/packages/pyright-internal/src/parser/tokenizer.ts @@ -26,6 +26,7 @@ import { import { CharacterStream } from './characterStream'; import { Comment, + CommentType, DedentToken, IdentifierToken, IndentToken, @@ -135,6 +136,8 @@ const _byteOrderMarker = 0xfeff; const _maxStringTokenLength = 32 * 1024; +export const defaultTabSize = 8; + export interface TokenizerOutput { // List of all tokens. tokens: TextRangeCollection; @@ -143,10 +146,10 @@ export interface TokenizerOutput { lines: TextRangeCollection; // Map of all line numbers that end in a "type: ignore" comment. - typeIgnoreLines: { [line: number]: boolean }; + typeIgnoreLines: Map; // Program starts with a "type: ignore" comment. - typeIgnoreAll: boolean; + typeIgnoreAll: TextRange | undefined; // Line-end sequence ('/n', '/r', or '/r/n'). 
predominantEndOfLineSequence: string; @@ -178,8 +181,8 @@ export class Tokenizer { private _parenDepth = 0; private _lineRanges: TextRange[] = []; private _indentAmounts: IndentInfo[] = []; - private _typeIgnoreAll = false; - private _typeIgnoreLines: { [line: number]: boolean } = {}; + private _typeIgnoreAll: TextRange | undefined; + private _typeIgnoreLines = new Map(); private _comments: Comment[] | undefined; // Total times CR, CR/LF, and LF are used to terminate @@ -204,7 +207,16 @@ export class Tokenizer { private _singleQuoteCount = 0; private _doubleQuoteCount = 0; - tokenize(text: string, start?: number, length?: number, initialParenDepth = 0): TokenizerOutput { + // ipython mode + private _ipythonMode = false; + + tokenize( + text: string, + start?: number, + length?: number, + initialParenDepth = 0, + ipythonMode = false + ): TokenizerOutput { if (start === undefined) { start = 0; } else if (start < 0 || start > text.length) { @@ -226,8 +238,14 @@ export class Tokenizer { this._parenDepth = initialParenDepth; this._lineRanges = []; this._indentAmounts = []; + this._ipythonMode = ipythonMode; const end = start + length; + + if (start === 0) { + this._readIndentationAfterNewLine(); + } + while (!this._cs.isEndOfStream()) { this._addNextToken(); @@ -242,7 +260,7 @@ export class Tokenizer { } // Insert any implied dedent tokens. - this._setIndent(0, 0, true, false); + this._setIndent(0, 0, /* isSpacePresent */ false, /* isTabPresent */ false); // Add a final end-of-stream token to make parsing easier. 
this._tokens.push(Token.create(TokenType.EndOfStream, this._cs.position, 0, this._getComments())); @@ -268,8 +286,8 @@ export class Tokenizer { let averageSpacePerIndent = Math.round(this._indentSpacesTotal / this._indentCount); if (averageSpacePerIndent < 1) { averageSpacePerIndent = 1; - } else if (averageSpacePerIndent > 8) { - averageSpacePerIndent = 8; + } else if (averageSpacePerIndent > defaultTabSize) { + averageSpacePerIndent = defaultTabSize; } predominantTabSequence = ''; for (let i = 0; i < averageSpacePerIndent; i++) { @@ -342,6 +360,13 @@ export class Tokenizer { return true; } + if (this._ipythonMode && this._isIPythonMagics()) { + this._handleIPythonMagics( + this._cs.currentChar === Char.Percent ? CommentType.IPythonMagic : CommentType.IPythonShellEscape + ); + return true; + } + switch (this._cs.currentChar) { case _byteOrderMarker: { // Skip the BOM if it's at the start of the file. @@ -525,7 +550,7 @@ export class Tokenizer { // Translate tabs into spaces assuming both 1-space // and 8-space tab stops. tab1Spaces++; - tab8Spaces += 8 - (tab8Spaces % 8); + tab8Spaces += defaultTabSize - (tab8Spaces % defaultTabSize); isTabPresent = true; this._cs.moveNext(); break; @@ -605,7 +630,22 @@ export class Tokenizer { this._tokens.push( IndentToken.create(this._cs.position, 0, tab8Spaces, isIndentAmbiguous, this._getComments()) ); + } else if (prevTabInfo.tab8Spaces === tab8Spaces) { + // The Python spec says that if there is ambiguity about how tabs should + // be translated into spaces because the user has intermixed tabs and + // spaces, it should be an error. We'll record this condition in the token + // so the parser can later report it. 
+ if ((prevTabInfo.isSpacePresent && isTabPresent) || (prevTabInfo.isTabPresent && isSpacePresent)) { + this._tokens.push(IndentToken.create(this._cs.position, 0, tab8Spaces, true, this._getComments())); + } } else { + // The Python spec says that if there is ambiguity about how tabs should + // be translated into spaces because the user has intermixed tabs and + // spaces, it should be an error. We'll record this condition in the token + // so the parser can later report it. + let isDedentAmbiguous = + (prevTabInfo.isSpacePresent && isTabPresent) || (prevTabInfo.isTabPresent && isSpacePresent); + // The Python spec says that dedent amounts need to match the indent // amount exactly. An error is generated at runtime if it doesn't. // We'll record that error condition within the token, allowing the @@ -627,8 +667,17 @@ export class Tokenizer { const matchesIndent = index < dedentPoints.length - 1 || dedentAmount === tab8Spaces; const actualDedentAmount = index < dedentPoints.length - 1 ? 
dedentAmount : tab8Spaces; this._tokens.push( - DedentToken.create(this._cs.position, 0, actualDedentAmount, matchesIndent, this._getComments()) + DedentToken.create( + this._cs.position, + 0, + actualDedentAmount, + matchesIndent, + isDedentAmbiguous, + this._getComments() + ) ); + + isDedentAmbiguous = false; }); } } @@ -723,9 +772,18 @@ export class Tokenizer { if (radix > 0) { const text = this._cs.getText().substr(start, this._cs.position - start); - const value = parseInt(text.substr(leadingChars).replace(/_/g, ''), radix); - if (!isNaN(value)) { - this._tokens.push(NumberToken.create(start, text.length, value, true, false, this._getComments())); + const simpleIntText = text.replace(/_/g, ''); + let intValue: number | bigint = parseInt(simpleIntText.substr(leadingChars), radix); + + if (!isNaN(intValue)) { + const bigIntValue = BigInt(simpleIntText); + if (!isFinite(intValue) || BigInt(intValue) !== bigIntValue) { + intValue = bigIntValue; + } + + this._tokens.push( + NumberToken.create(start, text.length, intValue, true, false, this._getComments()) + ); return true; } } @@ -762,16 +820,25 @@ export class Tokenizer { if (isDecimalInteger) { let text = this._cs.getText().substr(start, this._cs.position - start); - const value = parseInt(text.replace(/_/g, ''), 10); - if (!isNaN(value)) { + const simpleIntText = text.replace(/_/g, ''); + let intValue: number | bigint = parseInt(simpleIntText, 10); + + if (!isNaN(intValue)) { let isImaginary = false; + + const bigIntValue = BigInt(simpleIntText); + if (!isFinite(intValue) || BigInt(intValue) !== bigIntValue) { + intValue = bigIntValue; + } + if (this._cs.currentChar === Char.j || this._cs.currentChar === Char.J) { isImaginary = true; text += String.fromCharCode(this._cs.currentChar); this._cs.moveNext(); } + this._tokens.push( - NumberToken.create(start, text.length, value, true, isImaginary, this._getComments()) + NumberToken.create(start, text.length, intValue, true, isImaginary, this._getComments()) ); return 
true; } @@ -961,6 +1028,41 @@ export class Tokenizer { return prevComments; } + private _isIPythonMagics() { + const prevToken = this._tokens.length > 0 ? this._tokens[this._tokens.length - 1] : undefined; + return ( + (prevToken === undefined || prevToken.type === TokenType.NewLine || prevToken.type === TokenType.Indent) && + (this._cs.currentChar === Char.Percent || this._cs.currentChar === Char.ExclamationMark) + ); + } + + private _handleIPythonMagics(type: CommentType): void { + const start = this._cs.position + 1; + + let begin = start; + do { + this._cs.skipToEol(); + + const length = this._cs.position - begin; + const value = this._cs.getText().substr(begin, length); + + // is it multiline magics? + // %magic command \ + // next arguments + if (!value.match(/\\\s*$/)) { + break; + } + + begin = this._cs.position + 1; + } while (!this._cs.isEndOfStream()); + + const length = this._cs.position - start; + const value = this._cs.getText().substr(start, length); + + const comment = Comment.create(start, length, value, type); + this._addComments(comment); + } + private _handleComment(): void { const start = this._cs.position + 1; this._cs.skipToEol(); @@ -973,14 +1075,24 @@ export class Tokenizer { // ignore comments of the form ignore[errorCode, ...]. We'll treat // these as regular ignore statements (as though no errorCodes were // included). 
- if (value.match(/^\s*type:\s*ignore(\s|\[|$)/)) { + const regexMatch = value.match(/^\s*type:\s*ignore(\s|\[|$)/); + if (regexMatch) { + const textRange: TextRange = { start, length: regexMatch[0].length }; + if (regexMatch[0].endsWith('[')) { + textRange.length--; + } + if (this._tokens.findIndex((t) => t.type !== TokenType.NewLine && t && t.type !== TokenType.Indent) < 0) { - this._typeIgnoreAll = true; + this._typeIgnoreAll = textRange; } else { - this._typeIgnoreLines[this._lineRanges.length] = true; + this._typeIgnoreLines.set(this._lineRanges.length, textRange); } } + this._addComments(comment); + } + + private _addComments(comment: Comment) { if (this._comments) { this._comments.push(comment); } else { diff --git a/packages/pyright-internal/src/parser/tokenizerTypes.ts b/packages/pyright-internal/src/parser/tokenizerTypes.ts index be5972f3e829..0ae9b51442c0 100644 --- a/packages/pyright-internal/src/parser/tokenizerTypes.ts +++ b/packages/pyright-internal/src/parser/tokenizerTypes.ts @@ -165,15 +165,23 @@ export const enum StringTokenFlags { ExceedsMaxSize = 1 << 17, } +export const enum CommentType { + Regular, + IPythonMagic, + IPythonShellEscape, +} + export interface Comment extends TextRange { + readonly type: CommentType; readonly value: string; readonly start: number; readonly length: number; } export namespace Comment { - export function create(start: number, length: number, value: string) { + export function create(start: number, length: number, value: string, type = CommentType.Regular) { const comment: Comment = { + type, start, length, value, @@ -236,6 +244,7 @@ export interface DedentToken extends Token { readonly type: TokenType.Dedent; readonly indentAmount: number; readonly matchesIndent: boolean; + readonly isDedentAmbiguous: boolean; } export namespace DedentToken { @@ -244,6 +253,7 @@ export namespace DedentToken { length: number, indentAmount: number, matchesIndent: boolean, + isDedentAmbiguous: boolean, comments: Comment[] | undefined 
) { const token: DedentToken = { @@ -253,6 +263,7 @@ export namespace DedentToken { comments, indentAmount, matchesIndent, + isDedentAmbiguous, }; return token; @@ -339,7 +350,7 @@ export namespace StringToken { export interface NumberToken extends Token { readonly type: TokenType.Number; - readonly value: number; + readonly value: number | bigint; readonly isInteger: boolean; readonly isImaginary: boolean; } @@ -348,7 +359,7 @@ export namespace NumberToken { export function create( start: number, length: number, - value: number, + value: number | bigint, isInteger: boolean, isImaginary: boolean, comments: Comment[] | undefined @@ -393,11 +404,14 @@ export interface IdentifierToken extends Token { export namespace IdentifierToken { export function create(start: number, length: number, value: string, comments: Comment[] | undefined) { + // Perform "NFKC normalization", as per the Python lexical spec. + const normalizedValue = value.normalize('NFKC'); + const token: IdentifierToken = { start, length, type: TokenType.Identifier, - value, + value: normalizedValue, comments, }; diff --git a/packages/pyright-internal/src/pyright.ts b/packages/pyright-internal/src/pyright.ts index 0e1445255c42..3326d8422a06 100644 --- a/packages/pyright-internal/src/pyright.ts +++ b/packages/pyright-internal/src/pyright.ts @@ -20,7 +20,7 @@ import { CommandLineOptions, OptionDefinition } from 'command-line-args'; import { PackageTypeVerifier } from './analyzer/packageTypeVerifier'; import { AnalyzerService } from './analyzer/service'; import { CommandLineOptions as PyrightCommandLineOptions } from './common/commandLineOptions'; -import { StderrConsole } from './common/console'; +import { LogLevel, StandardConsoleWithLevel, StderrConsoleWithLevel } from './common/console'; import { Diagnostic, DiagnosticCategory } from './common/diagnostic'; import { FileDiagnostics } from './common/diagnosticSink'; import { combinePaths, normalizePath } from './common/pathUtils'; @@ -31,6 +31,7 @@ import 
{ PyrightFileSystem } from './pyrightFileSystem'; import { PackageTypeReport, TypeKnownStatus } from './analyzer/packageTypeReport'; import { createDeferred } from './common/deferred'; import { FullAccessHost } from './common/fullAccessHost'; +import { ChokidarFileWatcherProvider } from './common/chokidarFileWatcherProvider'; const toolName = 'pyright'; @@ -52,6 +53,7 @@ interface PyrightJsonResults { interface PyrightSymbolCount { withKnownType: number; + withAmbiguousType: number; withUnknownType: number; } @@ -79,6 +81,7 @@ interface PyrightPublicSymbolReport { name: string; referenceCount: number; isTypeKnown: boolean; + isTypeAmbiguous: boolean; isExported: boolean; diagnostics: PyrightJsonDiagnostic[]; alternateNames?: string[] | undefined; @@ -130,12 +133,14 @@ async function processArgs(): Promise { { name: 'project', alias: 'p', type: String }, { name: 'pythonplatform', type: String }, { name: 'pythonversion', type: String }, - { name: 'stats' }, + { name: 'skipunannotated', type: Boolean }, + { name: 'stats', type: Boolean }, { name: 'typeshed-path', alias: 't', type: String }, { name: 'venv-path', alias: 'v', type: String }, { name: 'verifytypes', type: String }, { name: 'verbose', type: Boolean }, { name: 'version', type: Boolean }, + { name: 'warnings', type: Boolean }, { name: 'watch', alias: 'w', type: Boolean }, ]; @@ -164,8 +169,15 @@ async function processArgs(): Promise { return ExitStatus.NoErrors; } + for (const [arg, value] of Object.entries(args)) { + if (value === null) { + console.error(`'${arg}' option requires a value`); + return ExitStatus.ParameterError; + } + } + if (args.outputjson) { - const incompatibleArgs = ['watch', 'stats', 'verbose', 'createstub', 'dependencies']; + const incompatibleArgs = ['stats', 'verbose', 'createstub', 'dependencies']; for (const arg of incompatibleArgs) { if (args[arg] !== undefined) { console.error(`'outputjson' option cannot be used with '${arg}' option`); @@ -175,7 +187,7 @@ async function 
processArgs(): Promise { } if (args['verifytypes'] !== undefined) { - const incompatibleArgs = ['watch', 'stats', 'createstub', 'dependencies']; + const incompatibleArgs = ['watch', 'stats', 'createstub', 'dependencies', 'skipunannotated']; for (const arg of incompatibleArgs) { if (args[arg] !== undefined) { console.error(`'verifytypes' option cannot be used with '${arg}' option`); @@ -185,7 +197,7 @@ async function processArgs(): Promise { } if (args.createstub) { - const incompatibleArgs = ['watch', 'stats', 'verifytypes', 'dependencies']; + const incompatibleArgs = ['watch', 'stats', 'verifytypes', 'dependencies', 'skipunannotated']; for (const arg of incompatibleArgs) { if (args[arg] !== undefined) { console.error(`'createstub' option cannot be used with '${arg}' option`); @@ -241,16 +253,29 @@ async function processArgs(): Promise { options.typeStubTargetImportName = args.createstub; } + options.analyzeUnannotatedFunctions = !args.skipunannotated; + if (args.verbose) { options.verboseOutput = true; } + if (args.lib) { options.useLibraryCodeForTypes = true; } + options.checkOnlyOpenFiles = false; - const output = args.outputjson ? new StderrConsole() : undefined; - const fileSystem = new PyrightFileSystem(createFromRealFileSystem(output)); + if (!!args.stats && !!args.verbose) { + options.logTypeEvaluationTime = true; + } + + const treatWarningsAsErrors = !!args.warnings; + const logLevel = options.logTypeEvaluationTime ? LogLevel.Log : LogLevel.Error; + + // If using outputjson, redirect all console output to stderr so it doesn't mess + // up the JSON output, which goes to stdout. + const output = args.outputjson ? new StderrConsoleWithLevel(logLevel) : new StandardConsoleWithLevel(logLevel); + const fileSystem = new PyrightFileSystem(createFromRealFileSystem(output, new ChokidarFileWatcherProvider(output))); // The package type verification uses a different path. 
if (args['verifytypes'] !== undefined) { @@ -293,9 +318,15 @@ async function processArgs(): Promise { results.elapsedTime ); errorCount += report.errorCount; + if (treatWarningsAsErrors) { + errorCount += report.warningCount; + } } else { const report = reportDiagnosticsAsText(results.diagnostics); errorCount += report.errorCount; + if (treatWarningsAsErrors) { + errorCount += report.warningCount; + } } } @@ -324,7 +355,7 @@ async function processArgs(): Promise { timingStats.printSummary(console); } - if (args.stats !== undefined) { + if (args.stats) { // Print the stats details. service.printStats(); timingStats.printDetails(console); @@ -357,9 +388,8 @@ function verifyPackageTypes( ignoreUnknownTypesFromImports: boolean ): ExitStatus { try { - const verifier = new PackageTypeVerifier(fileSystem); - - const report = verifier.verify(packageName, ignoreUnknownTypesFromImports); + const verifier = new PackageTypeVerifier(fileSystem, packageName, ignoreUnknownTypesFromImports); + const report = verifier.verify(); const jsonReport = buildTypeCompletenessReport(packageName, report); if (outputJson) { @@ -418,10 +448,12 @@ function buildTypeCompletenessReport(packageName: string, completenessReport: Pa pyTypedPath: completenessReport.pyTypedPath, exportedSymbolCounts: { withKnownType: 0, + withAmbiguousType: 0, withUnknownType: 0, }, otherSymbolCounts: { withKnownType: 0, + withAmbiguousType: 0, withUnknownType: 0, }, missingFunctionDocStringCount: completenessReport.missingFunctionDocStringCount, @@ -449,6 +481,7 @@ function buildTypeCompletenessReport(packageName: string, completenessReport: Pa referenceCount: symbol.referenceCount, isExported: symbol.isExported, isTypeKnown: symbol.typeKnownStatus === TypeKnownStatus.Known, + isTypeAmbiguous: symbol.typeKnownStatus === TypeKnownStatus.Ambiguous, diagnostics: symbol.diagnostics.map((diag) => convertDiagnosticToJson(diag.filePath, diag.diagnostic)), }; @@ -466,6 +499,12 @@ function 
buildTypeCompletenessReport(packageName: string, completenessReport: Pa } else { report.typeCompleteness!.otherSymbolCounts.withKnownType++; } + } else if (symbol.typeKnownStatus === TypeKnownStatus.Ambiguous) { + if (symbol.isExported) { + report.typeCompleteness!.exportedSymbolCounts.withAmbiguousType++; + } else { + report.typeCompleteness!.otherSymbolCounts.withAmbiguousType++; + } } else { if (symbol.isExported) { report.typeCompleteness!.exportedSymbolCounts.withUnknownType++; @@ -476,8 +515,10 @@ function buildTypeCompletenessReport(packageName: string, completenessReport: Pa }); const unknownSymbolCount = report.typeCompleteness.exportedSymbolCounts.withUnknownType; + const ambiguousSymbolCount = report.typeCompleteness.exportedSymbolCounts.withAmbiguousType; const knownSymbolCount = report.typeCompleteness.exportedSymbolCounts.withKnownType; - const totalSymbolCount = unknownSymbolCount + knownSymbolCount; + const totalSymbolCount = unknownSymbolCount + ambiguousSymbolCount + knownSymbolCount; + if (totalSymbolCount > 0) { report.typeCompleteness!.completenessScore = knownSymbolCount / totalSymbolCount; } @@ -509,7 +550,7 @@ function printTypeCompletenessReportText(results: PyrightJsonResults, verboseOut // Print list of all symbols. if (completenessReport.symbols.length > 0 && verboseOutput) { console.log(''); - console.log(`Exported symbols: ${completenessReport.symbols.length}`); + console.log(`Exported symbols: ${completenessReport.symbols.filter((sym) => sym.isExported).length}`); completenessReport.symbols.forEach((symbol) => { if (symbol.isExported) { const refCount = symbol.referenceCount > 1 ? 
` (${symbol.referenceCount} references)` : ''; @@ -518,7 +559,7 @@ function printTypeCompletenessReportText(results: PyrightJsonResults, verboseOut }); console.log(''); - console.log(`Other referenced symbols: ${completenessReport.symbols.length}`); + console.log(`Other referenced symbols: ${completenessReport.symbols.filter((sym) => !sym.isExported).length}`); completenessReport.symbols.forEach((symbol) => { if (!symbol.isExported) { const refCount = symbol.referenceCount > 1 ? ` (${symbol.referenceCount} references)` : ''; @@ -553,11 +594,13 @@ function printTypeCompletenessReportText(results: PyrightJsonResults, verboseOut console.log( `Symbols exported by "${completenessReport.packageName}": ${ completenessReport.exportedSymbolCounts.withKnownType + + completenessReport.exportedSymbolCounts.withAmbiguousType + completenessReport.exportedSymbolCounts.withUnknownType }` ); console.log(` With known type: ${completenessReport.exportedSymbolCounts.withKnownType}`); - console.log(` With partially unknown type: ${completenessReport.exportedSymbolCounts.withUnknownType}`); + console.log(` With ambiguous type: ${completenessReport.exportedSymbolCounts.withAmbiguousType}`); + console.log(` With unknown type: ${completenessReport.exportedSymbolCounts.withUnknownType}`); if (completenessReport.ignoreUnknownTypesFromImports) { console.log(` (Ignoring unknown types imported from other packages)`); } @@ -567,11 +610,14 @@ function printTypeCompletenessReportText(results: PyrightJsonResults, verboseOut console.log(''); console.log( `Other symbols referenced but not exported by "${completenessReport.packageName}": ${ - completenessReport.otherSymbolCounts.withKnownType + completenessReport.otherSymbolCounts.withUnknownType + completenessReport.otherSymbolCounts.withKnownType + + completenessReport.otherSymbolCounts.withAmbiguousType + + completenessReport.otherSymbolCounts.withUnknownType }` ); console.log(` With known type: 
${completenessReport.otherSymbolCounts.withKnownType}`); - console.log(` With partially unknown type: ${completenessReport.otherSymbolCounts.withUnknownType}`); + console.log(` With ambiguous type: ${completenessReport.otherSymbolCounts.withAmbiguousType}`); + console.log(` With unknown type: ${completenessReport.otherSymbolCounts.withUnknownType}`); console.log(''); console.log(`Type completeness score: ${Math.round(completenessReport.completenessScore * 1000) / 10}%`); console.log(''); @@ -594,12 +640,14 @@ function printUsage() { ' -p,--project Use the configuration file at this location\n' + ' --pythonplatform Analyze for a specific platform (Darwin, Linux, Windows)\n' + ' --pythonversion Analyze for a specific version (3.3, 3.4, etc.)\n' + + ' --skipunannotated Do not analyze functions and methods with no type annotations\n' + ' --stats Print detailed performance stats\n' + ' -t,--typeshed-path Use typeshed type stubs at this location\n' + ' -v,--venv-path Directory that contains virtual environments\n' + ' --verbose Emit verbose diagnostics\n' + ' --verifytypes Verify type completeness of a py.typed package\n' + ' --version Print Pyright version\n' + + ' --warnings Use exit code of 1 if warnings are reported\n' + ' -w,--watch Continue to run and watch for changes\n' ); } @@ -677,9 +725,9 @@ function reportDiagnosticsAsText(fileDiagnostics: FileDiagnostics[]): Diagnostic let informationCount = 0; fileDiagnostics.forEach((fileDiagnostics) => { - // Don't report unused code diagnostics. + // Don't report unused code or deprecated diagnostics. 
const fileErrorsAndWarnings = fileDiagnostics.diagnostics.filter( - (diag) => diag.category !== DiagnosticCategory.UnusedCode + (diag) => diag.category !== DiagnosticCategory.UnusedCode && diag.category !== DiagnosticCategory.Deprecated ); if (fileErrorsAndWarnings.length > 0) { @@ -701,7 +749,7 @@ function reportDiagnosticsAsText(fileDiagnostics: FileDiagnostics[]): Diagnostic console.log( `${errorCount.toString()} ${errorCount === 1 ? 'error' : 'errors'}, ` + `${warningCount.toString()} ${warningCount === 1 ? 'warning' : 'warnings'}, ` + - `${informationCount.toString()} ${informationCount === 1 ? 'info' : 'infos'} ` + `${informationCount.toString()} ${informationCount === 1 ? 'information' : 'informations'} ` ); return { @@ -732,7 +780,7 @@ function logDiagnosticToConsole(diag: PyrightJsonDiagnostic, prefix = ' ') { ? chalk.red('error') : diag.severity === 'warning' ? chalk.cyan('warning') - : chalk.blue('info'); + : chalk.blue('information'); message += `: ${firstLine}`; if (remainingLines.length > 0) { message += '\n' + prefix + remainingLines.join('\n' + prefix); diff --git a/packages/pyright-internal/src/pyrightFileSystem.ts b/packages/pyright-internal/src/pyrightFileSystem.ts index de59c4d08238..d5e6381cbd2c 100644 --- a/packages/pyright-internal/src/pyrightFileSystem.ts +++ b/packages/pyright-internal/src/pyrightFileSystem.ts @@ -13,36 +13,18 @@ import type * as fs from 'fs'; import { getPyTypedInfo } from './analyzer/pyTypedUtils'; import { ExecutionEnvironment } from './common/configOptions'; -import { - FileSystem, - FileWatcher, - FileWatcherEventHandler, - MkDirOptions, - Stats, - TmpfileOptions, - VirtualDirent, -} from './common/fileSystem'; +import { FileSystem, MkDirOptions } from './common/fileSystem'; import { stubsSuffix } from './common/pathConsts'; import { changeAnyExtension, combinePaths, ensureTrailingDirectorySeparator, - getDirectoryPath, - getFileName, isDirectory, tryStat, } from './common/pathUtils'; +import { 
ReadOnlyAugmentedFileSystem } from './readonlyAugmentedFileSystem'; -export class PyrightFileSystem implements FileSystem { - // Mapped file to original file map - private readonly _fileMap = new Map(); - - // Original file to mapped file map - private readonly _reverseFileMap = new Map(); - - // Mapped files per a containing folder map - private readonly _folderMap = new Map(); - +export class PyrightFileSystem extends ReadOnlyAugmentedFileSystem { // Root paths processed private readonly _rootSearched = new Set(); @@ -55,118 +37,35 @@ export class PyrightFileSystem implements FileSystem { private readonly _customUriMap = new Map(); - constructor(private _realFS: FileSystem) {} - - existsSync(path: string): boolean { - if (this._isVirtualEntry(path)) { - // Pretend partial stub folder and its files not exist - return false; - } - - return this._realFS.existsSync(this._getPartialStubOriginalPath(path)); + constructor(realFS: FileSystem) { + super(realFS); } - mkdirSync(path: string, options?: MkDirOptions): void { + override mkdirSync(path: string, options?: MkDirOptions): void { this._realFS.mkdirSync(path, options); } - chdir(path: string): void { + override chdir(path: string): void { this._realFS.chdir(path); } - readdirEntriesSync(path: string): fs.Dirent[] { - const entries = this._realFS.readdirEntriesSync(path).filter((item) => { - // Filter out the stub package directory. - return !this._isVirtualEntry(combinePaths(path, item.name)); - }); - - const partialStubs = this._folderMap.get(ensureTrailingDirectorySeparator(path)); - if (!partialStubs) { - return entries; - } - - return entries.concat(partialStubs.map((f) => new VirtualDirent(f, /* file */ true))); - } - - readdirSync(path: string): string[] { - const entries = this._realFS.readdirSync(path).filter((item) => { - // Filter out the stub package directory. 
- return !this._isVirtualEntry(combinePaths(path, item)); - }); - - const partialStubs = this._folderMap.get(ensureTrailingDirectorySeparator(path)); - if (!partialStubs) { - return entries; - } - - return entries.concat(partialStubs); - } - - readFileSync(path: string, encoding?: null): Buffer; - readFileSync(path: string, encoding: BufferEncoding): string; - readFileSync(path: string, encoding?: BufferEncoding | null): string | Buffer { - return this._realFS.readFileSync(this._getPartialStubOriginalPath(path), encoding); - } - - writeFileSync(path: string, data: string | Buffer, encoding: BufferEncoding | null): void { - this._realFS.writeFileSync(this._getPartialStubOriginalPath(path), data, encoding); - } - - statSync(path: string): Stats { - return this._realFS.statSync(this._getPartialStubOriginalPath(path)); - } - - unlinkSync(path: string): void { - this._realFS.unlinkSync(this._getPartialStubOriginalPath(path)); - } - - realpathSync(path: string): string { - return this._realFS.realpathSync(path); - } - - getModulePath(): string { - return this._realFS.getModulePath(); - } - - createFileSystemWatcher(paths: string[], listener: FileWatcherEventHandler): FileWatcher { - return this._realFS.createFileSystemWatcher(paths, listener); - } - - createReadStream(path: string): fs.ReadStream { - return this._realFS.createReadStream(this._getPartialStubOriginalPath(path)); - } - - createWriteStream(path: string): fs.WriteStream { - return this._realFS.createWriteStream(this._getPartialStubOriginalPath(path)); - } - - copyFileSync(src: string, dst: string): void { - this._realFS.copyFileSync(this._getPartialStubOriginalPath(src), this._getPartialStubOriginalPath(dst)); + override writeFileSync(path: string, data: string | Buffer, encoding: BufferEncoding | null): void { + this._realFS.writeFileSync(this._getOriginalPath(path), data, encoding); } - // Async I/O - readFile(path: string): Promise { - return this._realFS.readFile(this._getPartialStubOriginalPath(path)); + 
override unlinkSync(path: string): void { + this._realFS.unlinkSync(this._getOriginalPath(path)); } - readFileText(path: string, encoding?: BufferEncoding): Promise { - return this._realFS.readFileText(this._getPartialStubOriginalPath(path), encoding); + override createWriteStream(path: string): fs.WriteStream { + return this._realFS.createWriteStream(this._getOriginalPath(path)); } - // The directory returned by tmpdir must exist and be the same each time tmpdir is called. - tmpdir(): string { - return this._realFS.tmpdir(); + override copyFileSync(src: string, dst: string): void { + this._realFS.copyFileSync(this._getOriginalPath(src), this._getOriginalPath(dst)); } - tmpfile(options?: TmpfileOptions): string { - return this._realFS.tmpfile(options); - } - - realCasePath(path: string): string { - return this._realFS.realCasePath(path); - } - - getUri(originalPath: string): string { + override getUri(originalPath: string): string { const entry = this._customUriMap.get(this.getMappedFilePath(originalPath)); if (entry) { return entry.uri; @@ -310,7 +209,7 @@ export class PyrightFileSystem implements FileSystem { // not the other way around. what that means is that even if a user opens // the file explicitly from real file system (ex, vscode open file), it won't // go to the fake tmp py file. but open as it is. - this._recordVirtualFile(tmpPyFile, originalPyiFile, /* reversible */ false); + this._recordMovedEntry(tmpPyFile, originalPyiFile, /* reversible */ false); // This should be the only way to get to the tmp py file. and used internally // to get some info like doc string of compiled module. 
@@ -319,7 +218,7 @@ export class PyrightFileSystem implements FileSystem { continue; } - this._recordVirtualFile(mappedPyiFile, originalPyiFile); + this._recordMovedEntry(mappedPyiFile, originalPyiFile); } } catch { // ignore @@ -330,8 +229,7 @@ export class PyrightFileSystem implements FileSystem { } clearPartialStubs(): void { - this._fileMap.clear(); - this._folderMap.clear(); + super._clear(); this._rootSearched.clear(); this._partialStubPackagePaths.clear(); @@ -339,56 +237,12 @@ export class PyrightFileSystem implements FileSystem { this._conflictMap.clear(); } - // See whether the file is mapped to another location. - isMappedFilePath(filepath: string): boolean { - return this._fileMap.has(filepath) || this._realFS.isMappedFilePath(filepath); - } - - // Get original filepath if the given filepath is mapped. - getOriginalFilePath(mappedFilePath: string) { - return this._realFS.getOriginalFilePath(this._getPartialStubOriginalPath(mappedFilePath)); - } - - // Get mapped filepath if the given filepath is mapped. - getMappedFilePath(originalFilepath: string) { - const mappedFilePath = this._realFS.getMappedFilePath(originalFilepath); - return this._reverseFileMap.get(mappedFilePath) ?? mappedFilePath; - } - // If we have a conflict file from the partial stub packages for the given file path, // return it. 
getConflictedFile(filepath: string) { return this._conflictMap.get(filepath); } - isInZipOrEgg(path: string): boolean { - return this._realFS.isInZipOrEgg(path); - } - - private _recordVirtualFile(mappedFile: string, originalFile: string, reversible = true) { - this._fileMap.set(mappedFile, originalFile); - - if (reversible) { - this._reverseFileMap.set(originalFile, mappedFile); - } - - const directory = ensureTrailingDirectorySeparator(getDirectoryPath(mappedFile)); - let folderInfo = this._folderMap.get(directory); - if (!folderInfo) { - folderInfo = []; - this._folderMap.set(directory, folderInfo); - } - - const fileName = getFileName(mappedFile); - if (!folderInfo.some((entry) => entry === fileName)) { - folderInfo.push(fileName); - } - } - - private _getPartialStubOriginalPath(mappedFilePath: string) { - return this._fileMap.get(mappedFilePath) ?? mappedFilePath; - } - private _getRelativePathPartialStubs(path: string) { const paths: string[] = []; @@ -424,7 +278,7 @@ export class PyrightFileSystem implements FileSystem { return paths; } - private _isVirtualEntry(path: string) { - return this._partialStubPackagePaths.has(path) || this._reverseFileMap.has(path); + protected override _isMovedEntry(path: string) { + return this._partialStubPackagePaths.has(path) || super._isMovedEntry(path); } } diff --git a/packages/pyright-internal/src/readonlyAugmentedFileSystem.ts b/packages/pyright-internal/src/readonlyAugmentedFileSystem.ts new file mode 100644 index 000000000000..219f8a08ea78 --- /dev/null +++ b/packages/pyright-internal/src/readonlyAugmentedFileSystem.ts @@ -0,0 +1,199 @@ +/* + * readonlyAugmentedFileSystem.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * A file system that lets one to augment backing file system but not allow + * modifying the backing file system. 
+ */ + +import type * as fs from 'fs'; + +import { getOrAdd } from './common/collectionUtils'; +import { + FileSystem, + FileWatcher, + FileWatcherEventHandler, + MkDirOptions, + Stats, + TmpfileOptions, + VirtualDirent, +} from './common/fileSystem'; +import { combinePaths, ensureTrailingDirectorySeparator, getDirectoryPath, getFileName } from './common/pathUtils'; + +export class ReadOnlyAugmentedFileSystem implements FileSystem { + // Mapped file to original file map + private readonly _entryMap = new Map(); + + // Original file to mapped file map + private readonly _reverseEntryMap = new Map(); + + // Mapped files per a containing folder map + private readonly _folderMap = new Map(); + + constructor(protected _realFS: FileSystem) {} + + existsSync(path: string): boolean { + if (this._isMovedEntry(path)) { + // Pretend partial stub folder and its files not exist + return false; + } + + return this._realFS.existsSync(this._getOriginalPath(path)); + } + + mkdirSync(path: string, options?: MkDirOptions): void { + throw new Error('Operation is not allowed.'); + } + + chdir(path: string): void { + throw new Error('Operation is not allowed.'); + } + + readdirEntriesSync(path: string): fs.Dirent[] { + const maybeDirectory = ensureTrailingDirectorySeparator(path); + + const entries: fs.Dirent[] = []; + const movedEntries = this._folderMap.get(maybeDirectory); + if (!movedEntries || this._realFS.existsSync(path)) { + entries.push( + ...this._realFS.readdirEntriesSync(path).filter((item) => { + // Filter out the stub package directory. 
+ return !this._isMovedEntry(combinePaths(path, item.name)); + }) + ); + } + + if (!movedEntries) { + return entries; + } + + return entries.concat(movedEntries.map((e) => new VirtualDirent(e.name, e.isFile))); + } + + readdirSync(path: string): string[] { + return this.readdirEntriesSync(path).map((p) => p.name); + } + + readFileSync(path: string, encoding?: null): Buffer; + readFileSync(path: string, encoding: BufferEncoding): string; + readFileSync(path: string, encoding?: BufferEncoding | null): string | Buffer { + return this._realFS.readFileSync(this._getOriginalPath(path), encoding); + } + + writeFileSync(path: string, data: string | Buffer, encoding: BufferEncoding | null): void { + throw new Error('Operation is not allowed.'); + } + + statSync(path: string): Stats { + return this._realFS.statSync(this._getOriginalPath(path)); + } + + unlinkSync(path: string): void { + throw new Error('Operation is not allowed.'); + } + + realpathSync(path: string): string { + if (this._entryMap.has(path)) { + return path; + } + + return this._realFS.realpathSync(path); + } + + getModulePath(): string { + return this._realFS.getModulePath(); + } + + createFileSystemWatcher(paths: string[], listener: FileWatcherEventHandler): FileWatcher { + return this._realFS.createFileSystemWatcher(paths, listener); + } + + createReadStream(path: string): fs.ReadStream { + return this._realFS.createReadStream(this._getOriginalPath(path)); + } + + createWriteStream(path: string): fs.WriteStream { + throw new Error('Operation is not allowed.'); + } + + copyFileSync(src: string, dst: string): void { + throw new Error('Operation is not allowed.'); + } + + // Async I/O + readFile(path: string): Promise { + return this._realFS.readFile(this._getOriginalPath(path)); + } + + readFileText(path: string, encoding?: BufferEncoding): Promise { + return this._realFS.readFileText(this._getOriginalPath(path), encoding); + } + + // The directory returned by tmpdir must exist and be the same each time 
tmpdir is called. + tmpdir(): string { + return this._realFS.tmpdir(); + } + + tmpfile(options?: TmpfileOptions): string { + return this._realFS.tmpfile(options); + } + + realCasePath(path: string): string { + return this._realFS.realCasePath(path); + } + + getUri(originalPath: string): string { + return this._realFS.getUri(originalPath); + } + + // See whether the file is mapped to another location. + isMappedFilePath(filepath: string): boolean { + return this._entryMap.has(filepath) || this._realFS.isMappedFilePath(filepath); + } + + // Get original filepath if the given filepath is mapped. + getOriginalFilePath(mappedFilePath: string) { + return this._realFS.getOriginalFilePath(this._getOriginalPath(mappedFilePath)); + } + + // Get mapped filepath if the given filepath is mapped. + getMappedFilePath(originalFilepath: string) { + const mappedFilePath = this._realFS.getMappedFilePath(originalFilepath); + return this._reverseEntryMap.get(mappedFilePath) ?? mappedFilePath; + } + + isInZipOrEgg(path: string): boolean { + return this._realFS.isInZipOrEgg(path); + } + + protected _recordMovedEntry(mappedPath: string, originalPath: string, reversible = true, isFile = true) { + this._entryMap.set(mappedPath, originalPath); + + if (reversible) { + this._reverseEntryMap.set(originalPath, mappedPath); + } + + const directory = ensureTrailingDirectorySeparator(getDirectoryPath(mappedPath)); + const folderInfo = getOrAdd(this._folderMap, directory, () => []); + + const name = getFileName(mappedPath); + if (!folderInfo.some((entry) => entry.name === name)) { + folderInfo.push({ name, isFile }); + } + } + + protected _getOriginalPath(mappedFilePath: string) { + return this._entryMap.get(mappedFilePath) ?? 
mappedFilePath; + } + + protected _isMovedEntry(path: string) { + return this._reverseEntryMap.has(path); + } + + protected _clear() { + this._entryMap.clear(); + this._reverseEntryMap.clear(); + this._folderMap.clear(); + } +} diff --git a/packages/pyright-internal/src/server.ts b/packages/pyright-internal/src/server.ts index 99d8ab70d586..2dd3ba096df1 100644 --- a/packages/pyright-internal/src/server.ts +++ b/packages/pyright-internal/src/server.ts @@ -29,7 +29,7 @@ import { FileBasedCancellationProvider } from './common/fileBasedCancellationUti import { FileSystem } from './common/fileSystem'; import { FullAccessHost } from './common/fullAccessHost'; import { Host } from './common/host'; -import { convertUriToPath, resolvePaths } from './common/pathUtils'; +import { resolvePaths } from './common/pathUtils'; import { ProgressReporter } from './common/progressReporter'; import { createFromRealFileSystem, WorkspaceFileWatcherProvider } from './common/realFileSystem'; import { LanguageServerBase, ServerSettings, WorkspaceServiceInstance } from './languageServerBase'; @@ -61,7 +61,7 @@ export class PyrightServer extends LanguageServerBase { rootDirectory, version, workspaceMap, - fileSystem, + fileSystem: fileSystem, fileWatcherProvider, cancellationProvider: new FileBasedCancellationProvider('bg'), maxAnalysisTimeInForeground, @@ -240,7 +240,7 @@ export class PyrightServer extends LanguageServerBase { ): Promise<(Command | CodeAction)[] | undefined | null> { this.recordUserInteractionTime(); - const filePath = convertUriToPath(this.fs, params.textDocument.uri); + const filePath = this._uriParser.decodeTextDocumentUri(params.textDocument.uri); const workspace = await this.getWorkspaceForFile(filePath); return CodeActionProvider.getCodeActionsForPosition(workspace, filePath, params.range, token); } diff --git a/packages/pyright-internal/src/tests/chainedSourceFiles.test.ts b/packages/pyright-internal/src/tests/chainedSourceFiles.test.ts new file mode 100644 index 
000000000000..824eaa68fade --- /dev/null +++ b/packages/pyright-internal/src/tests/chainedSourceFiles.test.ts @@ -0,0 +1,183 @@ +/* + * chainedSourceFiles.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for tokenizer ipython mode + */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-jsonrpc'; +import { MarkupKind } from 'vscode-languageserver-types'; + +import { Program } from '../analyzer/program'; +import { AnalyzerService } from '../analyzer/service'; +import { ConfigOptions } from '../common/configOptions'; +import { NullConsole } from '../common/console'; +import { normalizeSlashes } from '../common/pathUtils'; +import { convertOffsetsToRange, convertOffsetToPosition } from '../common/positionUtils'; +import { parseTestData } from './harness/fourslash/fourSlashParser'; +import { TestAccessHost } from './harness/testAccessHost'; +import * as host from './harness/testHost'; +import { createFromFileSystem, distlibFolder, libFolder } from './harness/vfs/factory'; +import * as vfs from './harness/vfs/filesystem'; + +test('check chained files', async () => { + const code = ` +// @filename: test1.py +//// def foo1(): pass + +// @filename: test2.py +//// def foo2(): pass + +// @filename: test3.py +//// def foo3(): pass + +// @filename: test4.py +//// [|foo/*marker*/|] + `; + + const basePath = normalizeSlashes('/'); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + + const marker = data.markerPositions.get('marker')!; + + const parseResult = service.getParseResult(marker.fileName)!; + const result = await service.getCompletionsForPosition( + marker.fileName, + convertOffsetToPosition(marker.position, parseResult.tokenizerOutput.lines), + basePath, + { + format: MarkupKind.Markdown, + lazyEdit: false, + snippet: false, + autoImport: false, + }, + undefined, + CancellationToken.None + ); + + assert(result?.completionList?.items.some((i) => i.label === 
'foo1')); + assert(result?.completionList?.items.some((i) => i.label === 'foo2')); + assert(result?.completionList?.items.some((i) => i.label === 'foo3')); +}); + +test('modify chained files', async () => { + const code = ` +// @filename: test1.py +//// def foo1(): pass + +// @filename: test2.py +//// [|/*delete*/|] +//// def foo2(): pass + +// @filename: test3.py +//// def foo3(): pass + +// @filename: test4.py +//// [|foo/*marker*/|] + `; + + const basePath = normalizeSlashes('/'); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + + // Make sure files are all realized. + const marker = data.markerPositions.get('marker')!; + const parseResult = service.getParseResult(marker.fileName)!; + + // Close file in the middle of the chain + service.setFileClosed(data.markerPositions.get('delete')!.fileName); + + const result = await service.getCompletionsForPosition( + marker.fileName, + convertOffsetToPosition(marker.position, parseResult.tokenizerOutput.lines), + basePath, + { + format: MarkupKind.Markdown, + lazyEdit: false, + snippet: false, + autoImport: false, + }, + undefined, + CancellationToken.None + ); + + assert(result); + + assert(!result.completionList?.items.some((i) => i.label === 'foo1')); + assert(!result.completionList?.items.some((i) => i.label === 'foo2')); + assert(result.completionList?.items.some((i) => i.label === 'foo3')); +}); + +test('modify chained files', async () => { + const code = ` +// @filename: test1.py +//// [|/*changed*/|] +//// def foo1(): pass + +// @filename: test2.py +//// def foo2(): pass + +// @filename: test3.py +//// def foo3(): pass + +// @filename: test4.py +//// [|/*marker*/foo1()|] + `; + + const basePath = normalizeSlashes('/'); + const { data, service } = createServiceWithChainedSourceFiles(basePath, code); + + const marker = data.markerPositions.get('marker')!; + const range = data.ranges.find((r) => r.marker === marker)!; + + const parseResults = service.getParseResult(marker.fileName)!; 
+ analyze(service.test_program); + + // Initially, there should be no error. + const initialDiags = await service.getDiagnosticsForRange( + marker.fileName, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + CancellationToken.None + ); + + assert.strictEqual(initialDiags.length, 0); + + // Change test1 content + service.updateOpenFileContents(data.markerPositions.get('changed')!.fileName, 2, [{ text: 'def foo5(): pass' }]); + analyze(service.test_program); + + const finalDiags = await service.getDiagnosticsForRange( + marker.fileName, + convertOffsetsToRange(range.pos, range.end, parseResults.tokenizerOutput.lines), + CancellationToken.None + ); + + assert.strictEqual(finalDiags.length, 1); +}); + +function createServiceWithChainedSourceFiles(basePath: string, code: string) { + const service = new AnalyzerService( + 'test service', + createFromFileSystem(host.HOST, /*ignoreCase*/ false, { cwd: basePath }), + new NullConsole(), + () => new TestAccessHost(vfs.MODULE_PATH, [libFolder, distlibFolder]), + AnalyzerService.createImportResolver, + new ConfigOptions(basePath) + ); + + const data = parseTestData(basePath, code, ''); + + let chainedFilePath: string | undefined; + for (const file of data.files) { + service.setFileOpened(file.fileName, 1, file.content, false, chainedFilePath); + chainedFilePath = file.fileName; + } + return { data, service }; +} + +function analyze(program: Program) { + while (program.analyze()) { + // Process all queued items + } +} diff --git a/packages/pyright-internal/src/tests/checker.test.ts b/packages/pyright-internal/src/tests/checker.test.ts index 7c3194a9b09e..aa6d8d2be33d 100644 --- a/packages/pyright-internal/src/tests/checker.test.ts +++ b/packages/pyright-internal/src/tests/checker.test.ts @@ -116,7 +116,7 @@ test('Constants1', () => { test('NoReturn1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn1.py']); - TestUtils.validateResults(analysisResults, 3); + 
TestUtils.validateResults(analysisResults, 4); }); test('NoReturn2', () => { @@ -125,6 +125,19 @@ test('NoReturn2', () => { TestUtils.validateResults(analysisResults, 0); }); +// This test is commented out for now because we needed to revert the fix. +// test('NoReturn3', () => { +// const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn3.py']); + +// TestUtils.validateResults(analysisResults, 0); +// }); + +test('NoReturn4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['noreturn4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('With1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with1.py']); @@ -140,7 +153,7 @@ test('With2', () => { test('With3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with3.py']); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 4); }); test('With4', () => { @@ -155,6 +168,12 @@ test('With4', () => { TestUtils.validateResults(analysisResults2, 0); }); +test('With5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['with5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Mro1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['mro1.py']); @@ -173,6 +192,12 @@ test('Mro3', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Mro4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['mro4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + test('DefaultInitializer1', () => { const configOptions = new ConfigOptions('.'); @@ -195,7 +220,7 @@ test('UnnecessaryIsInstance1', () => { // Turn on errors. 
configOptions.diagnosticRuleSet.reportUnnecessaryIsInstance = 'error'; analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryIsInstance1.py'], configOptions); - TestUtils.validateResults(analysisResults, 4); + TestUtils.validateResults(analysisResults, 5); }); test('UnnecessaryIsSubclass1', () => { @@ -210,7 +235,7 @@ test('UnnecessaryIsSubclass1', () => { TestUtils.validateResults(analysisResults, 2); }); -test('UnnecessaryCast', () => { +test('UnnecessaryCast1', () => { const configOptions = new ConfigOptions('.'); let analysisResults = TestUtils.typeAnalyzeSampleFiles(['unnecessaryCast1.py'], configOptions); @@ -258,6 +283,28 @@ test('TypeIgnore3', () => { TestUtils.validateResults(analysisResults, 4); }); +test('TypeIgnore4', () => { + const configOptions = new ConfigOptions('.'); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore4.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore4.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypeIgnore5', () => { + const configOptions = new ConfigOptions('.'); + + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + configOptions.diagnosticRuleSet.reportUnnecessaryTypeIgnoreComment = 'warning'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeIgnore5.py'], configOptions); + TestUtils.validateResults(analysisResults, 0, 1); +}); + test('DuplicateImports1', () => { const configOptions = new ConfigOptions('.'); @@ -297,16 +344,22 @@ test('Python2', () => { TestUtils.validateResults(analysisResults, 6); }); -test('InconsistentSpaceTab', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentSpaceTab.py']); +test('InconsistentSpaceTab1', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['inconsistentSpaceTab1.py']); TestUtils.validateResults(analysisResults, 4); }); +test('InconsistentSpaceTab2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentSpaceTab2.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + test('DuplicateDeclaration1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['duplicateDeclaration1.py']); - TestUtils.validateResults(analysisResults, 6); + TestUtils.validateResults(analysisResults, 10); }); test('DuplicateDeclaration2', () => { @@ -314,3 +367,31 @@ test('DuplicateDeclaration2', () => { TestUtils.validateResults(analysisResults, 4); }); + +test('Strings1', () => { + const configOptions = new ConfigOptions('.'); + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['strings1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportImplicitStringConcatenation = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['strings1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 2); +}); + +// For now, this functionality is disabled. 
+ +// test('Deprecated1', () => { +// const configOptions = new ConfigOptions('.'); + +// configOptions.defaultPythonVersion = PythonVersion.V3_8; +// const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['deprecated1.py'], configOptions); +// TestUtils.validateResults(analysisResults1, 0, 0, 0, 0, 0); + +// configOptions.defaultPythonVersion = PythonVersion.V3_9; +// const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['deprecated1.py'], configOptions); +// TestUtils.validateResults(analysisResults2, 0, 0, 0, 0, 11); + +// configOptions.defaultPythonVersion = PythonVersion.V3_10; +// const analysisResults3 = TestUtils.typeAnalyzeSampleFiles(['deprecated1.py'], configOptions); +// TestUtils.validateResults(analysisResults3, 0, 0, 0, 0, 13); +// }); diff --git a/packages/pyright-internal/src/tests/documentSymbolCollector.test.ts b/packages/pyright-internal/src/tests/documentSymbolCollector.test.ts new file mode 100644 index 000000000000..9cda22b8b7c8 --- /dev/null +++ b/packages/pyright-internal/src/tests/documentSymbolCollector.test.ts @@ -0,0 +1,450 @@ +/* + * documentSymbolCollector.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests documentSymbolCollector + */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-languageserver'; + +import { findNodeByOffset } from '../analyzer/parseTreeUtils'; +import { Program } from '../analyzer/program'; +import { createMapFromItems } from '../common/collectionUtils'; +import { ConfigOptions } from '../common/configOptions'; +import { TextRange } from '../common/textRange'; +import { DocumentSymbolCollector } from '../languageService/documentSymbolCollector'; +import { NameNode } from '../parser/parseNodes'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('folder reference', () => { + const code = ` +// @filename: common/__init__.py +//// from [|io2|] import tools as tools +//// from [|io2|].tools import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// tools.combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .[|io2|] import tools as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .[|io2|].tools import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import tools, pathUtils +//// +//// tools.combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from [|io2|] import tools as tools +//// from [|io2|].tools import pathUtils as pathUtils +//// +//// tools.combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + + const ranges = state.getRangesByText().get('io2')!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, 'io2', range.fileName, range.pos, ranges); + } +}); + +test('__init__ wildcard 
import', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import [|tools|] as [|tools|] +//// from io2.[|tools|] import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .io2 import [|tools|] as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.[|tools|] import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import [|tools|], pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from io2 import [|tools|] as [|tools|] +//// from io2.[|tools|] import pathUtils as pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + + const ranges = state.getRangesByText().get('tools')!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, 'tools', range.fileName, range.pos, ranges); + } +}); + +test('submodule wildcard import', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import tools as tools +//// from io2.tools import [|pathUtils|] as [|pathUtils|] + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// tools.combine(1, 1) +//// [|pathUtils|].getFilename("c") + +// @filename: test2.py +//// from .io2 import tools as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.tools import [|pathUtils|] as p +//// +//// p.getFilename("c") + +// 
@filename: test4.py +//// from common import tools, [|pathUtils|] +//// +//// tools.combine(1, 1) +//// [|pathUtils|].getFilename("c") + +// @filename: test5.py +//// from io2 import tools as tools +//// from io2.tools import [|pathUtils|] as [|pathUtils|] +//// +//// tools.combine(1, 1) +//// [|pathUtils|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + + const ranges = state.getRangesByText().get('pathUtils')!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, 'pathUtils', range.fileName, range.pos, ranges); + } +}); + +test('use localName import alias', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import tools as [|/*marker1*/tools|] +//// from io2.tools import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// [|/*marker2*/tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .io2 import tools as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.tools import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import [|/*marker3*/tools|], pathUtils +//// +//// [|/*marker4*/tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from io2 import tools as [|/*marker5*/tools|] +//// from io2.tools import pathUtils as pathUtils +//// +//// [|/*marker6*/tools|].combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const references = state + .getRangesByText() + .get('tools')! 
+ .map((r) => ({ path: r.fileName, range: state.convertPositionRange(r) })); + + state.verifyFindAllReferences({ + marker1: { references }, + marker2: { references }, + marker3: { references }, + marker4: { references }, + marker5: { references }, + marker6: { references }, + }); +}); + +test('use localName import module', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import [|/*marker1*/tools|] as [|tools|] +//// from io2.[|/*marker2*/tools|] import pathUtils as pathUtils + +// @filename: io2/empty.py +//// # empty + +// @filename: io2/tools/__init__.py +//// def combine(a, b): +//// pass + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from common import * +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test2.py +//// from .io2 import [|/*marker3*/tools|] as t +//// +//// t.combine(1, 1) + +// @filename: test3.py +//// from .io2.[|/*marker4*/tools|] import pathUtils as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import [|tools|], pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + +// @filename: test5.py +//// from io2 import [|/*marker5*/tools|] as [|tools|] +//// from io2.[|/*marker6*/tools|] import pathUtils as pathUtils +//// +//// [|tools|].combine(1, 1) +//// pathUtils.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const references = state + .getRangesByText() + .get('tools')! 
+ .map((r) => ({ path: r.fileName, range: state.convertPositionRange(r) })); + + state.verifyFindAllReferences({ + marker1: { references }, + marker2: { references }, + marker3: { references }, + marker4: { references }, + marker5: { references }, + marker6: { references }, + }); +}); + +test('import dotted name', () => { + const code = ` +// @filename: nest1/__init__.py +//// # empty + +// @filename: nest1/nest2/__init__.py +//// # empty + +// @filename: nest1/nest2/module.py +//// def foo(): +//// pass + +// @filename: test1.py +//// import [|nest1|].[|nest2|].[|module|] +//// +//// [|nest1|].[|nest2|].[|module|] + +// @filename: nest1/test2.py +//// import [|nest1|].[|nest2|].[|module|] +//// +//// [|nest1|].[|nest2|].[|module|] + `; + + const state = parseAndGetTestState(code).state; + + function verify(name: string) { + const ranges = state.getRangesByText().get(name)!; + for (const range of ranges) { + verifyReferencesAtPosition(state.program, state.configOptions, name, range.fileName, range.pos, ranges); + } + } + + verify('nest1'); + verify('nest2'); + verify('module'); +}); + +test('import alias', () => { + const code = ` +// @filename: nest/__init__.py +//// # empty + +// @filename: nest/module2.py +//// # empty + +// @filename: module1.py +//// # empty + +// @filename: test1.py +//// import [|/*marker1*/module1|] as [|module1|] + +// @filename: test2.py +//// import nest.[|/*marker2*/module2|] as [|module2|] + `; + + const state = parseAndGetTestState(code).state; + + const marker1 = state.getMarkerByName('marker1'); + const ranges1 = state.getRangesByText().get('module1')!; + verifyReferencesAtPosition( + state.program, + state.configOptions, + 'module1', + marker1.fileName, + marker1.position, + ranges1 + ); + + const marker2 = state.getMarkerByName('marker2'); + const ranges2 = state.getRangesByText().get('module2')!; + verifyReferencesAtPosition( + state.program, + state.configOptions, + 'module2', + marker2.fileName, + marker2.position, + ranges2 + 
); +}); + +test('string in __all__', () => { + const code = ` +// @filename: test1.py +//// class [|/*marker1*/A|]: +//// pass +//// +//// a: "[|A|]" = "A" +//// +//// __all__ = [ "[|A|]" ] + `; + + const state = parseAndGetTestState(code).state; + + const marker1 = state.getMarkerByName('marker1'); + const ranges1 = state.getRangesByText().get('A')!; + verifyReferencesAtPosition(state.program, state.configOptions, 'A', marker1.fileName, marker1.position, ranges1); +}); + +function verifyReferencesAtPosition( + program: Program, + configOption: ConfigOptions, + symbolName: string, + fileName: string, + position: number, + ranges: Range[] +) { + const sourceFile = program.getBoundSourceFile(fileName); + assert(sourceFile); + + const node = findNodeByOffset(sourceFile.getParseResults()!.parseTree, position); + const decls = DocumentSymbolCollector.getDeclarationsForNode( + node as NameNode, + program.evaluator!, + /*resolveLocalName*/ true, + CancellationToken.None, + program.test_createSourceMapper(configOption.findExecEnvironment(fileName)) + ); + + const rangesByFile = createMapFromItems(ranges, (r) => r.fileName); + for (const rangeFileName of rangesByFile.keys()) { + const collector = new DocumentSymbolCollector( + symbolName, + decls, + program.evaluator!, + CancellationToken.None, + program.getBoundSourceFile(rangeFileName)!.getParseResults()!.parseTree, + /*treatModuleInImportAndFromImportSame*/ true + ); + + const results = collector.collect(); + const rangesOnFile = rangesByFile.get(rangeFileName)!; + assert.strictEqual(results.length, rangesOnFile.length, `${rangeFileName}@${symbolName}`); + + for (const result of results) { + assert(rangesOnFile.some((r) => r.pos === result.range.start && r.end === TextRange.getEnd(result.range))); + } + } +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.autoimport.duplicates.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.autoimport.duplicates.fourslash.ts new file mode 
100644 index 000000000000..2408efd899ff --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.autoimport.duplicates.fourslash.ts @@ -0,0 +1,36 @@ +/// +// @indexer: true + +// @filename: test1.py +//// import math +//// import testLib +//// [|ata/*marker*/|] + +// @filename: testLib/__init__.pyi +// @library: true +//// def atan(x: float) -> float: ... +{ + const markerRange = helper.getPositionRange('marker'); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'atan', + kind: Consts.CompletionItemKind.Function, + documentation: '```\nfrom math import atan\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'math.atan' }, + }, + { + label: 'atan', + kind: Consts.CompletionItemKind.Function, + documentation: '```\nfrom testLib import atan\n```', + detail: 'Auto-import', + textEdit: { range: markerRange, newText: 'testLib.atan' }, + }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.builtinOverride.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.builtinOverride.fourslash.ts new file mode 100644 index 000000000000..384896f73c63 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.builtinOverride.fourslash.ts @@ -0,0 +1,22 @@ +/// + +// @filename: test.py +//// Cust[|/*marker1*/|] +//// my_v[|/*marker2*/|] + +// @filename: __builtins__.pyi +//// class CustomClass: ... +//// my_var: int = ... 
+ +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: 'CustomClass', + kind: Consts.CompletionItemKind.Class, + }, + ], + }, + marker2: { completions: [{ label: 'my_var', kind: Consts.CompletionItemKind.Variable }] }, +}); diff --git a/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.fourslash.ts new file mode 100644 index 000000000000..4fd9ba778670 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.fourslash.ts @@ -0,0 +1,247 @@ +/// + +// @filename: test.py +//// from typing import TypedDict, Optional, Union, List, Dict, Any +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// def thing(movie: Movie): +//// pass +//// +//// thing({'[|/*marker1*/|]'}) +//// thing({'name': '[|/*marker2*/|]'}) +//// thing({'name': 'Robert','[|/*marker3*/|]'}) +//// thing({'name': 'Robert', '[|/*marker4*/|]'}) +//// thing('[|/*marker5*/|]') +//// thing({'na[|/*marker6*/|]'}) +//// thing({[|/*marker7*/|]}) +//// thing({'a', '[|/*marker8*/|]'}) +//// +//// class Episode(TypedDict): +//// title: str +//// score: int +//// +//// def thing2(item: Union[Episode, Movie]): +//// pass +//// +//// thing2({'[|/*marker9*/|]'}) +//// thing2({'unknown': 'a', '[|/*marker10*/|]': ''}) +//// thing2({'title': 'Episode 01', '[|/*marker11*/|]': ''}) +//// +//// class Wrapper(TypedDict): +//// age: int +//// wrapped: Union[bool, Movie] +//// data: Dict[str, Any] +//// +//// def thing3(wrapper: Optional[Wrapper]): +//// pass +//// +//// thing3({'data': {'[|/*marker12*/|]'}}) +//// thing3({'wrapped': {'[|/*marker13*/|]'}}) +//// thing3({'age': 1, 'wrapped': {'[|/*marker14*/|]'}}) +//// thing3({'unknown': {'[|/*marker15*/|]'}}) +//// thing3({'age': {'[|/*marker16*/|]'}}) +//// thing3({'wrapped': {'name': 'ET', '[|/*marker17*/|]'}}) + +{ + const marker1Range = 
helper.expandPositionRange(helper.getPositionRange('marker1'), 1, 1); + const marker3Range = helper.expandPositionRange(helper.getPositionRange('marker3'), 1, 1); + const marker4Range = helper.expandPositionRange(helper.getPositionRange('marker4'), 1, 1); + const marker6Range = helper.expandPositionRange(helper.getPositionRange('marker6'), 3, 1); + const marker7Range = helper.getPositionRange('marker7'); + const marker8Range = helper.expandPositionRange(helper.getPositionRange('marker8'), 1, 1); + const marker9Range = helper.expandPositionRange(helper.getPositionRange('marker9'), 1, 1); + const marker10Range = helper.expandPositionRange(helper.getPositionRange('marker10'), 1, 1); + const marker11Range = helper.expandPositionRange(helper.getPositionRange('marker11'), 1, 1); + const marker13Range = helper.expandPositionRange(helper.getPositionRange('marker13'), 1, 1); + const marker14Range = helper.expandPositionRange(helper.getPositionRange('marker14'), 1, 1); + const marker17Range = helper.expandPositionRange(helper.getPositionRange('marker17'), 1, 1); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'age'" }, + }, + ], + }, + marker2: { + completions: [], + }, + marker3: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'age'" }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'age'" }, + }, + ], + }, + marker5: { + completions: [], + }, + marker6: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker6Range, newText: "'name'" }, + }, 
+ ], + }, + marker8: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker8Range, newText: "'age'" }, + }, + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker8Range, newText: "'name'" }, + }, + ], + }, + marker9: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'age'" }, + }, + { + label: "'title'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'title'" }, + }, + { + label: "'score'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'score'" }, + }, + ], + }, + marker10: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'age'" }, + }, + { + label: "'title'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'title'" }, + }, + { + label: "'score'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker10Range, newText: "'score'" }, + }, + ], + }, + marker11: { + completions: [ + { + label: "'score'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker11Range, newText: "'score'" }, + }, + ], + }, + marker12: { + completions: [], + }, + marker13: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker13Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker13Range, newText: "'age'" }, + }, + ], + }, + marker14: { + completions: [ + { + label: "'name'", + kind: 
Consts.CompletionItemKind.Constant, + textEdit: { range: marker14Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker14Range, newText: "'age'" }, + }, + ], + }, + marker15: { + completions: [], + }, + marker16: { + completions: [], + }, + marker17: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker17Range, newText: "'age'" }, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker7: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'age'" }, + }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.list.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.list.fourslash.ts new file mode 100644 index 000000000000..467fc739bd35 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.list.fourslash.ts @@ -0,0 +1,163 @@ +/// + +// @filename: test.py +//// from typing import TypedDict, Union, List +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// class MultipleInputs(TypedDict): +//// items: List[Movie] +//// union: Union[bool, List[Movie]] +//// unions: Union[Movie, Union[bool, List[Movie]]] +//// +//// def thing(inputs: MultipleInputs): +//// pass +//// +//// thing({'items': ['[|/*marker1*/|]']}) +//// thing({'items': {'[|/*marker2*/|]'}}) +//// thing({'items': [{'[|/*marker3*/|]'}]}) +//// thing({'union': [{'[|/*marker4*/|]'}]}) +//// thing({'unions': {'[|/*marker5*/|]'}}) +//// thing({'unions': [{'[|/*marker6*/|]'}]}) +//// +//// def thing2(movies: List[Movie]): +//// pass +//// +//// thing2([{'[|/*marker7*/|]'}]) +//// 
thing2({'[|/*marker8*/|]'}) +//// +//// class Wrapper(TypedDict): +//// wrapped: MultipleInputs +//// +//// def thing3(wrapper: Wrapper): +//// pass +//// +//// thing3({'wrapped': {'items': [{'[|/*marker9*/|]'}]}}) +//// thing3({'wrapped': {'items': {'[|/*marker10*/|]'}}}) +//// thing3({'wrapped': {'items': [{'a': 'b'}, {'[|/*marker11*/|]'}]}}) + +{ + const marker3Range = helper.expandPositionRange(helper.getPositionRange('marker3'), 1, 1); + const marker4Range = helper.expandPositionRange(helper.getPositionRange('marker4'), 1, 1); + const marker5Range = helper.expandPositionRange(helper.getPositionRange('marker5'), 1, 1); + const marker6Range = helper.expandPositionRange(helper.getPositionRange('marker6'), 1, 1); + const marker7Range = helper.expandPositionRange(helper.getPositionRange('marker7'), 1, 1); + const marker9Range = helper.expandPositionRange(helper.getPositionRange('marker9'), 1, 1); + const marker11Range = helper.expandPositionRange(helper.getPositionRange('marker11'), 1, 1); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [], + }, + marker2: { + completions: [], + }, + marker3: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'age'" }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'age'" }, + }, + ], + }, + marker5: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker5Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker5Range, 
newText: "'age'" }, + }, + ], + }, + marker6: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker6Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker6Range, newText: "'age'" }, + }, + ], + }, + marker7: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker7Range, newText: "'age'" }, + }, + ], + }, + marker8: { + completions: [], + }, + marker9: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker9Range, newText: "'age'" }, + }, + ], + }, + marker10: { + completions: [], + }, + marker11: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker11Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker11Range, newText: "'age'" }, + }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.states.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.states.fourslash.ts new file mode 100644 index 000000000000..0c63b92cf32e --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.call.typedDict.states.fourslash.ts @@ -0,0 +1,135 @@ +/// + +// @filename: test.py +//// from typing import TypedDict +//// +//// class Movie(TypedDict): +//// name: str +//// age: int +//// +//// def thing(movie: Movie): +//// pass +//// +//// thing(movie={'foo': 'a', '[|/*marker1*/|]'}) +//// thing(movie={'foo': 'a', 'a[|/*marker2*/|]'}) +//// thing( +//// movie={ 
+//// 'name': 'Parasite', +//// '[|/*marker3*/|] +//// } +//// ) +//// thing( +//// movie={ +//// 'name': 'Parasite', +//// '[|/*marker4*/|]' +//// } +//// ) +//// thing({ +//// 'name': 'Parasite', +//// # hello world +//// '[|/*marker5*/|]' +//// }) +//// thing({'foo': '[|/*marker6*/|]'}) + +{ + // completions that rely on token parsing instead of node parsing + const marker1Range = helper.expandPositionRange(helper.getPositionRange('marker1'), 1, 1); + const marker2Range = helper.expandPositionRange(helper.getPositionRange('marker2'), 2, 1); + const marker3Range = helper.expandPositionRange(helper.getPositionRange('marker3'), 1, 0); + const marker4Range = helper.expandPositionRange(helper.getPositionRange('marker4'), 1, 1); + const marker5Range = helper.expandPositionRange(helper.getPositionRange('marker5'), 1, 1); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker1Range, newText: "'age'" }, + }, + ], + }, + marker2: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker2Range, newText: "'name'" }, + }, + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker2Range, newText: "'age'" }, + }, + ], + }, + marker6: { + completions: [], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker3: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker3Range, newText: "'age'" }, + }, + ], + }, + marker4: { + completions: [ + { + label: "'age'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker4Range, newText: "'age'" }, + }, + ], + }, + marker5: { + completions: [ + { + label: "'age'", + 
kind: Consts.CompletionItemKind.Constant, + textEdit: { range: marker5Range, newText: "'age'" }, + }, + ], + }, + }); + + // @ts-ignore + await helper.verifyCompletion('excluded', 'markdown', { + marker3: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker4: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker5: { + completions: [ + { + label: "'name'", + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.enums.members.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.enums.members.fourslash.ts new file mode 100644 index 000000000000..c96b8f1ed2d1 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.enums.members.fourslash.ts @@ -0,0 +1,44 @@ +/// + +// @filename: test.py +//// from enum import Enum +//// class Color(Enum): +//// RED = 1 +//// GREEN = 2 +//// BLUE = 3 +//// +//// NotAMember: int = 3 +//// +//// @property +//// def a_prop(self): +//// pass +//// +//// Color./*marker*/ + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [ + { + label: 'BLUE', + kind: Consts.CompletionItemKind.EnumMember, + }, + { + label: 'GREEN', + kind: Consts.CompletionItemKind.EnumMember, + }, + { + label: 'RED', + kind: Consts.CompletionItemKind.EnumMember, + }, + { + label: 'a_prop', + kind: Consts.CompletionItemKind.Property, + }, + { + label: 'NotAMember', + kind: Consts.CompletionItemKind.Variable, + }, + ], + }, +}); diff --git a/packages/pyright-internal/src/tests/fourslash/completions.importDunderNames.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.importDunderNames.fourslash.ts index 7eb0dbabb664..9cbcad711972 100644 --- a/packages/pyright-internal/src/tests/fourslash/completions.importDunderNames.fourslash.ts +++ 
b/packages/pyright-internal/src/tests/fourslash/completions.importDunderNames.fourslash.ts @@ -3,12 +3,21 @@ // @filename: test.py //// import _[|/*marker1*/|] //// import __pycache__[|/*marker2*/|] +//// from test2 import _[|/*marker3*/|] +//// from test2 import [|/*marker4*/|] + +// @filename: test2.py +//// def foo(): +//// pass // @ts-ignore await helper.verifyCompletion('included', 'markdown', { marker1: { completions: [{ label: '__future__', kind: Consts.CompletionItemKind.Module }], }, + marker4: { + completions: [{ label: 'foo', kind: Consts.CompletionItemKind.Function }], + }, }); // @ts-ignore @@ -16,4 +25,7 @@ await helper.verifyCompletion('exact', 'markdown', { marker2: { completions: [], }, + marker3: { + completions: [], + }, }); diff --git a/packages/pyright-internal/src/tests/fourslash/completions.importSubmodule.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.importSubmodule.fourslash.ts index 0e1b8299b2a0..4d12b520f5c3 100644 --- a/packages/pyright-internal/src/tests/fourslash/completions.importSubmodule.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/completions.importSubmodule.fourslash.ts @@ -21,9 +21,6 @@ // @ts-ignore await helper.verifyCompletion('exact', 'markdown', { marker1: { - completions: [ - { label: 'setup', kind: Consts.CompletionItemKind.Module }, - { label: 'submodule1', kind: Consts.CompletionItemKind.Module }, - ], + completions: [{ label: 'submodule1', kind: Consts.CompletionItemKind.Module }], }, }); diff --git a/packages/pyright-internal/src/tests/fourslash/completions.importsDuplicates.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.importsDuplicates.fourslash.ts new file mode 100644 index 000000000000..8191766ff482 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.importsDuplicates.fourslash.ts @@ -0,0 +1,23 @@ +// @filename: test_no_duplicate_tseries_completions.py +//// from testLib import [|t/*marker*/|] + +// @filename: 
testLib/__init__.pyi +// @library: true +//// import tseries +//// __all__ = ['tseries'] + +// @filename: testLib/tseries/__init__.pyi +// @library: true +// + +// @ts-ignore +await helper.verifyCompletion('exact', 'markdown', { + marker: { + completions: [ + { + label: 'tseries', + kind: Consts.CompletionItemKind.Module, + }, + ], + }, +}); diff --git a/packages/pyright-internal/src/tests/fourslash/completions.inList.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.inList.fourslash.ts new file mode 100644 index 000000000000..ae59ea9bbdae --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.inList.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: testList.py +//// a = 42 +//// x = [ +//// a.[|/*marker1*/|] +//// ] + +// @filename: testListWithCall.py +//// b = 42 +//// y = [ +//// print(b.[|/*marker2*/|]) +//// ] + +// @filename: testListWithCallMissingClosedParens.py +//// b = 42 +//// y = [ +//// print(b.[|/*marker3*/|] +//// ] +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, + marker2: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, + marker3: { + completions: [{ label: 'numerator', kind: Consts.CompletionItemKind.Property }], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.indexer.keys.getitem.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.indexer.keys.getitem.fourslash.ts new file mode 100644 index 000000000000..fd30981f722e --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.indexer.keys.getitem.fourslash.ts @@ -0,0 +1,57 @@ +/// + +// @filename: getitem.py +//// from typing import Literal +//// class Foo: +//// def __getitem__(self, key: Literal['a', 'b']): +//// pass + +// @filename: 
test1.py +//// from getitem import Foo +//// f = Foo() +//// f[[|/*marker1*/|]] + +// @filename: test2.py +//// from getitem import Foo +//// f = Foo() +//// f[[|"/*marker2*/"|]] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { + label: "'a'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: "'a'" }, + detail: 'Dictionary key', + }, + { + label: "'b'", + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker1'), newText: "'b'" }, + detail: 'Dictionary key', + }, + ], + }, + marker2: { + completions: [ + { + label: '"a"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"a"' }, + detail: 'Dictionary key', + }, + { + label: '"b"', + kind: Consts.CompletionItemKind.Constant, + textEdit: { range: helper.getPositionRange('marker2'), newText: '"b"' }, + detail: 'Dictionary key', + }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.literals.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.literals.fourslash.ts new file mode 100644 index 000000000000..05419c0d6336 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.literals.fourslash.ts @@ -0,0 +1,33 @@ +/// + +// @filename: test1.py +//// from typing import Literal +//// a: Literal["Hello"] = "He[|/*marker1*/|] + +// @filename: test2.py +//// from typing import Literal +//// a: Literal["Hello"] = [|/*marker2*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: '"Hello"', + kind: Consts.CompletionItemKind.Constant, + }, + ], + }, + marker2: { + completions: [ + { + label: '"Hello"', + kind: 
Consts.CompletionItemKind.Constant, + }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts index be0e53f741e0..7472435d917a 100644 --- a/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/completions.override.default.fourslash.ts @@ -23,7 +23,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: "method1(self, a: str = 'hello', b: int = 1234):\n return super().method1(a=a, b=b)", + newText: "method1(self, a: str = 'hello', b: int = 1234):\n return super().method1(a, b)", }, }, { @@ -31,7 +31,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: 'method2(self, a=None):\n return super().method2(a=a)', + newText: 'method2(self, a=None):\n return super().method2(a)', }, }, { @@ -39,7 +39,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: 'method3(self, a=1234, b=object()):\n return super().method3(a=a, b=b)', + newText: 'method3(self, a=1234, b=object()):\n return super().method3(a, b)', }, }, ], diff --git a/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts index 86e9e4520c55..dfa7b30e07ce 100644 --- a/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/completions.override.default.imported.fourslash.ts @@ -29,7 +29,7 @@ await 
helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: "method1(self, a: str = 'hello', b: int = 1234):\n return super().method1(a=a, b=b)", + newText: "method1(self, a: str = 'hello', b: int = 1234):\n return super().method1(a, b)", }, }, { @@ -37,7 +37,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: 'method2(self, a=None):\n return super().method2(a=a)', + newText: 'method2(self, a=None):\n return super().method2(a)', }, }, { @@ -45,7 +45,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: 'method3(self, a=1234, b=...):\n return super().method3(a=a, b=b)', + newText: 'method3(self, a=1234, b=...):\n return super().method3(a, b)', }, }, { @@ -53,7 +53,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: 'method4(self, a=+1234, b=-1.23j, c=1 + 2j):\n return super().method4(a=a, b=b, c=c)', + newText: 'method4(self, a=+1234, b=-1.23j, c=1 + 2j):\n return super().method4(a, b, c)', }, }, ], diff --git a/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts index 904c59073367..06feaab682d2 100644 --- a/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/completions.override.fourslash.ts @@ -39,7 +39,7 @@ await helper.verifyCompletion('included', 'markdown', { kind: Consts.CompletionItemKind.Method, textEdit: { range: helper.getPositionRange('marker'), - newText: 'method3(self, b, *, c: str):\n return super().method3(b, c)', + newText: 
'method3(self, b, *, c: str):\n return super().method3(b, c=c)', }, }, ], diff --git a/packages/pyright-internal/src/tests/fourslash/completions.parentFolder.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.parentFolder.fourslash.ts new file mode 100644 index 000000000000..ef9cc90ab650 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.parentFolder.fourslash.ts @@ -0,0 +1,17 @@ +/// + +// @filename: python/test.py +//// from d/*marker*/ + +// @filename: python/data_processing/__init__.py +//// #empty + +// @filename: python/data_processing/create_fullname.py +//// #empty + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { + completions: [{ label: 'data_processing', kind: Consts.CompletionItemKind.Module }], + }, +}); diff --git a/packages/pyright-internal/src/tests/fourslash/completions.parentFolders.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.parentFolders.fourslash.ts new file mode 100644 index 000000000000..0d7bd9f5af43 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.parentFolders.fourslash.ts @@ -0,0 +1,77 @@ +/// + +// @filename: module.py +//// # empty + +// @filename: nested1/__init__.py +//// # empty + +// @filename: nested1/module.py +//// # empty + +// @filename: nested1/nested2/__init__.py +//// # empty + +// @filename: nested1/nested2/test1.py +//// from .[|/*marker1*/|] + +// @filename: nested1/nested2/test2.py +//// from ..[|/*marker2*/|] + +// @filename: nested1/nested2/test3.py +//// from ..nested2.[|/*marker3*/|] + +// @filename: nested1/nested2/test4.py +//// from ...nested1.[|/*marker4*/|] + +// @filename: nested1/nested2/test5.py +//// from ...nested1.nested2.[|/*marker5*/|] + +{ + helper.openFiles(helper.getMarkers().map((m) => m.fileName)); + + // @ts-ignore + await helper.verifyCompletion('exact', 'markdown', { + marker1: { + completions: [ + { label: 'import', kind: Consts.CompletionItemKind.Keyword 
}, + { label: 'test1', kind: Consts.CompletionItemKind.Module }, + { label: 'test2', kind: Consts.CompletionItemKind.Module }, + { label: 'test3', kind: Consts.CompletionItemKind.Module }, + { label: 'test4', kind: Consts.CompletionItemKind.Module }, + { label: 'test5', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker2: { + completions: [ + { label: 'import', kind: Consts.CompletionItemKind.Keyword }, + { label: 'nested2', kind: Consts.CompletionItemKind.Module }, + { label: 'module', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker3: { + completions: [ + { label: 'test1', kind: Consts.CompletionItemKind.Module }, + { label: 'test2', kind: Consts.CompletionItemKind.Module }, + { label: 'test3', kind: Consts.CompletionItemKind.Module }, + { label: 'test4', kind: Consts.CompletionItemKind.Module }, + { label: 'test5', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker4: { + completions: [ + { label: 'nested2', kind: Consts.CompletionItemKind.Module }, + { label: 'module', kind: Consts.CompletionItemKind.Module }, + ], + }, + marker5: { + completions: [ + { label: 'test1', kind: Consts.CompletionItemKind.Module }, + { label: 'test2', kind: Consts.CompletionItemKind.Module }, + { label: 'test3', kind: Consts.CompletionItemKind.Module }, + { label: 'test4', kind: Consts.CompletionItemKind.Module }, + { label: 'test5', kind: Consts.CompletionItemKind.Module }, + ], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/completions.typeshed.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/completions.typeshed.fourslash.ts new file mode 100644 index 000000000000..25ef5afb8ed9 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/completions.typeshed.fourslash.ts @@ -0,0 +1,9 @@ +/// + +// @filename: test.py +//// from r/*marker*/ + +// @ts-ignore +await helper.verifyCompletion('included', 'markdown', { + marker: { completions: [{ label: 'requests', kind: Consts.CompletionItemKind.Module }] }, +}); 
diff --git a/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.builtinClass.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.builtinClass.fourslash.ts new file mode 100644 index 000000000000..9064b8e021b4 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.builtinClass.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// [|/*marker1*/a|] = 1 + +// @filename: typeshed-fallback/stdlib/builtins.pyi +//// class [|int|]: +//// @overload +//// def __new__(cls: Type[_T], x: str | bytes | SupportsInt | SupportsIndex | _SupportsTrunc = ...) -> _T: ... +//// @overload +//// def __new__(cls: Type[_T], x: str | bytes | bytearray, base: SupportsIndex) -> _T: ... + +{ + const rangeMap = helper.getRangesByText(); + + helper.verifyFindTypeDefinitions({ + marker1: { + definitions: rangeMap + .get('int')! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.classes.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.classes.fourslash.ts new file mode 100644 index 000000000000..88c7a52f18d6 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.classes.fourslash.ts @@ -0,0 +1,72 @@ +/// + +// @filename: testLib1/__init__.pyi +// @library: true +//// class [|C|]: ... +//// +//// class [|C2|]: ... +//// +//// class [|C3|]: ... +//// +//// class [|C4|]: ... +//// +//// class [|C5|]: ... + +// @filename: testLib1/__init__.py +// @library: true +//// from .M import C2 +//// from . 
import D +//// +//// class [|C|]: +//// pass +//// +//// [|C3|] = D.C3 +//// [|C4|] = D.N.C4 +//// +//// class [|C5|]: +//// def __init__(self, a, b): +//// pass + +// @filename: testLib1/M.py +// @library: true +//// class [|C2|]: +//// pass + +// @filename: testLib1/D.py +// @library: true +//// class [|C3|]: +//// pass +//// +//// class N: +//// class [|C4|]: +//// pass + +// @filename: test.py +//// import testLib1 +//// +//// [|/*marker1*/a|] = testLib1.C() +//// [|/*marker2*/a|] = testLib1.C2() +//// [|/*marker3*/a|] = testLib1.C3() +//// [|/*marker4*/a|] = testLib1.C4() +//// [|/*marker5*/a|] = testLib1.C5(1, 2) + +{ + const rangeMap = helper.getRangesByText(); + + var _getRanges = function (rangeName: string): _.DocumentRange[] { + return rangeMap + .get(rangeName)! + .filter((r) => !r.marker) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + }; + + helper.verifyFindTypeDefinitions({ + marker1: { definitions: _getRanges('C') }, + marker2: { definitions: _getRanges('C2') }, + marker3: { definitions: _getRanges('C3') }, + marker4: { definitions: _getRanges('C4') }, + marker5: { definitions: _getRanges('C5') }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.unions.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.unions.fourslash.ts new file mode 100644 index 000000000000..4f0658773012 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findTypeDefinitions.unions.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// from typing import Union +//// +//// class [|C1|]: +//// pass +//// +//// class N: +//// class [|C2|]: +//// pass +//// +//// def foo([|/*marker1*/a|]: Union[C1, N.C2]): +//// pass + +{ + helper.verifyFindTypeDefinitions({ + marker1: { + definitions: helper + .getFilteredRanges<{ target?: string }>((m, d, t) => t === 'C1' || t === 'C2') + .map((r) => { + return { path: r.fileName, range: 
helper.convertPositionRange(r) }; + }), + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/findallreferences.module.nested.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findallreferences.module.nested.fourslash.ts new file mode 100644 index 000000000000..2932b8cd003a --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findallreferences.module.nested.fourslash.ts @@ -0,0 +1,67 @@ +/// + +// @filename: nested/__init__.py +//// from .[|/*module1*/module1|] import module1Func as module1Func + +// @filename: nested/module1.py +//// def module1Func(): +//// pass + +// @filename: test1.py +//// import [|/*nest1*/nested|].[|/*module2*/module1|] +//// import [|/*nest2*/nested|].[|/*module3*/module1|] as m +//// +//// [|/*nest3*/nested|].[|/*module4*/module1|].module1Func() + +// @filename: test2.py +//// from [|/*nest4*/nested|].[|/*module5*/module1|] import module1Func +//// from .[|/*nest5*/nested|].[|/*module6*/module1|] import module1Func as f + +// @filename: test3.py +//// from .[|/*nest6*/nested|] import [|/*module7*/module1|] +//// from .[|/*nest7*/nested|] import [|/*module8*/module1|] as m + +// @filename: code/test4.py +//// from ..[|/*nest8*/nested|] import [|/*module9*/module1|] +//// from ..[|/*nest9*/nested|] import [|/*module10*/module1|] as m +//// from ..[|/*nest10*/nested|].[|/*module11*/module1|] import module1Func + +{ + const nestedReferences = helper + .getRangesByText() + .get('nested')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const moduleReferences = helper + .getRangesByText() + .get('module1')! 
+ .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + nest1: { references: nestedReferences }, + nest2: { references: nestedReferences }, + nest3: { references: nestedReferences }, + nest4: { references: nestedReferences }, + nest5: { references: nestedReferences }, + nest6: { references: nestedReferences }, + nest7: { references: nestedReferences }, + nest8: { references: nestedReferences }, + nest9: { references: nestedReferences }, + nest10: { references: nestedReferences }, + module1: { references: moduleReferences }, + module2: { references: moduleReferences }, + module3: { references: moduleReferences }, + module4: { references: moduleReferences }, + module5: { references: moduleReferences }, + module6: { references: moduleReferences }, + module7: { references: moduleReferences }, + module8: { references: moduleReferences }, + module9: { references: moduleReferences }, + module10: { references: moduleReferences }, + module11: { references: moduleReferences }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.duplicated.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.duplicated.fourslash.ts new file mode 100644 index 000000000000..5cbe5dfff6e9 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.duplicated.fourslash.ts @@ -0,0 +1,59 @@ +/// + +// @filename: module1.py +//// def module1Func(): +//// pass + +// @filename: nest/__init__.py +//// # empty + +// @filename: nest/module1.py +//// def nestModule1Func(): +//// pass + +// @filename: test1.py +//// from [|/*marker1*/nest|] import [|/*marker2*/module1|] +//// +//// from [|/*marker3*/nest|].[|/*marker4*/module1|] import module1Func +//// +//// import [|/*marker5*/nest|].[|/*marker6*/module1|] +//// import [|/*marker7*/module1|] +//// +//// [|/*marker8*/nest|].[|/*marker9*/module1|] + +{ + const nestReferences = 
helper + .getRangesByText() + .get('nest')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const marker7 = helper.getMarkerByName('marker7'); + const module1References = helper + .getRangesByText() + .get('module1')! + .filter((r) => r.marker !== marker7) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + marker1: { references: nestReferences }, + marker2: { references: module1References }, + marker3: { references: nestReferences }, + marker4: { references: module1References }, + marker5: { references: nestReferences }, + marker6: { references: module1References }, + marker7: { + references: helper + .getRanges() + .filter((r) => r.marker === marker7) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + marker8: { references: nestReferences }, + marker9: { references: module1References }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.fourslash.ts new file mode 100644 index 000000000000..348b493d787b --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.fourslash.ts @@ -0,0 +1,46 @@ +/// + +// @filename: module1.py +//// def module1Func(): +//// pass + +// @filename: test1.py +//// import [|/*marker1*/module1|] +//// import [|/*marker2*/module1|] as m +//// +//// [|/*marker3*/module1|].module1Func() + +// @filename: test2.py +//// from [|/*marker4*/module1|] import module1Func +//// from .[|/*marker5*/module1|] import module1Func as f + +// @filename: test3.py +//// from . import [|/*marker6*/module1|] +//// from . import [|/*marker7*/module1|] as m + +// @filename: nested/test4.py +//// from .. import [|/*marker8*/module1|] +//// from .. 
import [|/*marker9*/module1|] as m +//// from ..[|/*marker10*/module1|] import module1Func + +{ + const references = helper + .getRangesByText() + .get('module1')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + marker1: { references }, + marker2: { references }, + marker3: { references }, + marker4: { references }, + marker5: { references }, + marker6: { references }, + marker7: { references }, + marker8: { references }, + marker9: { references }, + marker10: { references }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.shadow.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.shadow.fourslash.ts new file mode 100644 index 000000000000..550871c418d5 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/findallreferences.modules.shadow.fourslash.ts @@ -0,0 +1,95 @@ +/// + +// @filename: module1.py +//// def module1Func(): +//// pass + +// @filename: nest1/__init__.py +//// # empty + +// @filename: nest1/module1.py +//// def nest1Module1Func(): +//// pass + +// @filename: nest1/nest2/__init__.py +//// # empty + +// @filename: nest1/nest2/module1.py +//// def nest2Module1Func(): +//// pass + +// @filename: test1.py +//// from [|/*nest1_1*/nest1|] import [|{| "name":"nest1_module1", "target":"nest1" |}module1|] +//// from [|/*nest1_2*/nest1|].[|/*nest2_1*/nest2|] import [|{| "name":"nest2_module1", "target":"nest2" |}module1|] +//// +//// import [|/*nest1_3*/nest1|] +//// import [|/*nest1_4*/nest1|].[|/*nest2_2*/nest2|] +//// import [|/*nest1_5*/nest1|].[|/*nest2_3*/nest2|].[|{| "name":"nest2_module2", "target":"nest2" |}module1|] +//// +//// from [|/*nest1_6*/nest1|] import [|/*nest2_4*/nest2|] +//// +//// [|{| "name":"module4" |}module1|] +//// [|/*nest1_7*/nest1|] +//// [|/*nest1_8*/nest1|].[|/*nest2_5*/nest2|] +//// [|/*nest1_9*/nest1|].[|{| "name":"module5", "target":"none" |}module1|] + 
+{ + const nest1References = helper + .getRangesByText() + .get('nest1')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const nest2References = helper + .getRangesByText() + .get('nest2')! + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + const nest2ModuleReferences = helper + .getFilteredRanges<{ target?: string }>( + (m, d, t) => t === 'module1' && !!d && (!d.target || d.target === 'nest2') + ) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }); + + helper.verifyFindAllReferences({ + nest1_1: { references: nest1References }, + nest1_2: { references: nest1References }, + nest1_3: { references: nest1References }, + nest1_4: { references: nest1References }, + nest1_5: { references: nest1References }, + nest1_6: { references: nest1References }, + nest1_8: { references: nest1References }, + nest1_9: { references: nest1References }, + nest2_1: { references: nest2References }, + nest2_2: { references: nest2References }, + nest2_3: { references: nest2References }, + nest2_4: { references: nest2References }, + nest2_5: { references: nest2References }, + nest2_module1: { references: nest2ModuleReferences }, + nest2_module2: { references: nest2ModuleReferences }, + nest1_module1: { + references: helper + .getFilteredRanges<{ target?: string }>( + (m, d, t) => t === 'module1' && !!d && (!d.target || d.target === 'nest1') + ) + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + module4: { + references: helper + .getFilteredRanges<{ target?: string }>((m, d, t) => t === 'module1' && !!d && d.target !== 'none') + .map((r) => { + return { path: r.fileName, range: helper.convertPositionRange(r) }; + }), + }, + module5: { + references: [], + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/fourslash.ts b/packages/pyright-internal/src/tests/fourslash/fourslash.ts index 
c377128afc0f..1a71e66ba395 100644 --- a/packages/pyright-internal/src/tests/fourslash/fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/fourslash.ts @@ -138,6 +138,73 @@ declare namespace _ { importName: string; } + interface TextDocumentIdentifier { + uri: string; + } + + interface OptionalVersionedTextDocumentIdentifier extends TextDocumentIdentifier { + version: number | null; + } + + interface AnnotatedTextEdit extends TextEdit { + annotationId: string; + } + + interface TextDocumentEdit { + textDocument: OptionalVersionedTextDocumentIdentifier; + edits: (TextEdit | AnnotatedTextEdit)[]; + } + + interface FileOptions { + overwrite?: boolean; + ignoreIfExists?: boolean; + } + + interface ResourceOperation { + kind: string; + annotationId?: string; + } + + interface CreateFile extends ResourceOperation { + kind: 'create'; + uri: string; + options?: FileOptions; + } + + interface RenameFile extends ResourceOperation { + kind: 'rename'; + oldUri: string; + newUri: string; + options?: FileOptions; + } + + interface DeleteFileOptions { + recursive?: boolean; + ignoreIfNotExists?: boolean; + } + + interface DeleteFile extends ResourceOperation { + kind: 'delete'; + uri: string; + options?: DeleteFileOptions; + } + + interface ChangeAnnotation { + label: string; + needsConfirmation?: boolean; + description?: string; + } + + interface WorkspaceEdit { + changes?: { + [uri: string]: TextEdit[]; + }; + documentChanges?: (TextDocumentEdit | CreateFile | RenameFile | DeleteFile)[]; + changeAnnotations?: { + [id: string]: ChangeAnnotation; + }; + } + type MarkupKind = 'markdown' | 'plaintext'; type DefinitionFilter = 'all' | 'preferSource' | 'preferStubs'; @@ -154,9 +221,14 @@ declare namespace _ { getRanges(): Range[]; getRangesInFile(fileName: string): Range[]; getRangesByText(): Map; - + getFilteredRanges( + predicate: (m: Marker | undefined, d: T | undefined, text: string) => boolean + ): Range[]; getPositionRange(markerString: string): PositionRange; + 
expandPositionRange(range: PositionRange, start: number, end: number): PositionRange; convertPositionRange(range: Range): PositionRange; + convertPathToUri(path: string): string; + getDirectoryPath(path: string): string; goToBOF(): void; goToEOF(): void; @@ -239,6 +311,11 @@ declare namespace _ { }, filter?: DefinitionFilter ): void; + verifyFindTypeDefinitions(map: { + [marker: string]: { + definitions: DocumentRange[]; + }; + }): void; verifyRename(map: { [marker: string]: { newName: string; diff --git a/packages/pyright-internal/src/tests/fourslash/hover.docstring.overloads.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/hover.docstring.overloads.fourslash.ts new file mode 100644 index 000000000000..b8ff9b65e0e4 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/hover.docstring.overloads.fourslash.ts @@ -0,0 +1,41 @@ +/// + +// @filename: test.py +//// import mylib +//// +//// mylib.[|/*marker1*/dontwork|] +//// mylib.[|/*marker2*/works|] + +// @filename: mylib/__init__.pyi +//// from typing import overload +//// +//// class RandomState: +//// @overload +//// def dontwork(self, x:int) -> None: ... +//// @overload +//// def dontwork(self, x:float) -> None: ... +//// def works(self) -> None: ... +//// +//// _rand = RandomState +//// +//// dontwork = _rand.dontwork +//// works = _rand.works + +// @filename: mylib/__init__.py +//// from typing import Union, overload +//// +//// class RandomState: +//// @overload +//// def dontwork(self, x:int) -> None: ... +//// def dontwork(self, x:Union[int, float]) -> None: +//// 'dontwork docstring' +//// ... +//// def works(self) -> None: +//// 'works docstring' +//// ... 
+ +helper.verifyHover('markdown', { + marker1: + '```python\n(variable) dontwork: Overload[(self: _rand, x: int) -> None, (self: _rand, x: float) -> None]\n```\n---\ndontwork docstring', + marker2: '```python\n(variable) works: (self: _rand) -> None\n```\n---\nworks docstring', +}); diff --git a/packages/pyright-internal/src/tests/fourslash/hover.optionalAliasParameter.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/hover.optionalAliasParameter.fourslash.ts new file mode 100644 index 000000000000..a82cc021e47a --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/hover.optionalAliasParameter.fourslash.ts @@ -0,0 +1,14 @@ +/// + +// @filename: test.py +//// from typing import Literal, Union +//// +//// A = Union[int, str, None] +//// +//// def func([|/*marker1*/param|]: A = None) -> None: +//// print([|/*marker2*/param|]) + +helper.verifyHover('markdown', { + marker1: '```python\n(parameter) param: A\n```', + marker2: '```python\n(parameter) param: A\n```', +}); diff --git a/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts index 419d98ff3c77..9769231420fb 100644 --- a/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/import.pytyped.typeCheckingBasic.fourslash.ts @@ -11,16 +11,28 @@ // @filename: testLib/__init__.py // @library: true -//// # This method is missing a return annotation -//// def foo(): -//// return +//// class Foo: +//// def method1(self): +//// '''Method docs''' +//// return None +//// +//// # This method has no annotation +//// def foo(a): +//// return Foo() // @filename: .src/test.py -//// # pyright: strict //// from testLib import foo -//// [|/*marker*/a|] = foo() +//// foo(1).me[|/*marker1*/|] // @ts-ignore -await helper.verifyDiagnostics({ - marker: { category: 'error', message: `Type of "a" is 
unknown` }, +await helper.verifyCompletion('included', 'markdown', { + marker1: { + completions: [ + { + label: 'method1', + kind: Consts.CompletionItemKind.Method, + documentation: '```python\ntestLib.Foo.method1()\n```\n---\nMethod docs', + }, + ], + }, }); diff --git a/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.multipart.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.multipart.fourslash.ts new file mode 100644 index 000000000000..1c418ed3b6f9 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.multipart.fourslash.ts @@ -0,0 +1,48 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib/aa/bb/__init__.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// ... + +// @filename: testLib/aa/bb/cc.py +// @library: true +//// MyAlias = int + +// @filename: test.py +//// import [|/*marker*/testLib.aa.b|]b + +const filename2 = helper.getMarkerByName('marker').fileName; +const command2 = { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['/', 'testLib.aa.bb', filename2], +}; + +// @ts-ignore +await helper.verifyCommand(command2, { + ['/typings/testLib/aa/bb/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +class MyLibrary: + def DoEveryThing(self, code: str): # -> None: + ... + + + +`, + ['/typings/testLib/aa/bb/cc.pyi']: `""" +This type stub file was generated by pyright. 
+""" + +MyAlias = int +`, +}); diff --git a/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlefile.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlefile.fourslash.ts new file mode 100644 index 000000000000..5a5ebfe29bba --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlefile.fourslash.ts @@ -0,0 +1,38 @@ +/// + +// @filename: pyrightconfig.json +//// { +//// "reportMissingTypeStubs": "warning" +//// } + +// @filename: testLib.py +// @library: true +//// # This is a library file +//// class MyLibrary: +//// def DoEveryThing(self, code: str): +//// ... + +// @filename: test.py +//// import [|/*marker*/testLi|]b + +const filename3 = helper.getMarkerByName('marker').fileName; +const command3 = { + title: 'Create Type Stub', + command: Consts.Commands.createTypeStub, + arguments: ['/', 'testLib', filename3], +}; + +// @ts-ignore +await helper.verifyCommand(command3, { + ['/typings/testLib/__init__.pyi']: `""" +This type stub file was generated by pyright. +""" + +class MyLibrary: + def DoEveryThing(self, code: str): # -> None: + ... 
+ + + +`, +}); diff --git a/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlepart.fourslash.ts similarity index 79% rename from packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.fourslash.ts rename to packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlepart.fourslash.ts index cea9df03231c..5c9c3eea28c6 100644 --- a/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/missingTypeStub.command.singlepart.fourslash.ts @@ -15,15 +15,15 @@ // @filename: test.py //// import [|/*marker*/testLi|]b -const filename = helper.getMarkerByName('marker').fileName; -const command = { +const filename1 = helper.getMarkerByName('marker').fileName; +const command1 = { title: 'Create Type Stub', command: Consts.Commands.createTypeStub, - arguments: ['/', 'testLib', filename], + arguments: ['/', 'testLib', filename1], }; // @ts-ignore -await helper.verifyCommand(command, { +await helper.verifyCommand(command1, { ['/typings/testLib/__init__.pyi']: `""" This type stub file was generated by pyright. """ diff --git a/packages/pyright-internal/src/tests/fourslash/rename.string.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/rename.string.fourslash.ts new file mode 100644 index 000000000000..4c4d083d4be6 --- /dev/null +++ b/packages/pyright-internal/src/tests/fourslash/rename.string.fourslash.ts @@ -0,0 +1,26 @@ +/// + +// @filename: test.py +//// class [|/*marker*/A|]: +//// pass +//// +//// __all__ = ["[|A|]"] + +// @filename: test2.py +//// from test import [|A|] +//// +//// a: "[|A|]" = [|A|]() + +{ + helper.verifyRename({ + marker: { + newName: 'RenamedA', + changes: helper + .getRangesByText() + .get('A')! 
+ .map((r) => { + return { filePath: r.fileName, range: helper.convertPositionRange(r), replacementText: 'RenamedA' }; + }), + }, + }); +} diff --git a/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts b/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts index 31102e7e642a..7d9fae9ef952 100644 --- a/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts +++ b/packages/pyright-internal/src/tests/fourslash/signature.simple.fourslash.ts @@ -12,7 +12,18 @@ //// [|/*s7*/|] ) //// //// x = 1234[|/*sNoCall*/|] - +//// +//// 'string([|/*sNoCallInString*/|]'.capitalize() +//// +//// f'format string([|/*sNoCallInFormatString*/|]'.capitalize() +//// +//// f'format string {int.as_integer_ratio([|/*s8*/|])} '.capitalize() +//// +//// def foo(f:str): ... +//// +//// def bar(b:str): ... +//// +//// bar([|/*nestedString1*/|]foo([|/*nestedString2*/|])) { const simpleSignatures = [ { @@ -56,5 +67,38 @@ sNoCall: { noSig: true, }, + sNoCallInString: { + noSig: true, + }, + sNoCallInFormatString: { + noSig: true, + }, + s8: { + signatures: [ + { + label: 'builtins.int.as_integer_ratio()', + parameters: [], + }, + ], + activeParameters: [undefined], + }, + nestedString1: { + signatures: [ + { + label: 'simple.bar(b)', + parameters: ['b'], + }, + ], + activeParameters: [0], + }, + nestedString2: { + signatures: [ + { + label: 'simple.foo(f)', + parameters: ['f'], + }, + ], + activeParameters: [0], + }, }); } diff --git a/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts b/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts index 1ba3dbaabf85..e27e21b01e50 100644 --- a/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts +++ b/packages/pyright-internal/src/tests/harness/fourslash/fourSlashTypes.ts @@ -21,6 +21,7 @@ export const enum MetadataOptionNames { fileName = 'filename', library = 'library', distLibrary = 'distlibrary', + ipythonMode = 'ipythonmode', } 
/** List of allowed file metadata names */ @@ -28,6 +29,7 @@ export const fileMetadataNames = [ MetadataOptionNames.fileName, MetadataOptionNames.library, MetadataOptionNames.distLibrary, + MetadataOptionNames.ipythonMode, ]; /** all the necessary information to set the right compiler settings */ @@ -104,16 +106,16 @@ export class TestCancellationToken implements HostCancellationToken { // 0 - cancelled // >0 - not cancelled // <0 - not cancelled and value denotes number of isCancellationRequested after which token become cancelled - private static readonly notCanceled = -1; - private numberOfCallsBeforeCancellation = TestCancellationToken.notCanceled; + private static readonly _notCanceled = -1; + private _numberOfCallsBeforeCancellation = TestCancellationToken._notCanceled; isCancellationRequested(): boolean { - if (this.numberOfCallsBeforeCancellation < 0) { + if (this._numberOfCallsBeforeCancellation < 0) { return false; } - if (this.numberOfCallsBeforeCancellation > 0) { - this.numberOfCallsBeforeCancellation--; + if (this._numberOfCallsBeforeCancellation > 0) { + this._numberOfCallsBeforeCancellation--; return false; } @@ -122,10 +124,10 @@ export class TestCancellationToken implements HostCancellationToken { setCancelled(numberOfCalls = 0): void { debug.assert(numberOfCalls >= 0); - this.numberOfCallsBeforeCancellation = numberOfCalls; + this._numberOfCallsBeforeCancellation = numberOfCalls; } resetCancelled(): void { - this.numberOfCallsBeforeCancellation = TestCancellationToken.notCanceled; + this._numberOfCallsBeforeCancellation = TestCancellationToken._notCanceled; } } diff --git a/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts b/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts index 6d5359e65aca..3956989ea041 100644 --- a/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts +++ b/packages/pyright-internal/src/tests/harness/fourslash/testLanguageService.ts @@ -13,17 +13,22 
@@ import { ImportResolverFactory } from '../../../analyzer/importResolver'; import { AnalyzerService } from '../../../analyzer/service'; import { BackgroundAnalysisBase } from '../../../backgroundAnalysisBase'; import { CommandController } from '../../../commands/commandController'; +import { ConfigOptions } from '../../../common/configOptions'; import { ConsoleInterface } from '../../../common/console'; import * as debug from '../../../common/debug'; +import { createDeferred } from '../../../common/deferred'; import { FileSystem } from '../../../common/fileSystem'; import { Range } from '../../../common/textRange'; +import { UriParser } from '../../../common/uriParser'; import { LanguageServerInterface, + MessageAction, ServerSettings, WindowInterface, WorkspaceServiceInstance, } from '../../../languageServerBase'; import { CodeActionProvider } from '../../../languageService/codeActionProvider'; +import { TestAccessHost } from '../testAccessHost'; import { HostSpecificFeatures } from './testState'; export class TestFeatures implements HostSpecificFeatures { @@ -47,17 +52,42 @@ export class TestFeatures implements HostSpecificFeatures { export class TestLanguageService implements LanguageServerInterface { private readonly _workspace: WorkspaceServiceInstance; + private readonly _defaultWorkspace: WorkspaceServiceInstance; + private readonly _uriParser: UriParser; constructor(workspace: WorkspaceServiceInstance, readonly console: ConsoleInterface, readonly fs: FileSystem) { this._workspace = workspace; + this._uriParser = new UriParser(this.fs); + this._defaultWorkspace = { + workspaceName: '', + rootPath: '', + rootUri: '', + serviceInstance: new AnalyzerService( + 'test service', + this.fs, + this.console, + () => new TestAccessHost(), + AnalyzerService.createImportResolver, + new ConfigOptions('.') + ), + disableLanguageServices: false, + disableOrganizeImports: false, + isInitialized: createDeferred(), + }; + } + decodeTextDocumentUri(uriString: string): string 
{ + return this._uriParser.decodeTextDocumentUri(uriString); } - async getWorkspaceForFile(filePath: string): Promise { - debug.assertDefined(this._workspace.serviceInstance.test_program.getSourceFile(filePath)); - return this._workspace; + getWorkspaceForFile(filePath: string): Promise { + if (filePath.startsWith(this._workspace.rootPath)) { + return Promise.resolve(this._workspace); + } + + return Promise.resolve(this._defaultWorkspace); } - async getSettings(workspace: WorkspaceServiceInstance): Promise { + getSettings(workspace: WorkspaceServiceInstance): Promise { const settings: ServerSettings = { venvPath: this._workspace.serviceInstance.getConfigOptions().venvPath, pythonPath: this._workspace.serviceInstance.getConfigOptions().pythonPath, @@ -68,7 +98,7 @@ export class TestLanguageService implements LanguageServerInterface { autoImportCompletions: this._workspace.serviceInstance.getConfigOptions().autoImportCompletions, }; - return settings; + return Promise.resolve(settings); } createBackgroundAnalysis(): BackgroundAnalysisBase | undefined { @@ -87,18 +117,25 @@ export class TestLanguageService implements LanguageServerInterface { readonly rootPath = path.sep; readonly window = new TestWindow(); + readonly supportAdvancedEdits = true; } class TestWindow implements WindowInterface { - showErrorMessage(message: string): void { + showErrorMessage(message: string): void; + showErrorMessage(message: string, ...actions: MessageAction[]): Promise; + showErrorMessage(message: string, ...actions: MessageAction[]): Promise | void { debug.fail("shouldn't be called"); } - showWarningMessage(message: string): void { + showWarningMessage(message: string): void; + showWarningMessage(message: string, ...actions: MessageAction[]): Promise; + showWarningMessage(message: string, ...actions: MessageAction[]): Promise | void { debug.fail("shouldn't be called"); } - showInformationMessage(message: string): void { + showInformationMessage(message: string): void; + 
showInformationMessage(message: string, ...actions: MessageAction[]): Promise; + showInformationMessage(message: string, ...actions: MessageAction[]): Promise | void { // Don't do anything } } diff --git a/packages/pyright-internal/src/tests/harness/fourslash/testState.ts b/packages/pyright-internal/src/tests/harness/fourslash/testState.ts index 02e78e9630a0..92a14d6971ca 100644 --- a/packages/pyright-internal/src/tests/harness/fourslash/testState.ts +++ b/packages/pyright-internal/src/tests/harness/fourslash/testState.ts @@ -7,25 +7,33 @@ * the test states. */ -import * as assert from 'assert'; +import assert from 'assert'; import * as JSONC from 'jsonc-parser'; import Char from 'typescript-char'; import { + AnnotatedTextEdit, CancellationToken, + ChangeAnnotation, CodeAction, Command, CompletionItem, + CreateFile, + DeleteFile, Diagnostic, DocumentHighlight, DocumentHighlightKind, ExecuteCommandParams, MarkupContent, MarkupKind, + OptionalVersionedTextDocumentIdentifier, + RenameFile, + TextDocumentEdit, TextEdit, WorkspaceEdit, } from 'vscode-languageserver'; import { ImportResolver, ImportResolverFactory } from '../../../analyzer/importResolver'; +import { findNodeByOffset } from '../../../analyzer/parseTreeUtils'; import { Program } from '../../../analyzer/program'; import { AnalyzerService, configFileNames } from '../../../analyzer/service'; import { ConfigOptions } from '../../../common/configOptions'; @@ -40,7 +48,9 @@ import { comparePaths, convertPathToUri, getBaseFileName, + getDirectoryPath, getFileExtension, + getFileSpec, normalizePath, normalizeSlashes, } from '../../../common/pathUtils'; @@ -52,14 +62,16 @@ import { LanguageServerInterface, WorkspaceServiceInstance } from '../../../lang import { AbbreviationInfo } from '../../../languageService/autoImporter'; import { DefinitionFilter } from '../../../languageService/definitionProvider'; import { convertHoverResults } from '../../../languageService/hoverProvider'; +import { ParseNode } from 
'../../../parser/parseNodes'; import { ParseResults } from '../../../parser/parser'; import { Tokenizer } from '../../../parser/tokenizer'; import { PyrightFileSystem } from '../../../pyrightFileSystem'; import { TestAccessHost } from '../testAccessHost'; import * as host from '../testHost'; import { stringify } from '../utils'; -import { createFromFileSystem, distlibFolder, libFolder } from '../vfs/factory'; +import { createFromFileSystem, distlibFolder, libFolder, typeshedFolder } from '../vfs/factory'; import * as vfs from '../vfs/filesystem'; +import { parseTestData } from './fourSlashParser'; import { CompilerSettings, FourSlashData, @@ -98,8 +110,8 @@ export class TestState { private readonly _cancellationToken: TestCancellationToken; private readonly _files: string[] = []; private readonly _hostSpecificFeatures: HostSpecificFeatures; - private readonly _testFS: vfs.TestFileSystem; + readonly testFS: vfs.TestFileSystem; readonly fs: PyrightFileSystem; readonly workspace: WorkspaceServiceInstance; readonly console: ConsoleInterface; @@ -127,7 +139,7 @@ export class TestState { const ignoreCase = toBoolean(testData.globalOptions[GlobalMetadataOptionNames.ignoreCase]); this._cancellationToken = new TestCancellationToken(); - const configOptions = this._convertGlobalOptionsToConfigOptions(this.testData.globalOptions); + const configOptions = this._convertGlobalOptionsToConfigOptions(this.testData.globalOptions, mountPaths); const sourceFiles = []; const files: vfs.FileSet = {}; @@ -152,14 +164,14 @@ export class TestState { } this.console = nullConsole; - this._testFS = createFromFileSystem( + this.testFS = createFromFileSystem( host.HOST, ignoreCase, { cwd: basePath, files, meta: testData.globalOptions }, mountPaths ); - this.fs = new PyrightFileSystem(this._testFS); + this.fs = new PyrightFileSystem(this.testFS); this._files = sourceFiles; const service = this._createAnalysisService( @@ -189,6 +201,13 @@ export class TestState { // Open the first file by 
default this.openFile(this._files[0]); } + + for (const filePath of this._files) { + const file = files[filePath] as vfs.File; + if (file.meta?.[MetadataOptionNames.ipythonMode]) { + this.program.getSourceFile(filePath)?.test_enableIPythonMode(true); + } + } } get importResolver(): ImportResolver { @@ -204,7 +223,7 @@ export class TestState { } cwd() { - return this._testFS.cwd(); + return this.testFS.cwd(); } // Entry points from fourslash.ts @@ -282,14 +301,29 @@ export class TestState { return this.convertPositionRange(range); } + expandPositionRange(range: PositionRange, start: number, end: number) { + return { + start: { line: range.start.line, character: range.start.character - start }, + end: { line: range.end.line, character: range.end.character + end }, + }; + } + convertPositionRange(range: Range) { return this.convertOffsetsToRange(range.fileName, range.pos, range.end); } + convertPathToUri(path: string) { + return convertPathToUri(this.fs, path); + } + + getDirectoryPath(path: string) { + return getDirectoryPath(path); + } + goToPosition(positionOrLineAndColumn: number | Position) { const pos = isNumber(positionOrLineAndColumn) ? 
positionOrLineAndColumn - : this._convertPositionToOffset(this.activeFile.fileName, positionOrLineAndColumn); + : this.convertPositionToOffset(this.activeFile.fileName, positionOrLineAndColumn); this.currentCaretPosition = pos; this.selectionEnd = -1; } @@ -318,7 +352,7 @@ export class TestState { } selectLine(index: number) { - const lineStart = this._convertPositionToOffset(this.activeFile.fileName, { line: index, character: 0 }); + const lineStart = this.convertPositionToOffset(this.activeFile.fileName, { line: index, character: 0 }); const lineEnd = lineStart + this._getLineContent(index).length; this.selectRange({ fileName: this.activeFile.fileName, pos: lineStart, end: lineEnd }); } @@ -355,6 +389,17 @@ export class TestState { return result; } + getFilteredRanges( + predicate: (m: Marker | undefined, d: T | undefined, text: string) => boolean + ): Range[] { + return this.getRanges().filter((r) => predicate(r.marker, r.marker?.data as T | undefined, this._rangeText(r))); + } + + getRangeByMarkerName(markerName: string): Range | undefined { + const marker = this.getMarkerByName(markerName); + return this.getRanges().find((r) => r.marker === marker); + } + goToBOF() { this.goToPosition(0); } @@ -429,8 +474,8 @@ export class TestState { } deleteLineRange(startIndex: number, endIndexInclusive: number) { - const startPos = this._convertPositionToOffset(this.activeFile.fileName, { line: startIndex, character: 0 }); - const endPos = this._convertPositionToOffset(this.activeFile.fileName, { + const startPos = this.convertPositionToOffset(this.activeFile.fileName, { line: startIndex, character: 0 }); + const endPos = this.convertPositionToOffset(this.activeFile.fileName, { line: endIndexInclusive + 1, character: 0, }); @@ -691,6 +736,158 @@ export class TestState { return commandResult; } + protected verifyWorkspaceEdit(expected: WorkspaceEdit, actual: WorkspaceEdit) { + if (actual.changes) { + this._verifyTextEditMap(expected.changes!, actual.changes); + } else { + 
assert(!expected.changes); + } + + if (actual.documentChanges) { + this._verifyDocumentEdits(expected.documentChanges!, actual.documentChanges); + } else { + assert(!expected.documentChanges); + } + + if (actual.changeAnnotations) { + this._verifyChangeAnnotations(expected.changeAnnotations!, actual.changeAnnotations); + } else { + assert(!expected.changeAnnotations); + } + } + + private _verifyChangeAnnotations( + expected: { [id: string]: ChangeAnnotation }, + actual: { [id: string]: ChangeAnnotation } + ) { + assert.strictEqual(Object.entries(expected).length, Object.entries(actual).length); + + for (const key of Object.keys(expected)) { + const expectedAnnotation = expected[key]; + const actualAnnotation = actual[key]; + + // We need to improve it to test localized strings. + assert.strictEqual(expectedAnnotation.label, actualAnnotation.label); + assert.strictEqual(expectedAnnotation.description, actualAnnotation.description); + + assert.strictEqual(expectedAnnotation.needsConfirmation, actualAnnotation.needsConfirmation); + } + } + + private _textDocumentAreSame( + expected: OptionalVersionedTextDocumentIdentifier, + actual: OptionalVersionedTextDocumentIdentifier + ) { + return expected.version === actual.version && expected.uri === actual.uri; + } + + private _verifyDocumentEdits( + expected: (TextDocumentEdit | CreateFile | RenameFile | DeleteFile)[], + actual: (TextDocumentEdit | CreateFile | RenameFile | DeleteFile)[] + ) { + assert.strictEqual(expected.length, actual.length); + + for (const op of expected) { + assert( + actual.some((a) => { + const expectedKind = TextDocumentEdit.is(op) ? 'edit' : op.kind; + const actualKind = TextDocumentEdit.is(a) ? 
'edit' : a.kind; + if (expectedKind !== actualKind) { + return false; + } + + switch (expectedKind) { + case 'edit': { + const expectedEdit = op as TextDocumentEdit; + const actualEdit = a as TextDocumentEdit; + + if (!this._textDocumentAreSame(expectedEdit.textDocument, actualEdit.textDocument)) { + return false; + } + + return this._textEditsAreSame(expectedEdit.edits, actualEdit.edits); + } + case 'create': { + const expectedOp = op as CreateFile; + const actualOp = a as CreateFile; + return ( + expectedOp.kind === actualOp.kind && + expectedOp.annotationId === actualOp.annotationId && + expectedOp.uri === actualOp.uri && + expectedOp.options?.ignoreIfExists === actualOp.options?.ignoreIfExists && + expectedOp.options?.overwrite === actualOp.options?.overwrite + ); + } + case 'rename': { + const expectedOp = op as RenameFile; + const actualOp = a as RenameFile; + return ( + expectedOp.kind === actualOp.kind && + expectedOp.annotationId === actualOp.annotationId && + expectedOp.oldUri === actualOp.oldUri && + expectedOp.newUri === actualOp.newUri && + expectedOp.options?.ignoreIfExists === actualOp.options?.ignoreIfExists && + expectedOp.options?.overwrite === actualOp.options?.overwrite + ); + } + case 'delete': { + const expectedOp = op as DeleteFile; + const actualOp = a as DeleteFile; + return ( + expectedOp.annotationId === actualOp.annotationId && + expectedOp.kind === actualOp.kind && + expectedOp.uri === actualOp.uri && + expectedOp.options?.ignoreIfNotExists === actualOp.options?.ignoreIfNotExists && + expectedOp.options?.recursive === actualOp.options?.recursive + ); + } + default: + debug.assertNever(expectedKind); + } + }) + ); + } + } + + private _verifyTextEditMap(expected: { [uri: string]: TextEdit[] }, actual: { [uri: string]: TextEdit[] }) { + assert.strictEqual(Object.entries(expected).length, Object.entries(actual).length); + + for (const key of Object.keys(expected)) { + assert(this._textEditsAreSame(expected[key], actual[key])); + } + } + + 
private _textEditsAreSame( + expectedEdits: (TextEdit | AnnotatedTextEdit)[], + actualEdits: (TextEdit | AnnotatedTextEdit)[] + ) { + if (expectedEdits.length !== actualEdits.length) { + return false; + } + + for (const edit of expectedEdits) { + if (actualEdits.some((a) => this._textEditAreSame(edit, a))) { + return true; + } + } + + return false; + } + + private _textEditAreSame(expected: TextEdit, actual: TextEdit) { + if (!rangesAreEqual(expected.range, actual.range)) { + return false; + } + + if (expected.newText !== actual.newText) { + return false; + } + + const expectedAnnotation = AnnotatedTextEdit.is(expected) ? expected.annotationId : ''; + const actualAnnotation = AnnotatedTextEdit.is(actual) ? actual.annotationId : ''; + return expectedAnnotation === actualAnnotation; + } + async verifyInvokeCodeAction( map: { [marker: string]: { title: string; files?: { [filePath: string]: string }; edits?: TextEdit[] }; @@ -855,7 +1052,7 @@ export class TestState { const expectedCompletions = map[markerName].completions; const completionPosition = this.convertOffsetToPosition(filePath, marker.position); - const options = { format: docFormat, snippet: true, lazyEdit: true }; + const options = { format: docFormat, snippet: true, lazyEdit: true, autoImport: true }; const nameMap = abbrMap ? new Map(Object.entries(abbrMap)) : undefined; const result = await this.workspace.serviceInstance.getCompletionsForPosition( filePath, @@ -1090,7 +1287,7 @@ export class TestState { CancellationToken.None ); - assert.equal(actual?.length ?? 0, expected.length); + assert.strictEqual(actual?.length ?? 
0, expected.length, `${name} has failed`); for (const r of expected) { assert.equal(actual?.filter((d) => this._deepEqual(d, r)).length, 1); @@ -1172,6 +1369,34 @@ export class TestState { } } + verifyFindTypeDefinitions(map: { + [marker: string]: { + definitions: DocumentRange[]; + }; + }) { + this._analyze(); + + for (const marker of this.getMarkers()) { + const fileName = marker.fileName; + const name = this.getMarkerName(marker); + + if (!(name in map)) { + continue; + } + + const expected = map[name].definitions; + + const position = this.convertOffsetToPosition(fileName, marker.position); + const actual = this.program.getTypeDefinitionsForPosition(fileName, position, CancellationToken.None); + + assert.strictEqual(actual?.length ?? 0, expected.length, name); + + for (const r of expected) { + assert.strictEqual(actual?.filter((d) => this._deepEqual(d, r)).length, 1, name); + } + } + } + verifyRename(map: { [marker: string]: { newName: string; @@ -1220,17 +1445,19 @@ export class TestState { return configFileNames.some((f) => comparer(getBaseFileName(file.fileName), f) === Comparison.EqualTo); } - private _convertGlobalOptionsToConfigOptions(globalOptions: CompilerSettings): ConfigOptions { + private _convertGlobalOptionsToConfigOptions( + globalOptions: CompilerSettings, + mountPaths?: Map + ): ConfigOptions { const srtRoot: string = GlobalMetadataOptionNames.projectRoot; const projectRoot = normalizeSlashes(globalOptions[srtRoot] ?? vfs.MODULE_PATH); const configOptions = new ConfigOptions(projectRoot); // add more global options as we need them - - return this._applyTestConfigOptions(configOptions); + return this._applyTestConfigOptions(configOptions, mountPaths); } - private _applyTestConfigOptions(configOptions: ConfigOptions) { + private _applyTestConfigOptions(configOptions: ConfigOptions, mountPaths?: Map) { // Always enable "test mode". 
configOptions.internalTestMode = true; @@ -1242,17 +1469,28 @@ export class TestState { configOptions.stubPath = normalizePath(combinePaths(vfs.MODULE_PATH, 'typings')); } + configOptions.include.push(getFileSpec(configOptions.projectRoot, '.')); + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, typeshedFolder)); + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, distlibFolder)); + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, libFolder)); + + if (mountPaths) { + for (const mountPath of mountPaths.keys()) { + configOptions.exclude.push(getFileSpec(configOptions.projectRoot, mountPath)); + } + } + return configOptions; } private _getFileContent(fileName: string): string { const files = this.testData.files.filter( - (f) => comparePaths(f.fileName, fileName, this._testFS.ignoreCase) === Comparison.EqualTo + (f) => comparePaths(f.fileName, fileName, this.testFS.ignoreCase) === Comparison.EqualTo ); return files[0].content; } - private _convertPositionToOffset(fileName: string, position: Position): number { + protected convertPositionToOffset(fileName: string, position: Position): number { const lines = this._getTextRangeCollection(fileName); return convertPositionToOffset(position, lines)!; } @@ -1362,7 +1600,7 @@ export class TestState { } } - private _rangeText({ fileName, pos, end }: Range): string { + protected _rangeText({ fileName, pos, end }: Range): string { return this._getFileContent(fileName).slice(pos, end); } @@ -1400,7 +1638,7 @@ export class TestState { private _getLineContent(index: number) { const text = this._getFileContent(this.activeFile.fileName); - const pos = this._convertPositionToOffset(this.activeFile.fileName, { line: index, character: 0 }); + const pos = this.convertPositionToOffset(this.activeFile.fileName, { line: index, character: 0 }); let startPos = pos; let endPos = pos; @@ -1675,3 +1913,46 @@ export class TestState { this._verifyEdits(actual.additionalTextEdits, 
expected.additionalTextEdits); } } + +export function parseAndGetTestState(code: string, projectRoot = '/', anonymousFileName = 'unnamedFile.py') { + const data = parseTestData(normalizeSlashes(projectRoot), code, anonymousFileName); + const state = new TestState(normalizeSlashes('/'), data); + + return { data, state }; +} + +export function getNodeForRange(codeOrState: string | TestState, markerName = 'marker'): ParseNode { + const state = isString(codeOrState) ? parseAndGetTestState(codeOrState).state : codeOrState; + const range = state.getRangeByMarkerName(markerName); + assert(range); + + const textRange = TextRange.fromBounds(range.pos, range.end); + + const node = getNodeAtMarker(state, markerName); + let current: ParseNode | undefined = node; + while (current) { + if (TextRange.containsRange(current, textRange)) { + return current; + } + + current = current.parent; + } + + return node; +} + +export function getNodeAtMarker(codeOrState: string | TestState, markerName = 'marker'): ParseNode { + const state = isString(codeOrState) ? 
parseAndGetTestState(codeOrState).state : codeOrState; + const marker = state.getMarkerByName(markerName); + + const sourceFile = state.program.getBoundSourceFile(marker.fileName); + assert(sourceFile); + + const parserResults = sourceFile.getParseResults(); + assert(parserResults); + + const node = findNodeByOffset(parserResults.parseTree, marker.position); + assert(node); + + return node; +} diff --git a/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts b/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts index 813a6c83c41d..deec709e927d 100644 --- a/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts +++ b/packages/pyright-internal/src/tests/harness/vfs/filesystem.ts @@ -1616,12 +1616,12 @@ export class Link { /** Removes a directory in a `FileSet` */ export class Rmdir { - _rmdirBrand?: never; // brand necessary for proper type guards + ' rmdirBrand'?: never; // brand necessary for proper type guards } /** Unlinks a file in a `FileSet` */ export class Unlink { - _unlinkBrand?: never; // brand necessary for proper type guards + ' unlinkBrand'?: never; // brand necessary for proper type guards } /** Extended options for a symbolic link in a `FileSet` */ diff --git a/packages/pyright-internal/src/tests/importAdder.test.ts b/packages/pyright-internal/src/tests/importAdder.test.ts new file mode 100644 index 000000000000..f1177ae39808 --- /dev/null +++ b/packages/pyright-internal/src/tests/importAdder.test.ts @@ -0,0 +1,1361 @@ +/* + * importAdder.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * tests for importMover. 
+ */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-languageserver'; + +import { rangesAreEqual, TextRange } from '../common/textRange'; +import { ImportAdder } from '../languageService/importAdder'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('builtin types', () => { + const code = ` +// @filename: test1.py +//// [|/*src*/a: str = "hello" +//// b: int = 1 +//// c: True = True +//// d: None = None|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('intrinsic types', () => { + const code = ` +// @filename: test1.py +//// if __name__ == __path__: +//// pass +//// +//// [|/*src*/if __name__ === "__main__": +//// pass +//// b = __path__|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('handle variable in range', () => { + const code = ` +// @filename: test1.py +//// [|/*src*/variableToMove = 1|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('move variable', () => { + const code = ` +// @filename: test1.py +//// variableToMove = 1 +//// [|/*src*/a = variableToMove|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import variableToMove!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move multiple variables', () => { + const code = ` +// @filename: test1.py +//// variableToMove1 = 1 +//// variableToMove2 = 2 +//// [|/*src*/a = variableToMove1 +//// a = variableToMove2|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import variableToMove1, variableToMove2!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle local variables', () => { + const code = ` +// @filename: test1.py +//// def foo(): +//// variableToMove1 = 1 +//// variableToMove2 = 2 +//// [|/*src*/a = variableToMove1 +//// a = variableToMove2|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('handle parameter variable', () => { + const code 
= ` +// @filename: test1.py +//// def foo(p: int): +//// [|/*src*/a = p|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('move private variable', () => { + const code = ` +// @filename: test1.py +//// __private = 1 +//// [|/*src*/a = __private|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import __private!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle function in range', () => { + const code = ` +// @filename: test1.py +//// [|/*src*/def foo(): +//// pass|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('move function', () => { + const code = ` +// @filename: test1.py +//// def foo(): pass +//// [|/*src*/foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move multiple functions', () => { + const code = ` +// @filename: test1.py +//// def foo(): pass +//// def bar(): pass +//// [|/*src*/foo() +//// bar()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import bar, foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle inner function', () => { + const code = ` +// @filename: test1.py +//// def foo(): +//// def bar(): pass +//// [|/*src*/bar()|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('move private function', () => { + const code = ` +// @filename: test1.py +//// def __private(): pass +//// [|/*src*/__private()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import __private!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle class in range', () => { + const code = ` +// @filename: test1.py +//// [|/*src*/class A: pass|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('move class', () => { + const code = ` +// @filename: test1.py +//// class A: pass +//// [|/*src*/a = A()|] + +// @filename: test2.py +//// 
[|/*dest*/{|"r":"from test1 import A!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move multiple classes', () => { + const code = ` +// @filename: test1.py +//// class A: pass +//// class B: pass +//// [|/*src*/a = A() +//// a = B()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import A, B!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle inner class through self', () => { + const code = ` +// @filename: test1.py +//// class A: +//// class B: pass +//// def foo(self): +//// [|/*src*/b = self.B()|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('handle inner class through type', () => { + const code = ` +// @filename: test1.py +//// class A: +//// class B: pass +//// def foo(self): +//// [|/*src*/b = A.B()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import A!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle class variable', () => { + const code = ` +// @filename: test1.py +//// class A: +//// def __init__(self): +//// self.a = 1 +//// c = A(); +//// [|/*src*/a = c.a|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import c!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle class static variable', () => { + const code = ` +// @filename: test1.py +//// class A: +//// V = 1 +//// [|/*src*/a = A.V|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import A!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle class function', () => { + const code = ` +// @filename: test1.py +//// class A: +//// def __init__(self): pass +//// def foo(self): pass +//// c = A(); +//// [|/*src*/a = c.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import c!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle class static function', () => { + const code = ` +// @filename: test1.py +//// class A: +//// def Foo(): pass +//// [|/*src*/a = A.Foo()|] + +// @filename: test2.py +//// 
[|/*dest*/{|"r":"from test1 import A!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('handle class function parameter', () => { + const code = ` +// @filename: test1.py +//// class A: +//// def __init__(self): +//// [|/*src*/self.a = 1|] + +// @filename: test2.py +//// [|/*dest*/|] + `; + + testImportMove(code); +}); + +test('move private class', () => { + const code = ` +// @filename: test1.py +//// class __A: +//// class B: pass +//// [|/*src*/a = __A()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from test1 import __A!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move simple import statement', () => { + const code = ` +// @filename: test1.py +//// import typing +//// +//// [|/*src*/a: typing.Any = 1|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import typing!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move import statement with alias', () => { + const code = ` +// @filename: test1.py +//// import typing as t +//// +//// [|/*src*/a: t.Any = 1|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import typing as t!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move dotted import statement', () => { + const code = ` +// @filename: test1.py +//// import json.encoder +//// +//// [|/*src*/a = json.encoder.JSONEncoder()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json.encoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move dotted statement with alias', () => { + const code = ` +// @filename: test1.py +//// import json.encoder as j +//// +//// [|/*src*/a = j.JSONEncoder()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json.encoder as j!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move both dotted import and regular statement', () => { + const code = ` +// @filename: test1.py +//// import json +//// import json.encoder +//// +//// a = json.encoder.JSONEncoder() +//// [|/*src*/b = json.loads("")|] + +// @filename: test2.py +//// 
[|/*dest*/{|"r":"import json!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move both dotted import and regular statement with alias', () => { + const code = ` +// @filename: test1.py +//// import json as j +//// import json.encoder +//// +//// a = json.encoder.JSONEncoder() +//// [|/*src*/b = j.loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json as j!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple import statements', () => { + const code = ` +// @filename: test1.py +//// import json +//// import json.encoder +//// +//// [|/*src*/a = json.encoder.JSONEncoder() +//// b = json.loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json!n!import json.encoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple import statements with alias', () => { + const code = ` +// @filename: test1.py +//// import json as j +//// import json.encoder as j2 +//// +//// [|/*src*/a = j2.JSONEncoder() +//// b = j.loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json as j!n!import json.encoder as j2!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple import statements - nested', () => { + const code = ` +// @filename: test1.py +//// def foo(): +//// import json +//// import json.encoder +//// +//// [|/*src*/a = json.encoder.JSONEncoder() +//// b = json.loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json!n!import json.encoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple import statements - part of nested body', () => { + const code = ` +// @filename: test1.py +//// def foo(): +//// import json +//// import json.encoder +//// +//// [|/*src*/a = json.encoder.JSONEncoder()|] +//// b = json.loads("") + +// @filename: test2.py +//// [|/*dest*/{|"r":"import json.encoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple import statements - multi dotted name', 
() => { + const code = ` +// @filename: nested/__init__.py +//// def foo(): pass + +// @filename: nested/nested2/__init__.py +//// def foo(): pass + +// @filename: nested/nested2/module.py +//// def foo(): pass + +// @filename: test1.py +//// import nested +//// import nested.nested2 +//// import nested.nested2.module +//// +//// nested.foo() +//// +//// [|/*src*/nested.nested2.foo() +//// nested.nested2.module.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import nested.nested2!n!import nested.nested2.module!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move simple from import statement', () => { + const code = ` +// @filename: test1.py +//// from typing import Any +//// +//// [|/*src*/a: Any = 1|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from typing import Any!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move from import statement with alias', () => { + const code = ` +// @filename: test1.py +//// from typing import Any as t +//// +//// [|/*src*/a: t = 1|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from typing import Any as t!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move submodule from import statement', () => { + const code = ` +// @filename: test1.py +//// from json import encoder +//// +//// [|/*src*/a = encoder.JSONEncoder()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import encoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move submodule from import statement with alias', () => { + const code = ` +// @filename: test1.py +//// from json import encoder as e +//// +//// [|/*src*/a = e.JSONEncoder()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import encoder as e!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move dotted from import statement', () => { + const code = ` +// @filename: test1.py +//// from json.encoder import JSONEncoder +//// +//// [|/*src*/a = JSONEncoder()|] + +// @filename: test2.py +//// 
[|/*dest*/{|"r":"from json.encoder import JSONEncoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move dotted from import statement with alias', () => { + const code = ` +// @filename: test1.py +//// from json.encoder import JSONEncoder as j +//// +//// [|/*src*/a = j()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json.encoder import JSONEncoder as j!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move both dotted from import and regular statement', () => { + const code = ` +// @filename: test1.py +//// from json import loads +//// from json.encoder import JSONEncoder +//// +//// a = JSONEncoder() +//// [|/*src*/b = loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import loads!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move both dotted from import and regular statement with alias', () => { + const code = ` +// @filename: test1.py +//// from json import loads as j +//// from json.encoder import JSONEncoder +//// +//// a = JSONEncoder() +//// [|/*src*/b = j("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import loads as j!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple from import statements', () => { + const code = ` +// @filename: test1.py +//// from json import loads +//// from json.encoder import JSONEncoder +//// +//// [|/*src*/a = JSONEncoder() +//// b = loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import loads!n!from json.encoder import JSONEncoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple from import statements with alias', () => { + const code = ` +// @filename: test1.py +//// from json import loads as j +//// from json.encoder import JSONEncoder as j2 +//// +//// [|/*src*/a = j2() +//// b = j("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import loads as j!n!from json.encoder import JSONEncoder as j2!n!!n!!n!"|}|] +//// + `; + + 
testImportMove(code); +}); + +test('move multiple from import statements - nested', () => { + const code = ` +// @filename: test1.py +//// def foo(): +//// from json import loads +//// from json.encoder import JSONEncoder +//// +//// [|/*src*/a = JSONEncoder() +//// b = loads("")|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json import loads!n!from json.encoder import JSONEncoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple from import statements - part of nested body', () => { + const code = ` +// @filename: test1.py +//// def foo(): +//// from json import loads +//// from json.encoder import JSONEncoder +//// +//// [|/*src*/a = JSONEncoder()|] +//// b = loads("") + +// @filename: test2.py +//// [|/*dest*/{|"r":"from json.encoder import JSONEncoder!n!!n!!n!"|}|] +//// + `; + + testImportMove(code); +}); + +test('move multiple from import statements - multi dotted name', () => { + const code = ` +// @filename: nested/__init__.py +//// def foo(): pass + +// @filename: nested/nested2/__init__.py +//// def foo2(): pass + +// @filename: nested/nested2/module.py +//// def foo3(): pass + +// @filename: test1.py +//// from nested import foo +//// from nested.nested2 import foo2 +//// from nested.nested2.module import foo3 +//// +//// foo() +//// +//// [|/*src*/foo2() +//// foo3()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from nested.nested2 import foo2!n!from nested.nested2.module import foo3!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('relative path from import', () => { + const code = ` +// @filename: nested/__init__.py +//// def foo(): pass + +// @filename: nested/nested2/__init__.py +//// def foo2(): pass + +// @filename: nested/nested2/module.py +//// def foo3(): pass + +// @filename: nested/nested2/test1.py +//// from ...nested import foo +//// from ..nested2 import foo2 +//// from .module import foo3 +//// [|/*src*/foo() +//// foo2() +//// foo3()|] + +// @filename: test2.py +//// 
[|/*dest*/{|"r":"from nested import foo!n!from nested.nested2 import foo2!n!from nested.nested2.module import foo3!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('namespace package from import', () => { + const code = ` +// @filename: nested/module.py +//// def foo(): pass + +// @filename: test1.py +//// from nested.module import foo +//// +//// [|/*src*/foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from nested.module import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('namespace package with submodule from import', () => { + const code = ` +// @filename: nested/module.py +//// def foo(): pass + +// @filename: test1.py +//// from nested import module +//// +//// [|/*src*/module.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from nested import module!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('multi nested namespace package with submodule from import', () => { + const code = ` +// @filename: nested/nested2/nested3/module.py +//// def foo(): pass + +// @filename: test1.py +//// from nested.nested2.nested3 import module +//// +//// [|/*src*/module.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from nested.nested2.nested3 import module!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('multi nested namespace package with __init__ from import', () => { + const code = ` +// @filename: nested/nested2/nested3/__init__.py +//// def foo(): pass + +// @filename: test1.py +//// from nested.nested2.nested3 import foo +//// +//// [|/*src*/foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from nested.nested2.nested3 import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('namespace package with relative path to root - from import', () => { + const code = ` +// @filename: module.py +//// def foo(): pass + +// @filename: test1.py +//// from . import module +//// +//// [|/*src*/module.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from . 
import module!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('namespace package with relative path from import', () => { + const code = ` +// @filename: nested/module.py +//// def foo(): pass + +// @filename: test1.py +//// from .nested import module +//// +//// [|/*src*/module.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from nested import module!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('namespace package import', () => { + const code = ` +// @filename: nested/module.py +//// def foo(): pass + +// @filename: test1.py +//// import nested.module +//// +//// [|/*src*/nested.module.foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"import nested.module!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('__init__ at root', () => { + const code = ` +// @filename: __init__.py +//// def foo(): pass + +// @filename: test1.py +//// from . import foo +//// +//// [|/*src*/foo()|] + +// @filename: test2.py +//// [|/*dest*/{|"r":"from . import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('__init__ at root to nested file', () => { + const code = ` +// @filename: __init__.py +//// def foo(): pass + +// @filename: test1.py +//// from . import foo +//// +//// [|/*src*/foo()|] + +// @filename: nested/test2.py +//// [|/*dest*/{|"r":"from .. 
import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move wild card imports', () => { + const code = ` +// @filename: module.py +//// def foo(): pass +//// __all__ = [ 'foo' ] + +// @filename: test1.py +//// from module import * +//// +//// [|/*src*/foo()|] + +// @filename: nested/test2.py +//// [|/*dest*/{|"r":"from module import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('move wild card imports from __init__', () => { + const code = ` +// @filename: nested/__init__.py +//// def foo(): pass +//// __all__ = [ 'foo' ] + +// @filename: test1.py +//// from nested import * +//// +//// [|/*src*/foo()|] + +// @filename: nested/test2.py +//// [|/*dest*/{|"r":"from nested import foo!n!!n!!n!"|}|] + `; + + testImportMove(code); +}); + +test('merge with existing import', () => { + const code = ` +// @filename: test1.py +//// val1 = 1 +//// val2 = 2 +//// [|/*src*/a = val2|] + +// @filename: test2.py +//// from test1 import val1[|/*dest*/{|"r":", val2"|}|] + `; + + testImportMove(code); +}); + +test('merge multiple symbols with existing import', () => { + const code = ` +// @filename: test1.py +//// val1 = 1 +//// val2 = 2 +//// val3 = 3 +//// [|/*src*/a = val2 +//// b = val3|] + +// @filename: test2.py +//// from test1 import val1[|/*dest*/{|"r":", val2, val3"|}|] + `; + + testImportMove(code); +}); + +test('move with existing import with wild card', () => { + const code = ` +// @filename: test1.py +//// val1 = 1 +//// val2 = 2 +//// [|/*src*/a = val2|] + +// @filename: test2.py +//// from test1 import *[|/*dest*/{|"r":"!n!from test1 import val2"|}|] + `; + + testImportMove(code); +}); + +test('merge multiple symbols with multiple existing import and wildcard', () => { + const code = ` +// @filename: test1.py +//// val1 = 1 +//// val2 = 2 +//// val3 = 3 +//// [|/*src*/a = val2 +//// b = val3|] + +// @filename: test2.py +//// from test1 import * +//// from test1 import val1[|/*dest*/{|"r":", val2, val3"|}|] + `; + + testImportMove(code); 
+}); + +test('merge multiple symbols with multiple existing import', () => { + const code = ` +// @filename: test1.py +//// val1 = 1 +//// val2 = 2 +//// val3 = 3 +//// [|/*src*/a = val2 +//// b = val3|] + +// @filename: test2.py +//// from test1 import val1[|{|"r":", val3"|}|] +//// from test1 import val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('merge multiple symbols with multiple existing import with alias', () => { + const code = ` +// @filename: test1.py +//// val1 = 1 +//// val2 = 2 +//// val3 = 3 +//// [|/*src*/a = val2 +//// b = val3|] + +// @filename: test2.py +//// from test1 import val1[|{|"r":", val2, val3"|}|] +//// from test1 import val2 as v[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('skip with existing import statement', () => { + const code = ` +// @filename: module.py +//// val1 = 1 +//// val2 = 3 + +// @filename: test1.py +//// import module +//// +//// [|/*src*/a = module.val1|] + +// @filename: test2.py +//// import module +//// module.val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('skip with existing import statement with alias', () => { + const code = ` +// @filename: module.py +//// val1 = 1 +//// val2 = 3 + +// @filename: test1.py +//// import module as m +//// +//// [|/*src*/a = m.val1|] + +// @filename: test2.py +//// import module as m +//// m.val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('merge with existing import statement with alias', () => { + const code = ` +// @filename: module.py +//// val1 = 1 +//// val2 = 3 + +// @filename: test1.py +//// import module +//// +//// [|/*src*/a = module.val1|] + +// @filename: test2.py +//// import module as m[|{|"r":"!n!import module"|}|] +//// m.val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('merge with existing import statement with alias 2', () => { + const code = ` +// @filename: module.py +//// val1 = 1 +//// val2 = 3 + +// @filename: test1.py +//// import module as m +//// +//// [|/*src*/a = m.val1|] + +// @filename: 
test2.py +//// import module[|{|"r":"!n!import module as m"|}|] +//// module.val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('mixed with submodule and import - duplicated import', () => { + const code = ` +// @filename: module.py +//// val1 = 1 +//// val2 = 3 + +// @filename: test1.py +//// import module +//// +//// [|/*src*/a = module.val1|] + +// @filename: test2.py +//// [|{|"r":"import module!n!"|}|]from . import module +//// module.val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('mixed with submodule and import - duplicated import 2', () => { + const code = ` +// @filename: module.py +//// val1 = 1 +//// val2 = 3 + +// @filename: test1.py +//// from . import module +//// +//// [|/*src*/a = module.val1|] + +// @filename: test2.py +//// [|{|"r":"from . import module!n!"|}|]import module +//// module.val2[|/*dest*/|] + `; + + testImportMove(code); +}); + +test('multiple mixed import statements', () => { + const code = ` +// @filename: test1.py +//// import typing +//// from os import path +//// import json.encoder as j +//// import json.decoder +//// +//// [|/*src*/def foo(p1: str, p2: typing.Any, p3: typing.Union[int, str]): +//// b = path.join(p1) +//// e = j.JSONEncoder(skipkeys=True) +//// d = json.decoder.JSONDecoder()|] +// @filename: test2.py +//// [|{|"r":"import json.decoder!n!import json.encoder as j!n!"|}|]import os[|{|"r":"!n!from os import path"|}|] +//// import sys +//// import typing +//// from json import decoder[|/*dest*/|] + `; + testImportMove(code); +}); + +test('multiple mixed import statements with merge', () => { + const code = ` +// @filename: test1.py +//// import typing +//// from os import path +//// from json import encoder as j +//// from json import decoder +//// +//// [|/*src*/def foo(p1: str, p2: typing.Any, p3: typing.Union[int, str]): +//// b = path.join(p1) +//// e = j.JSONEncoder(skipkeys=True) +//// d = decoder.JSONDecoder()|] +// @filename: test2.py +//// import sys +//// import typing +//// from os 
import abort[|{|"r":", path"|}|] +//// from json import decoder[|{|"r":", encoder as j"|}|][|/*dest*/|] + `; + testImportMove(code); +}); + +test('move into the same file import statement', () => { + const code = ` +// @filename: test1.py +//// import test2 +//// +//// [|/*src*/test2.foo()|] + +// @filename: test2.py +//// [|{|"r":"import test2!n!!n!!n!"|}|]def foo(): pass +//// [|/*dest*/|] + `; + testImportMove(code); +}); + +test('move into the same file from import statement', () => { + const code = ` +// @filename: test1.py +//// from test2 import foo +//// +//// [|/*src*/foo()|] + +// @filename: test2.py +//// def foo(): pass +//// [|/*dest*/|] + `; + testImportMove(code); +}); + +test('move into the same file from import statement with alias', () => { + const code = ` +// @filename: test1.py +//// from test2 import foo as f +//// +//// [|/*src*/f()|] + +// @filename: test2.py +//// [|{|"r":"from test2 import foo as f!n!!n!!n!"|}|]def foo(): pass +//// [|/*dest*/|] + `; + testImportMove(code); +}); + +test('move into the same file from import statement for __init__', () => { + const code = ` +// @filename: test1.py +//// from nested import foo +//// +//// [|/*src*/foo()|] + +// @filename: nested/__init__.py +//// def foo(): pass +//// [|/*dest*/|] + `; + testImportMove(code); +}); + +test('move into the same file from import statement for __init__ with alias', () => { + const code = ` +// @filename: test1.py +//// from nested import foo as f +//// +//// [|/*src*/f()|] + +// @filename: nested/__init__.py +//// [|{|"r":"from nested import foo as f!n!!n!!n!"|}|]def foo(): pass +//// [|/*dest*/|] + `; + testImportMove(code); +}); + +test('move into the same file from import statement for submodule', () => { + const code = ` +// @filename: test1.py +//// from nested import module +//// +//// [|/*src*/module.foo()|] + +// @filename: nested/__init__.py +//// [|{|"r":"from nested import module!n!!n!!n!"|}|][|/*dest*/|] + +// @filename: nested/module.py +//// def 
foo(): pass + `; + testImportMove(code); +}); + +function testImportMove(code: string) { + const state = parseAndGetTestState(code).state; + + const src = state.getRangeByMarkerName('src')!; + const dest = state.getMarkerByName('dest'); + + const importMover = new ImportAdder(state.configOptions, state.importResolver, state.program.evaluator!); + const importData = importMover.collectImportsForSymbolsUsed( + state.program.getBoundSourceFile(src.fileName)!.getParseResults()!, + TextRange.fromBounds(src.pos, src.end), + CancellationToken.None + ); + + const edits = importMover.applyImports( + importData, + state.program.getBoundSourceFile(dest.fileName)!.getParseResults()!, + dest.position, + CancellationToken.None + ); + + assert(edits); + + const ranges = state.getRanges().filter((r) => !!r.marker?.data); + assert.strictEqual(edits.length, ranges.length); + + for (const edit of edits) { + assert( + ranges.some((r) => { + const data = r.marker!.data as { r: string }; + const expectedText = data.r; + const expectedRange = state.convertPositionRange(r); + return ( + rangesAreEqual(expectedRange, edit.range) && + expectedText.replace(/!n!/g, '\n') === edit.replacementText + ); + }), + `can't find '${edit.replacementText}'@'(${edit.range.start.line},${edit.range.start.character})'` + ); + } +} diff --git a/packages/pyright-internal/src/tests/importResolver.test.ts b/packages/pyright-internal/src/tests/importResolver.test.ts index 32d73666ccb0..ba7972dc6065 100644 --- a/packages/pyright-internal/src/tests/importResolver.test.ts +++ b/packages/pyright-internal/src/tests/importResolver.test.ts @@ -137,6 +137,10 @@ test('stub package', () => { path: combinePaths(libraryRoot, 'myLib-stubs', 'stub.pyi'), content: '# empty', }, + { + path: combinePaths(libraryRoot, 'myLib-stubs', '__init__.pyi'), + content: '# empty', + }, { path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), content: 'def test(): pass', @@ -148,6 +152,28 @@ test('stub package', () => { 
assert(!importResult.isImportFound); }); +test('stub namespace package', () => { + const files = [ + { + path: combinePaths(libraryRoot, 'myLib-stubs', 'stub.pyi'), + content: '# empty', + }, + { + path: combinePaths(libraryRoot, 'myLib', 'partialStub.py'), + content: 'def test(): pass', + }, + ]; + + // If fully typed stub package exists, that wins over the real package. + const importResult = getImportResult(files, ['myLib', 'partialStub']); + assert(importResult.isImportFound); + assert(!importResult.isStubFile); + assert.strictEqual( + 1, + importResult.resolvedPaths.filter((f) => f === combinePaths(libraryRoot, 'myLib', 'partialStub.py')).length + ); +}); + test('stub in typing folder over partial stub package', () => { const typingFolder = combinePaths(normalizeSlashes('/'), 'typing'); const files = [ @@ -337,6 +363,108 @@ test('no empty import roots', () => { importResolver.getImportRoots(configOptions.getDefaultExecEnvironment()).forEach((path) => assert(path)); }); +test('import side by side file root', () => { + const files = [ + { + path: combinePaths('/', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual(1, importResult.resolvedPaths.filter((f) => f === combinePaths('/', 'file1.py')).length); +}); + +test('import side by side file sub folder', () => { + const files = [ + { + path: combinePaths('/test', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/test', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual(1, importResult.resolvedPaths.filter((f) => f === combinePaths('/test', 'file1.py')).length); +}); + +test('import side by side file sub under src folder', () => { + const files = [ + { + path: 
combinePaths('/src/nested', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/src/nested', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual( + 1, + importResult.resolvedPaths.filter((f) => f === combinePaths('/src/nested', 'file1.py')).length + ); +}); + +test('import file sub under containing folder', () => { + const files = [ + { + path: combinePaths('/src/nested', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/src/nested/nested2', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(importResult.isImportFound); + assert.strictEqual( + 1, + importResult.resolvedPaths.filter((f) => f === combinePaths('/src/nested', 'file1.py')).length + ); +}); + +test('import side by side file sub under lib folder', () => { + const files = [ + { + path: combinePaths('/lib/site-packages/myLib', 'file1.py'), + content: 'def test1(): ...', + }, + { + path: combinePaths('/lib/site-packages/myLib', 'file2.py'), + content: 'def test2(): ...', + }, + ]; + + const importResult = getImportResult(files, ['file1']); + assert(!importResult.isImportFound); +}); + +test('dont walk up the root', () => { + const files = [ + { + path: combinePaths('/', 'file1.py'), + content: 'def test1(): ...', + }, + ]; + + const importResult = getImportResult(files, ['notExist'], (c) => (c.projectRoot = '')); + assert(!importResult.isImportFound); +}); + function getImportResult( files: { path: string; content: string }[], nameParts: string[], @@ -348,16 +476,18 @@ function getImportResult( /* empty */ }); - const file = combinePaths('src', 'file.py'); - files.push({ - path: file, - content: '# not used', - }); - const fs = createFileSystem(files); const configOptions = new ConfigOptions(normalizeSlashes('/')); setup(configOptions); + const file = files.length > 0 ? 
files[files.length - 1].path : combinePaths('src', 'file.py'); + if (files.length === 0) { + files.push({ + path: file, + content: '# not used', + }); + } + const importResolver = new ImportResolver(fs, configOptions, new TestAccessHost(fs.getModulePath(), [libraryRoot])); const importResult = importResolver.resolveImport(file, configOptions.findExecEnvironment(file), { leadingDots: 0, diff --git a/packages/pyright-internal/src/tests/importStatementUtils.test.ts b/packages/pyright-internal/src/tests/importStatementUtils.test.ts new file mode 100644 index 000000000000..c4f077979b66 --- /dev/null +++ b/packages/pyright-internal/src/tests/importStatementUtils.test.ts @@ -0,0 +1,526 @@ +/* + * importStatementUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for importStatementUtils module. + */ + +import assert from 'assert'; + +import { ImportType } from '../analyzer/importResult'; +import { + getRelativeModuleName, + getTextEditsForAutoImportInsertions, + getTextEditsForAutoImportSymbolAddition, + getTopLevelImports, + ImportNameInfo, + ImportNameWithModuleInfo, +} from '../analyzer/importStatementUtils'; +import { isArray } from '../common/core'; +import { TextEditAction } from '../common/editAction'; +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { convertOffsetToPosition } from '../common/positionUtils'; +import { rangesAreEqual } from '../common/textRange'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { parseAndGetTestState, TestState } from './harness/fourslash/testState'; + +test('getTextEditsForAutoImportInsertion - import empty', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys"|}|] + `; + + testInsertion(code, 'marker1', [], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - import', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys"|}|] + `; + + testInsertion(code, 
'marker1', {}, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - import alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s"|}|] + `; + + testInsertion(code, 'marker1', { alias: 's' }, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple imports', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys"|}|] + `; + + testInsertion(code, 'marker1', [{}, {}], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple imports alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s, sys as y"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { alias: 'y' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple imports alias duplicated', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { alias: 's' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - from import', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import path"|}|] + `; + + testInsertion(code, 'marker1', { name: 'path' }, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - from import alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import path as p"|}|] + `; + + testInsertion(code, 'marker1', { name: 'path', alias: 'p' }, 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple from imports', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import meta_path, path"|}|] + `; + + testInsertion(code, 'marker1', [{ name: 'path' }, { name: 'meta_path' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - multiple from imports with alias', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys 
import meta_path as m, path as p"|}|] + `; + + testInsertion( + code, + 'marker1', + [ + { name: 'path', alias: 'p' }, + { name: 'meta_path', alias: 'm' }, + ], + 'sys', + ImportType.BuiltIn + ); +}); + +test('getTextEditsForAutoImportInsertion - multiple from imports with alias duplicated', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!from sys import meta_path as m, path as p"|}|] + `; + + testInsertion( + code, + 'marker1', + [ + { name: 'path', alias: 'p' }, + { name: 'meta_path', alias: 'm' }, + { name: 'path', alias: 'p' }, + ], + 'sys', + ImportType.BuiltIn + ); +}); + +test('getTextEditsForAutoImportInsertion - multiple import statements', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!import sys as s!n!from sys import path as p"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - different group', () => { + const code = ` +//// import os[|/*marker1*/{|"r":"!n!!n!import sys as s!n!from sys import path as p"|}|] + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.Local); +}); + +test('getTextEditsForAutoImportInsertion - at the top', () => { + const code = ` +//// [|/*marker1*/{|"r":"import sys as s!n!from sys import path as p!n!!n!!n!"|}|]import os + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertion - at the top after module doc string', () => { + const code = ` +//// ''' module doc string ''' +//// __author__ = "Software Authors Name" +//// __copyright__ = "Copyright (C) 2004 Author Name" +//// __license__ = "Public Domain" +//// __version__ = "1.0" +//// [|/*marker1*/{|"r":"import sys as s!n!from sys import path as p!n!!n!!n!"|}|]import os + `; + + testInsertion(code, 'marker1', [{ alias: 's' }, { name: 'path', alias: 'p' }], 'sys', 
ImportType.BuiltIn); +}); + +test('getTextEditsForAutoImportInsertions - mix of import and from import statements', () => { + const code = ` +//// [|/*marker1*/{|"r":"import sys as s!n!from sys import path as p!n!!n!!n!"|}|]import os + `; + + const module = { moduleName: 'sys', importType: ImportType.BuiltIn, isLocalTypingsFile: false }; + testInsertions(code, 'marker1', [ + { module, alias: 's' }, + { module, name: 'path', alias: 'p' }, + ]); +}); + +test('getTextEditsForAutoImportInsertions - multiple modules with different group', () => { + const code = ` +//// [|/*marker1*/|][|{|"r":"from sys import path as p!n!!n!!n!"|}|][|{|"r":"import numpy!n!!n!!n!"|}|][|{|"r":"from test import join!n!!n!!n!"|}|]import os + `; + + const module1 = { moduleName: 'sys', importType: ImportType.BuiltIn, isLocalTypingsFile: false }; + const module2 = { moduleName: 'numpy', importType: ImportType.ThirdParty, isLocalTypingsFile: false }; + const module3 = { moduleName: 'test', importType: ImportType.Local, isLocalTypingsFile: false }; + + testInsertions(code, 'marker1', [ + { module: module1, name: 'path', alias: 'p' }, + { module: module2 }, + { module: module3, name: 'join' }, + ]); +}); + +test('getTextEditsForAutoImportInsertions - multiple modules with existing imports', () => { + const code = ` +//// import os[|/*marker1*/|][|{|"r":"!n!from sys import path as p"|}|][|{|"r":"!n!!n!import numpy"|}|][|{|"r":"!n!!n!from test import join"|}|] + `; + + const module1 = { moduleName: 'sys', importType: ImportType.BuiltIn, isLocalTypingsFile: false }; + const module2 = { moduleName: 'numpy', importType: ImportType.ThirdParty, isLocalTypingsFile: false }; + const module3 = { moduleName: 'test', importType: ImportType.Local, isLocalTypingsFile: false }; + + testInsertions(code, 'marker1', [ + { module: module1, name: 'path', alias: 'p' }, + { module: module2 }, + { module: module3, name: 'join' }, + ]); +}); + +test('getTextEditsForAutoImportInsertions - multiple modules with same 
group', () => { + const code = ` +//// import os[|/*marker1*/|][|{|"r":"!n!!n!import module2!n!from module1 import path as p!n!from module3 import join"|}|] + `; + + const module1 = { moduleName: 'module1', importType: ImportType.Local, isLocalTypingsFile: false }; + const module2 = { moduleName: 'module2', importType: ImportType.Local, isLocalTypingsFile: false }; + const module3 = { moduleName: 'module3', importType: ImportType.Local, isLocalTypingsFile: false }; + + testInsertions(code, 'marker1', [ + { module: module1, name: 'path', alias: 'p' }, + { module: module2 }, + { module: module3, name: 'join' }, + ]); +}); + +test('getTextEditsForAutoImportSymbolAddition', () => { + const code = ` +//// from sys import [|/*marker1*/{|"r":"meta_path, "|}|]path + `; + + testAddition(code, 'marker1', { name: 'meta_path' }, 'sys'); +}); + +test('getTextEditsForAutoImportSymbolAddition - already exist', () => { + const code = ` +//// from sys import path[|/*marker1*/|] + `; + + testAddition(code, 'marker1', { name: 'path' }, 'sys'); +}); + +test('getTextEditsForAutoImportSymbolAddition - with alias', () => { + const code = ` +//// from sys import path[|/*marker1*/{|"r":", path as p"|}|] + `; + + testAddition(code, 'marker1', { name: 'path', alias: 'p' }, 'sys'); +}); + +test('getTextEditsForAutoImportSymbolAddition - multiple names', () => { + const code = ` +//// from sys import [|/*marker1*/{|"r":"meta_path as m, "|}|]path[|{|"r":", zoom as z"|}|] + `; + + testAddition( + code, + 'marker1', + [ + { name: 'meta_path', alias: 'm' }, + { name: 'zoom', alias: 'z' }, + ], + 'sys' + ); +}); + +test('getTextEditsForAutoImportSymbolAddition - multiple names at some spot', () => { + const code = ` +//// from sys import [|/*marker1*/{|"r":"meta_path as m, noon as n, "|}|]path + `; + + testAddition( + code, + 'marker1', + [ + { name: 'meta_path', alias: 'm' }, + { name: 'noon', alias: 'n' }, + ], + 'sys' + ); +}); + +test('getTextEditsForAutoImportSymbolAddition - wildcard', () => 
{ + const code = ` +//// from sys import *[|/*marker1*/|] + `; + + testAddition(code, 'marker1', [{ name: 'path' }], 'sys'); +}); + +test('getRelativeModuleName - same file', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] [|/*dest*/|] + `; + + testRelativeModuleName(code, '.source'); +}); + +test('getRelativeModuleName - same file __init__', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*src*/|] [|/*dest*/|] + `; + + testRelativeModuleName(code, '.'); +}); + +test('getRelativeModuleName - same folder', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: dest.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.dest'); +}); + +test('getRelativeModuleName - different folder move down', () => { + const code = ` +// @filename: common/source.py +//// [|/*src*/|] + +// @filename: dest.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '..dest'); +}); + +test('getRelativeModuleName - different folder move up', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: common/dest.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.common.dest'); +}); + +test('getRelativeModuleName - folder move down __init__ parent folder', () => { + const code = ` +// @filename: nest1/nest2/source.py +//// [|/*src*/|] + +// @filename: nest1/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '..'); +}); + +test('getRelativeModuleName - folder move down __init__ parent folder ignore folder structure', () => { + const code = ` +// @filename: nest1/nest2/source.py +//// [|/*src*/|] + +// @filename: nest1/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '...nest1', /*ignoreFolderStructure*/ true); +}); + +test('getRelativeModuleName - different folder move down __init__ sibling folder', () => { + const code = ` +// @filename: nest1/nest2/source.py +//// [|/*src*/|] + +// @filename: different/__init__.py +//// [|/*dest*/|] + `; 
+ + testRelativeModuleName(code, '...different'); +}); + +test('getRelativeModuleName - different folder move up __init__', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: common/__init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.common'); +}); + +test('getRelativeModuleName - root __init__', () => { + const code = ` +// @filename: source.py +//// [|/*src*/|] + +// @filename: __init__.py +//// [|/*dest*/|] + `; + + testRelativeModuleName(code, '.'); +}); + +test('getRelativeModuleName over fake file', () => { + const code = ` +// @filename: target.py +//// [|/*dest*/|] + `; + + const state = parseAndGetTestState(code).state; + const dest = state.getMarkerByName('dest')!.fileName; + + assert.strictEqual( + getRelativeModuleName( + state.fs, + combinePaths(getDirectoryPath(dest), 'source.py'), + dest, + /*ignoreFolderStructure*/ false, + /*sourceIsFile*/ true + ), + '.target' + ); +}); + +function testRelativeModuleName(code: string, expected: string, ignoreFolderStructure = false) { + const state = parseAndGetTestState(code).state; + const src = state.getMarkerByName('src')!.fileName; + const dest = state.getMarkerByName('dest')!.fileName; + + assert.strictEqual(getRelativeModuleName(state.fs, src, dest, ignoreFolderStructure), expected); +} + +function testAddition( + code: string, + markerName: string, + importNameInfo: ImportNameInfo | ImportNameInfo[], + moduleName: string +) { + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName(markerName)!; + const parseResults = state.program.getBoundSourceFile(marker!.fileName)!.getParseResults()!; + + const importStatement = getTopLevelImports(parseResults.parseTree).orderedImports.find( + (i) => i.moduleName === moduleName + )!; + const edits = getTextEditsForAutoImportSymbolAddition(importNameInfo, importStatement, parseResults); + + const ranges = [...state.getRanges().filter((r) => !!r.marker?.data)]; + 
assert.strictEqual(edits.length, ranges.length, `${markerName} expects ${ranges.length} but got ${edits.length}`); + + testTextEdits(state, edits, ranges); +} + +function testInsertions( + code: string, + markerName: string, + importNameInfo: ImportNameWithModuleInfo | ImportNameWithModuleInfo[] +) { + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName(markerName)!; + const parseResults = state.program.getBoundSourceFile(marker!.fileName)!.getParseResults()!; + + const importStatements = getTopLevelImports(parseResults.parseTree); + const edits = getTextEditsForAutoImportInsertions( + importNameInfo, + importStatements, + parseResults, + convertOffsetToPosition(marker.position, parseResults.tokenizerOutput.lines) + ); + + const ranges = [...state.getRanges().filter((r) => !!r.marker?.data)]; + assert.strictEqual(edits.length, ranges.length, `${markerName} expects ${ranges.length} but got ${edits.length}`); + + testTextEdits(state, edits, ranges); +} + +function testInsertion( + code: string, + markerName: string, + importNameInfo: ImportNameInfo | ImportNameInfo[], + moduleName: string, + importType: ImportType +) { + importNameInfo = isArray(importNameInfo) ? 
importNameInfo : [importNameInfo]; + if (importNameInfo.length === 0) { + importNameInfo.push({}); + } + + testInsertions( + code, + markerName, + importNameInfo.map((i) => { + return { + module: { + moduleName, + importType, + isLocalTypingsFile: false, + }, + name: i.name, + alias: i.alias, + }; + }) + ); +} + +function testTextEdits(state: TestState, edits: TextEditAction[], ranges: Range[]) { + for (const edit of edits) { + assert( + ranges.some((r) => { + const data = r.marker!.data as { r: string }; + const expectedText = data.r; + return ( + rangesAreEqual(state.convertPositionRange(r), edit.range) && + expectedText.replace(/!n!/g, '\n') === edit.replacementText + ); + }), + `can't find '${edit.replacementText}'@'${edit.range.start.line},${edit.range.start.character}'` + ); + } +} diff --git a/packages/pyright-internal/src/tests/indentationUtils.ptvs.test.ts b/packages/pyright-internal/src/tests/indentationUtils.ptvs.test.ts new file mode 100644 index 000000000000..544c192d2d6b --- /dev/null +++ b/packages/pyright-internal/src/tests/indentationUtils.ptvs.test.ts @@ -0,0 +1,388 @@ +/* + * indentationUtils.ptvs.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for indentationUtils module. some tests ported from ptvs indentation tests. + */ + +import assert from 'assert'; + +import { getIndentation } from '../languageService/indentationUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('top level statement - pass', () => { + const code = ` +//// pass +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('top level statement - function', () => { + const code = ` +//// def f(): +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('function with open paren at end of file', () => { + const code = ` +//// def f( +//// [|/*marker*/|] + `; + + // This is due to how our tokenizer associate new line at + // end of stream. 
+ testIndentation(code, 0); +}); + +test('function with open paren between top level statement', () => { + const code = ` +//// def f( +//// [|/*marker*/|] +//// +//// def bar(): pass + `; + + testIndentation(code, 4); +}); + +test('function with open paren', () => { + const code = ` +//// def f( +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('function with parameter', () => { + const code = ` +//// def f(x, +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('call with open paren at end of file', () => { + const code = ` +//// f( +//// [|/*marker*/|] + `; + + // This is due to how our tokenizer associate new line at + // end of stream. + testIndentation(code, 0); +}); + +test('call with open paren between top level statement', () => { + const code = ` +//// f( +//// [|/*marker*/|] +//// +//// bar() + `; + + testIndentation(code, 4); +}); + +test('class with open paren', () => { + const code = ` +//// f( +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('call with parameter', () => { + const code = ` +//// f(x, +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('list', () => { + const code = ` +//// [ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('list with spaces', () => { + const code = ` +//// [ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 17); +}); + +test('list with nested', () => { + const code = ` +//// [[[[[[[ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('list with spaces and element', () => { + const code = ` +//// [x, +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 17); +}); + +test('list with nested with element', () => { + const code = ` +//// [[[[[[[x, +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('set', () => { + const code = ` +//// { +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('function body', () => { + const 
code = ` +//// def f(): +//// print('hi') +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('function body - pass', () => { + const code = ` +//// def f(): +//// pass +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 0); +}); + +test('list in dict', () => { + const code = ` +//// abc = {'x': [ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('nested list in dict', () => { + const code = ` +//// abc = {'x': [ +//// ['''str''', +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('nested list in dict', () => { + const code = ` +//// abc = {'x': [ +//// ['''str''', +//// '''str2''']], +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 5); +}); + +test('inner function', () => { + const code = ` +//// def f(): +//// print 'hi' +//// +//// def inner(): pass +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('dict keys with comment', () => { + const code = ` +//// x = { #comment +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('dict first key with list', () => { + const code = ` +//// x = { #comment +//// 'a': [ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 6); +}); + +test('dict key list element on its own line', () => { + const code = ` +//// x = { #comment +//// 'a': [ +//// 1, +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 10); +}); + +test('dict second key', () => { + const code = ` +//// x = { #comment +//// 'a': [ +//// 1, +//// ], +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 6); +}); + +test('after dict', () => { + const code = ` +//// x = { #comment +//// 'a': [ +//// 1, +//// ], +//// 'b': 42 +//// } +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 0); +}); + +test('explicit multiline expression', () => { + const code = ` +//// def f(): +//// assert False, \\ +//// [|/*marker*/|] +//// 'A message" +//// + `; + + testIndentation(code, 8); +}); + +test('explicit multiline expression 
next statement', () => { + const code = ` +//// def f(): +//// assert False, \\ +//// 'A message" +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('nested block first', () => { + const code = ` +//// def a(): +//// [|/*marker*/|] +//// if b(): +//// if c(): +//// d() +//// p + `; + + testIndentation(code, 4); +}); + +test('nested block second', () => { + const code = ` +//// def a(): +//// if b(): +//// [|/*marker*/|] +//// if c(): +//// d() +//// p + `; + + testIndentation(code, 8); +}); + +test('nested block third', () => { + const code = ` +//// def a(): +//// if b(): +//// if c(): +//// [|/*marker*/|] +//// d() +//// p + `; + + testIndentation(code, 12); +}); + +test('nested block last', () => { + const code = ` +//// def a(): +//// if b(): +//// if c(): +//// d() +//// [|/*marker*/|] +//// p + `; + + testIndentation(code, 12); +}); + +function testIndentation(code: string, indentation: number, preferDedent?: boolean) { + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + + const parseResults = state.program.getBoundSourceFile(marker.fileName)!.getParseResults()!; + const actual = getIndentation(parseResults, marker.position, preferDedent); + assert.strictEqual(actual, indentation); +} diff --git a/packages/pyright-internal/src/tests/indentationUtils.reindent.test.ts b/packages/pyright-internal/src/tests/indentationUtils.reindent.test.ts new file mode 100644 index 000000000000..417b1b517474 --- /dev/null +++ b/packages/pyright-internal/src/tests/indentationUtils.reindent.test.ts @@ -0,0 +1,403 @@ +/* + * indentationUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for indentationUtils module. 
+ */ + +import assert from 'assert'; + +import { TextRange } from '../common/textRange'; +import { reindentSpan } from '../languageService/indentationUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('re-indentation simple', () => { + const code = ` +//// [|/*marker*/def foo(): pass|] + `; + + const expected = `def foo(): pass`; + testIndentation(code, 0, expected); +}); + +test('re-indentation indent first token', () => { + const code = ` +//// [|/*marker*/ def foo(): pass|] + `; + + const expected = ` def foo(): pass`; + testIndentation(code, 2, expected); +}); + +test('re-indentation explicit multiline expression', () => { + const code = ` +//// [|/*marker*/def foo(): +//// i = \\ +//// 1 \\ +//// + \\ +//// 2|] + `; + + const expected = ` def foo(): + i = \\ + 1 \\ + + \\ + 2`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation explicit multiline expression with multiple backslash', () => { + const code = ` +//// [|/*marker*/def foo(): +//// i = \\ +//// \\ +//// \\ +//// 1|] + `; + + const expected = ` def foo(): + i = \\ + \\ + \\ + 1`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation multiline construct', () => { + const code = ` +//// [|/*marker*/def \\ +//// foo(): +//// i = 1 + 2|] + `; + + const expected = ` def \\ + foo(): + i = 1 + 2`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation implicit multiline expression', () => { + const code = ` +//// [|/*marker*/def foo(): +//// i = ( +//// 1 +//// + +//// 2 +//// )|] + `; + + const expected = ` def foo(): + i = ( + 1 + + + 2 + )`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation implicit multiline expression with multiple blank lines', () => { + const code = ` +//// [|/*marker*/def foo(): +//// i = [ +//// 1, +//// +//// 2 +//// ]|] + `; + + const expected = ` def foo(): + i = [ + 1, + + 2 + ]`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation single line string', () => { + 
const code = ` +//// [|/*marker*/def foo(): +//// str = "string data"|] + `; + + const expected = ` def foo(): + str = "string data"`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation multiline line string', () => { + const code = ` +//// [|/*marker*/def foo(): +//// str = """first line +//// second line +//// last line +//// """|] + `; + + const expected = ` def foo(): + str = """first line + second line + last line +"""`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation with comments', () => { + const code = ` +//// [|/*marker*/def foo(): # comment at the end +//// # commend above the line +//// a = ( # commend at multiline expression +//// 1 + 2 # comment after expression +//// ) +//// # commend before end of file|] + `; + + const expected = ` def foo(): # comment at the end + # commend above the line + a = ( # commend at multiline expression + 1 + 2 # comment after expression + ) + # commend before end of file`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation with comments with backslash', () => { + const code = ` +//// [|/*marker*/def foo(): # comment at the end +//// # commend above the line +//// a = \\ +//// 1 + 2 # comment after expression +//// +//// # commend before end of file|] + `; + + const expected = ` def foo(): # comment at the end + # commend above the line + a = \\ + 1 + 2 # comment after expression + + # commend before end of file`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation doc comment', () => { + const code = ` +//// [|/*marker*/def foo(): +//// """ doc comment """ +//// a = 1|] + `; + + const expected = ` def foo(): + """ doc comment """ + a = 1`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation multiline doc comment', () => { + const code = ` +//// [|/*marker*/def foo(): +//// """ doc comment +//// line 1 +//// line 2 +//// """ +//// a = 1|] + `; + + const expected = ` def foo(): + """ doc comment + line 1 + line 2 + """ + a = 1`; + + 
testIndentation(code, 2, expected); +}); + +test('re-indentation top level multiline doc comment', () => { + const code = ` +//// [|/*marker*/ """ doc comment +//// line 1 +//// line 2 +//// """ +//// a = 1|] + `; + + const expected = ` """ doc comment + line 1 + line 2 + """ +a = 1`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation invalid code', () => { + const code = ` +//// [|/*marker*/ASDF +//// ASDFASDFASD +//// +//// asdf asdf asd fasdf sdf asdf asdf asdf +//// 23234235 +//// $%^#&*()_++ +//// asdfas asdf|] + `; + + const expected = ` ASDF + ASDFASDFASD + + asdf asdf asd fasdf sdf asdf asdf asdf + 23234235 + $%^#&*()_++ + asdfas asdf`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation without first token', () => { + const code = ` +//// """ doc string """ +//// i = [|/*marker*/( +//// 1 + 2 +//// )|] + `; + + const expected = `( + 1 + 2 +)`; + + testIndentation(code, 0, expected, /*indentFirstToken*/ false); +}); + +test('re-indentation single line variable doc string', () => { + const code = ` +//// [|/*marker*/i = 1 +//// """ doc string """|] + `; + + const expected = ` i = 1 + """ doc string """`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation multiple line variable doc string', () => { + const code = ` +//// [|/*marker*/i = 1 +//// """ doc string +//// line 1 +//// """|] + `; + + const expected = ` i = 1 + """ doc string + line 1 + """`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation single token', () => { + const code = ` +//// [|/*marker*/a|] + `; + + const expected = ` a`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation between statements', () => { + const code = ` +//// def foo(): +//// pass +//// [|/*marker*/i = 1|] +//// """ doc string +//// line 1 +//// """ + `; + + const expected = ` i = 1`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation inside of body', () => { + const code = ` +//// def foo(): +//// [|/*marker*/ """ doc 
string +//// line 1 +//// """ +//// i = 10|] + `; + + const expected = ` """ doc string + line 1 + """ + i = 10`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation tab', () => { + const code = ` +//// def foo(): +//// [|/*marker*/\t""" doc string +//// \t\tline 1 +//// \t""" +//// \ti = 10|] + `; + + const expected = ` """ doc string +\t line 1 + """ + i = 10`; + + testIndentation(code, 2, expected); +}); + +test('re-indentation tab on multiline text', () => { + const code = ` +//// def foo(): +//// [|/*marker*/\ta = """ line 1 +//// \t\tline 2 +//// \t""" +//// \ti = 10 +//// \tif True: +//// \t\tpass|] + `; + + const expected = ` a = """ line 1 +\t\tline 2 +\t""" + i = 10 + if True: +\t pass`; + + testIndentation(code, 2, expected); +}); + +function testIndentation(code: string, indentation: number, expected: string, indentFirstToken = true) { + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const parseResults = state.program.getBoundSourceFile(range.fileName)!.getParseResults()!; + const actual = reindentSpan( + parseResults, + TextRange.fromBounds(range.pos, range.end), + indentation, + indentFirstToken + ); + + assert.strictEqual(actual, expected); +} diff --git a/packages/pyright-internal/src/tests/indentationUtils.test.ts b/packages/pyright-internal/src/tests/indentationUtils.test.ts new file mode 100644 index 000000000000..c9121be1f4bb --- /dev/null +++ b/packages/pyright-internal/src/tests/indentationUtils.test.ts @@ -0,0 +1,455 @@ +/* + * indentationUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for indentationUtils module. 
+ */ + +import assert from 'assert'; + +import { getIndentation } from '../languageService/indentationUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('top level indentation', () => { + const code = ` +//// [|/*marker*/|]def foo(): pass + `; + + testIndentation(code, 0); +}); + +test('multiple top level indentation', () => { + const code = ` +//// def foo(): pass +//// def foo(): pass +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('sibling indentation', () => { + const code = ` +//// def foo(): +//// i = 1 +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('first child indentation', () => { + const code = ` +//// def foo(): +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('nested first child indentation', () => { + const code = ` +//// class A: +//// def foo(self): +//// [|/*marker*/|] + `; + + testIndentation(code, 8); +}); + +test('nested sibling indentation', () => { + const code = ` +//// class A: +//// def foo(self): +//// i = 1 +//// [|/*marker*/|] + `; + + testIndentation(code, 8); +}); + +test('sibling indentation next line', () => { + const code = ` +//// def foo(): +//// i = 1 +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('sibling indentation next line after indentation point', () => { + const code = ` +//// def foo(): +//// i = 1 +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('sibling indentation next line at 0 char position', () => { + const code = ` +//// def foo(): +//// i = 1 +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('sibling indentation after blank line', () => { + const code = ` +//// def foo(): +//// i = 1 +//// +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('parent indentation after pass', () => { + const code = ` +//// def foo(): +//// pass +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('parent indentation after return', () => { + 
const code = ` +//// def foo(): +//// return +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('parent indentation after raise', () => { + const code = ` +//// def foo(): +//// raise +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('parent indentation after continue', () => { + const code = ` +//// def foo(): +//// while True: +//// continue +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('parent indentation after break', () => { + const code = ` +//// def foo(): +//// while True: +//// break +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('between statements', () => { + const code = ` +//// def foo(): +//// while True: +//// i = 10 +//// [|/*marker*/|] +//// i = 10 + `; + + testIndentation(code, 8); +}); + +test('between statements with prefer dedent', () => { + const code = ` +//// def foo(): +//// while True: +//// i = 10 +//// [|/*marker*/|] +//// i = 10 + `; + + testIndentation(code, 4, /*preferDedent*/ true); +}); + +test('single line multiple statements', () => { + const code = ` +//// def foo(): +//// import os; import sys +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('single line pass', () => { + const code = ` +//// def foo(): pass +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('single line return', () => { + const code = ` +//// def foo(): return +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('single line raise', () => { + const code = ` +//// def foo(): raise +//// [|/*marker*/|] + `; + + testIndentation(code, 0); +}); + +test('single line continue', () => { + const code = ` +//// def foo(): +//// while True: continue +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('single line break', () => { + const code = ` +//// def foo(): +//// while True: break +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('first member', () => { + const code = ` +//// def foo(): +//// 
[|/*marker*/|] +//// i = 1 + `; + + testIndentation(code, 8); +}); + +test('inner first member', () => { + const code = ` +//// def foo(): +//// def bar(): +//// [|/*marker*/|] +//// i = 1 + `; + + testIndentation(code, 6); +}); + +test('single line comment', () => { + const code = ` +//// def foo(): +//// # single line comment +//// [|/*marker*/|] + + `; + + testIndentation(code, 4); +}); + +test('multiline string literals top', () => { + const code = ` +//// def foo(): +//// """ +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('multiline string literals - multiple statements', () => { + const code = ` +//// def foo(): +//// import os; a = """ +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('multiline string literals - blank lines', () => { + const code = ` +//// def foo(): +//// """ +//// +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('multiline string literals - first non blank line', () => { + const code = ` +//// def foo(): +//// i = \\ +//// 1; a = """ +//// [|/*marker*/|] +//// """ + `; + + testIndentation(code, 7); +}); + +test('multiline string literals - align to content', () => { + const code = ` +//// def foo(): +//// """ +//// Hello +//// [|/*marker*/|] +//// """ + `; + + testIndentation(code, 8); +}); + +test('multiline string literals - align to content with multiple blank lines', () => { + const code = ` +//// def foo(): +//// """ +//// Title +//// 1. 
Second +//// +//// +//// +//// +//// [|/*marker*/|] +//// """ + `; + + testIndentation(code, 12); +}); + +test('explicit multiline construct', () => { + const code = ` +//// def \\ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 4); +}); + +test('multiple explicit multiline construct', () => { + const code = ` +//// def foo \\ +//// \\ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 8); +}); + +test('explicit multiline expression', () => { + const code = ` +//// def foo(): +//// a = 1 + \\ +//// [|/*marker*/|] +//// + `; + + testIndentation(code, 8); +}); + +test('explicit multiline expression between lines', () => { + const code = ` +//// def foo(): +//// a = 1 + \\ +//// [|/*marker*/|] +//// b = 1 + `; + + testIndentation(code, 8); +}); + +test('implicit multiline constructs', () => { + const code = ` +//// def foo( +//// [|/*marker*/|] + `; + + testIndentation(code, 4); +}); + +test('multiple implicit multiline constructs', () => { + const code = ` +//// def foo( +//// a, +//// [|/*marker*/|] + `; + + testIndentation(code, 9); +}); + +test('multiple implicit multiline constructs with multiple statements', () => { + const code = ` +//// b = 1 +//// +//// def foo( +//// a, +//// [|/*marker*/|] + `; + + testIndentation(code, 9); +}); + +test('multiline list', () => { + const code = ` +//// a = [ +//// 1, +//// [|/*marker*/|] +//// ] + `; + + testIndentation(code, 7); +}); + +test('unfinished block', () => { + const code = ` +//// def foo(a: Union[int, str]): +//// while True: +//// [|/*marker*/|] +//// +//// def bar() -> int: +//// return 1 + `; + + testIndentation(code, 8); +}); + +function testIndentation(code: string, indentation: number, preferDedent?: boolean) { + const state = parseAndGetTestState(code).state; + const marker = state.getMarkerByName('marker'); + + const parseResults = state.program.getBoundSourceFile(marker.fileName)!.getParseResults()!; + const actual = getIndentation(parseResults, marker.position, preferDedent); + 
assert.strictEqual(actual, indentation); +} diff --git a/packages/pyright-internal/src/tests/ipythonMode.test.ts b/packages/pyright-internal/src/tests/ipythonMode.test.ts new file mode 100644 index 000000000000..1c47f8a8b65b --- /dev/null +++ b/packages/pyright-internal/src/tests/ipythonMode.test.ts @@ -0,0 +1,365 @@ +/* + * ipythonMode.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for tokenizer ipython mode + */ + +import assert from 'assert'; +import { CompletionItemKind, MarkupKind } from 'vscode-languageserver-types'; + +import { TextRange } from '../common/textRange'; +import { TextRangeCollection } from '../common/textRangeCollection'; +import { Localizer } from '../localization/localize'; +import { Comment, CommentType, Token } from '../parser/tokenizerTypes'; +import { parseAndGetTestState } from './harness/fourslash/testState'; + +test('regular mode', () => { + const code = ` +//// [|/*marker*/%cd test|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython magic', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/%cd test|] + `; + + testIPython(code); +}); + +test('ipython shell escape', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/!shellCommand|] + `; + + testIPython(code); +}); + +test('ipython regular operator - mod', () => { + const code = ` +// @ipythonMode: true +//// a = 1 [|/*marker*/% 1|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython regular operator - bang', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// a [|/*marker*/!= 1|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython regular operator multiline', () => { + const code = ` +// @ipythonMode: true +//// a = 1 \\ +//// [|/*marker*/% 1|] + `; + + testIPython(code, /*expectMagic*/ false); +}); + +test('ipython at the top', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/%cd test|] +//// b = 1 + `; + 
+ testIPython(code); +}); + +test('ipython between statements', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// [|/*marker*/%cd test|] +//// b = 1 + `; + + testIPython(code); +}); + +test('ipython at the end', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// [|/*marker*/%cd test|] + `; + + testIPython(code); +}); + +test('ipython multiline magics', () => { + const code = ` +// @ipythonMode: true +//// a = 1 +//// [|/*marker*/%cd test \ +//// other arguments|] + `; + + testIPython(code); +}); + +test('ipython cell mode magics', () => { + const code = ` +// @ipythonMode: true +//// [|/*marker*/%%timeit|] + `; + + testIPython(code); +}); + +test('ipython with indentation', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// pass + `; + + testIPython(code); +}); + +test('ipython without indentation', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 1', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// # comments +//// [|/*marker*/%cd test|] +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 2', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// # comments +//// [|/*marker*/%cd test|] +//// # comments +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 3', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// # comments +//// pass + `; + + testIPython(code); +}); + +test('ipython mixed with regular comments 4', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// # comments +//// pass + `; + + testIPython(code); +}); + +test('ipython multiple magics 1', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// [|/*marker*/%cd test|] +//// 
%cd test2 +//// pass + `; + + testIPython(code); +}); + +test('ipython multiple magics 2', () => { + const code = ` +// @ipythonMode: true +//// def foo(): +//// %cd test +//// [|/*marker*/%cd test2|] +//// pass + `; + + testIPython(code); +}); + +test('top level await raises errors in regular mode', () => { + const code = ` +//// async def foo(): +//// pass +//// +//// [|/*marker*/await foo();|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileName)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === Localizer.Diagnostic.awaitNotInAsync())); +}); + +test('top level await raises no errors in ipython mode', () => { + const code = ` +// @ipythonMode: true +//// async def foo(): +//// pass +//// +//// [|/*marker*/await foo();|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileName)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(!diagnostics?.some((d) => d.message === Localizer.Diagnostic.awaitNotInAsync())); +}); + +test('await still raises errors when used in wrong context in ipython mode', () => { + const code = ` +// @ipythonMode: true +//// async def foo(): +//// pass +//// +//// def bar(): +//// [|/*marker*/await foo();|] + `; + + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const source = state.program.getBoundSourceFile(range.fileName)!; + const diagnostics = source.getDiagnostics(state.configOptions); + + assert(diagnostics?.some((d) => d.message === Localizer.Diagnostic.awaitNotInAsync())); +}); + +test('try implicitly load ipython display module but fail', async () => { + const code = ` +// @ipythonMode: true +//// [|display/*marker*/|] + `; + + const 
state = parseAndGetTestState(code).state; + + await state.verifyCompletion('excluded', MarkupKind.Markdown, { + marker: { + completions: [ + { + label: 'display', + kind: CompletionItemKind.Function, + }, + ], + }, + }); +}); + +test('implicitly load ipython display module', async () => { + const code = ` +// @filename: pyrightconfig.json +//// { +//// "useLibraryCodeForTypes": true +//// } + +// @filename: test.py +// @ipythonMode: true +//// [|display/*marker*/|] + +// @filename: IPython/__init__.py +// @library: true +//// + +// @filename: IPython/display.py +// @library: true +//// def display(): pass + `; + + const state = parseAndGetTestState(code).state; + + await state.verifyCompletion('included', MarkupKind.Markdown, { + marker: { + completions: [ + { + label: 'display', + kind: CompletionItemKind.Function, + }, + ], + }, + }); +}); + +function testIPython(code: string, expectMagic = true) { + const state = parseAndGetTestState(code).state; + const range = state.getRangeByMarkerName('marker')!; + + const results = state.program.getBoundSourceFile(range.fileName)!.getParseResults()!; + + const comment = findCommentByOffset(results.tokenizerOutput.tokens, range.pos + 1); + if (!expectMagic) { + assert(!comment); + return; + } + + assert(comment); + const text = results.text.substring(range.pos, range.end); + + const type = text[0] === '%' ? CommentType.IPythonMagic : CommentType.IPythonShellEscape; + assert.strictEqual(type, comment.type); + assert.strictEqual(text.substring(1), comment.value); +} + +function findCommentByOffset(tokens: TextRangeCollection, offset: number) { + let startIndex = tokens.getItemAtPosition(offset); + startIndex = startIndex >= 0 ? 
startIndex : 0; + + let comment: Comment | undefined; + for (let i = startIndex; i < tokens.count; i++) { + const token = tokens.getItemAt(i); + comment = token.comments?.find((c) => TextRange.contains(c, offset)); + if (comment) { + break; + } + + if (offset < token.start) { + return undefined; + } + } + + return comment; +} diff --git a/packages/pyright-internal/src/tests/parseTreeUtils.test.ts b/packages/pyright-internal/src/tests/parseTreeUtils.test.ts new file mode 100644 index 000000000000..58c8aed57b8c --- /dev/null +++ b/packages/pyright-internal/src/tests/parseTreeUtils.test.ts @@ -0,0 +1,281 @@ +/* + * parseTreeUtils.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Unit tests for parseTreeUtils module. + */ + +import assert from 'assert'; + +import { + getDottedName, + getDottedNameWithGivenNodeAsLastName, + getFirstAncestorOrSelfOfKind, + getFirstNameOfDottedName, + getFullStatementRange, + getStringNodeValueRange, + isFirstNameOfDottedName, + isFromImportAlias, + isFromImportModuleName, + isFromImportName, + isImportAlias, + isImportModuleName, + isLastNameOfDottedName, +} from '../analyzer/parseTreeUtils'; +import { rangesAreEqual, TextRange } from '../common/textRange'; +import { MemberAccessNode, NameNode, ParseNodeType, StringNode } from '../parser/parseNodes'; +import { getNodeAtMarker, getNodeForRange, parseAndGetTestState, TestState } from './harness/fourslash/testState'; + +test('isImportModuleName', () => { + const code = ` +//// import [|/*marker*/os|] + `; + + assert(isImportModuleName(getNodeAtMarker(code))); +}); + +test('isImportAlias', () => { + const code = ` +//// import os as [|/*marker*/os|] + `; + + assert(isImportAlias(getNodeAtMarker(code))); +}); + +test('isFromImportModuleName', () => { + const code = ` +//// from [|/*marker*/os|] import path + `; + + assert(isFromImportModuleName(getNodeAtMarker(code))); +}); + +test('isFromImportName', () => { + const code = ` +//// from . 
import [|/*marker*/os|] + `; + + assert(isFromImportName(getNodeAtMarker(code))); +}); + +test('isFromImportAlias', () => { + const code = ` +//// from . import os as [|/*marker*/os|] + `; + + assert(isFromImportAlias(getNodeAtMarker(code))); +}); + +test('getFirstAncestorOrSelfOfKind', () => { + const code = ` +//// import a.b.c +//// a.b.c.function( +//// 1 + 2 + 3, +//// [|/*result*/a.b.c.function2( +//// [|/*marker*/"name"|] +//// )|] +//// ) + `; + + const state = parseAndGetTestState(code).state; + const node = getFirstAncestorOrSelfOfKind(getNodeAtMarker(state), ParseNodeType.Call); + assert(node); + + const result = state.getRangeByMarkerName('result')!; + assert(node.nodeType === ParseNodeType.Call); + assert(node.start === result.pos); + assert(TextRange.getEnd(node) === result.end); +}); + +test('getDottedNameWithGivenNodeAsLastName', () => { + const code = ` +//// [|/*result1*/[|/*marker1*/a|]|] +//// [|/*result2*/a.[|/*marker2*/b|]|] +//// [|/*result3*/a.b.[|/*marker3*/c|]|] +//// [|/*result4*/a.[|/*marker4*/b|]|].c +//// [|/*result5*/[|/*marker5*/a|]|].b.c + `; + + const state = parseAndGetTestState(code).state; + + for (let i = 1; i <= 5; i++) { + const markerName = 'marker' + i; + const resultName = 'result' + i; + const node = getDottedNameWithGivenNodeAsLastName(getNodeAtMarker(state, markerName) as NameNode); + const result = state.getRangeByMarkerName(resultName)!; + + assert(node.nodeType === ParseNodeType.Name || node.nodeType === ParseNodeType.MemberAccess); + assert(node.start === result.pos); + assert(TextRange.getEnd(node) === result.end); + } +}); + +test('getDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// [|/*marker2*/a.b|] +//// [|/*marker3*/a.b.c|] +//// [|/*marker4*/a.b|].c +//// [|/*marker5*/a|].b.c + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(getDottedNameString('marker1'), 'a'); + assert.strictEqual(getDottedNameString('marker2'), 'a.b'); + 
assert.strictEqual(getDottedNameString('marker3'), 'a.b.c'); + assert.strictEqual(getDottedNameString('marker4'), 'a.b'); + assert.strictEqual(getDottedNameString('marker5'), 'a'); + + function getDottedNameString(marker: string) { + const node = getNodeForRange(state, marker); + return getDottedName(node as NameNode | MemberAccessNode) + ?.map((n) => n.value) + .join('.'); + } +}); + +test('getFirstNameOfDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// [|/*marker2*/a.b|] +//// [|/*marker3*/a.b.c|] +//// [|/*marker4*/a.b|].c +//// [|/*marker5*/a|].b.c + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(getDottedNameString('marker1'), 'a'); + assert.strictEqual(getDottedNameString('marker2'), 'a'); + assert.strictEqual(getDottedNameString('marker3'), 'a'); + assert.strictEqual(getDottedNameString('marker4'), 'a'); + assert.strictEqual(getDottedNameString('marker5'), 'a'); + + function getDottedNameString(marker: string) { + const node = getNodeForRange(state, marker); + return getFirstNameOfDottedName(node as NameNode | MemberAccessNode)?.value ?? 
''; + } +}); + +test('isLastNameOfDottedName', () => { + const code = ` +//// [|/*marker1*/a|] +//// a.[|/*marker2*/b|] +//// a.b.[|/*marker3*/c|] +//// a.[|/*marker4*/b|].c +//// [|/*marker5*/a|].b.c +//// (a).[|/*marker6*/b|] +//// (a.b).[|/*marker7*/c|] +//// a().[|/*marker8*/b|] +//// a[0].[|/*marker9*/b|] +//// a.b([|/*marker10*/c|]).d +//// a.b.([|/*marker11*/c|]) +//// a.[|/*marker12*/b|].c() +//// a.[|/*marker13*/b|]() +//// a.[|/*marker14*/b|][] + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker1') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker2') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker3') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker4') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker5') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker6') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker7') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker8') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker9') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker10') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker11') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker12') as NameNode), false); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker13') as NameNode), true); + assert.strictEqual(isLastNameOfDottedName(getNodeAtMarker(state, 'marker14') as NameNode), true); +}); + +test('isFirstNameOfDottedName', () => { + const code = ` +//// 
[|/*marker1*/a|] +//// a.[|/*marker2*/b|] +//// a.b.[|/*marker3*/c|] +//// a.[|/*marker4*/b|].c +//// [|/*marker5*/a|].b.c +//// ([|/*marker6*/a|]).b +//// (a.b).[|/*marker7*/c|] +//// [|/*marker8*/a|]().b +//// a[0].[|/*marker9*/b|] +//// a.b([|/*marker10*/c|]).d +//// a.b.([|/*marker11*/c|]) +//// a.[|/*marker12*/b|].c() +//// [|/*marker13*/a|].b() +//// a.[|/*marker14*/b|][] +//// [|/*marker15*/a|][] + `; + + const state = parseAndGetTestState(code).state; + + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker1') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker2') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker3') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker4') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker5') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker6') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker7') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker8') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker9') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker10') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker11') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker12') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker13') as NameNode), true); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker14') as NameNode), false); + assert.strictEqual(isFirstNameOfDottedName(getNodeAtMarker(state, 'marker15') as NameNode), true); +}); + 
+test('getStringNodeValueRange', () => { + const code = ` +//// a = "[|/*marker1*/test|]" +//// b = '[|/*marker2*/test2|]' +//// c = '''[|/*marker3*/test3|]''' + `; + + const state = parseAndGetTestState(code).state; + + for (let i = 1; i <= 3; i++) { + const markerName = 'marker' + i; + const range = getStringNodeValueRange(getNodeAtMarker(state, markerName) as StringNode); + const result = state.getRangeByMarkerName(markerName)!; + + assert(range.start === result.pos); + assert(TextRange.getEnd(range) === result.end); + } +}); + +test('getFullStatementRange', () => { + const code = ` +//// [|/*marker1*/import a +//// |][|/*marker2*/a = 1; |][|/*marker3*/b = 2 +//// |][|/*marker4*/if True: +//// pass|] + `; + + const state = parseAndGetTestState(code).state; + + testRange(state, 'marker1', ParseNodeType.Import); + testRange(state, 'marker2', ParseNodeType.Assignment); + testRange(state, 'marker3', ParseNodeType.Assignment); + testRange(state, 'marker4', ParseNodeType.If); + + function testRange(state: TestState, markerName: string, type: ParseNodeType) { + const range = state.getRangeByMarkerName(markerName)!; + const sourceFile = state.program.getBoundSourceFile(range.marker!.fileName)!; + + const statementNode = getFirstAncestorOrSelfOfKind(getNodeAtMarker(state, markerName), type)!; + const statementRange = getFullStatementRange(statementNode, sourceFile.getParseResults()!.tokenizerOutput); + + const expectedRange = state.convertPositionRange(range); + + assert(rangesAreEqual(expectedRange, statementRange)); + } +}); diff --git a/packages/pyright-internal/src/tests/parser.test.ts b/packages/pyright-internal/src/tests/parser.test.ts index dfdcb2ab6a8f..60b806c4da7c 100644 --- a/packages/pyright-internal/src/tests/parser.test.ts +++ b/packages/pyright-internal/src/tests/parser.test.ts @@ -11,7 +11,9 @@ import * as assert from 'assert'; import { DiagnosticSink } from '../common/diagnosticSink'; +import { TextRange } from '../common/textRange'; import { 
ParseNodeType, StatementListNode } from '../parser/parseNodes'; +import { getNodeAtMarker, parseAndGetTestState } from './harness/fourslash/testState'; import * as TestUtils from './testUtils'; test('Empty', () => { @@ -70,3 +72,28 @@ test('ExpressionWrappedInParens', () => { assert.equal(statementList.statements[0].nodeType, ParseNodeType.Name); assert.equal(statementList.statements[0].length, 3); }); + +test('MaxParseDepth1', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('maxParseDepth1.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 1); +}); + +test('MaxParseDepth2', () => { + const diagSink = new DiagnosticSink(); + TestUtils.parseSampleFile('maxParseDepth2.py', diagSink); + assert.strictEqual(diagSink.getErrors().length, 4); +}); + +test('ModuleName range', () => { + const code = ` +//// from [|/*marker*/...|] import A + `; + + const state = parseAndGetTestState(code).state; + const expectedRange = state.getRangeByMarkerName('marker'); + const node = getNodeAtMarker(state); + + assert.strictEqual(node.start, expectedRange?.pos); + assert.strictEqual(TextRange.getEnd(node), expectedRange?.end); +}); diff --git a/packages/pyright-internal/src/tests/pathUtils.test.ts b/packages/pyright-internal/src/tests/pathUtils.test.ts index 522b094d9a40..6419eff6d7c4 100644 --- a/packages/pyright-internal/src/tests/pathUtils.test.ts +++ b/packages/pyright-internal/src/tests/pathUtils.test.ts @@ -153,6 +153,16 @@ test('getWildcardRoot2', () => { assert.equal(p, normalizeSlashes('/users/me')); }); +test('getWildcardRoot with root', () => { + const p = getWildcardRoot('/', '.'); + assert.equal(p, normalizeSlashes('/')); +}); + +test('getWildcardRoot with drive letter', () => { + const p = getWildcardRoot('c:/', '.'); + assert.equal(p, normalizeSlashes('c:')); +}); + test('reducePathComponentsEmpty', () => { assert.equal(reducePathComponents([]).length, 0); }); diff --git 
a/packages/pyright-internal/src/tests/renameModule.folder.test.ts b/packages/pyright-internal/src/tests/renameModule.folder.test.ts new file mode 100644 index 000000000000..05d65c163f1a --- /dev/null +++ b/packages/pyright-internal/src/tests/renameModule.folder.test.ts @@ -0,0 +1,261 @@ +/* + * renameModule.folder.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tests Program.RenameModule + */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-languageserver'; + +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { testRenameModule } from './renameModuleTestUtils'; + +test('folder move up', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] + +// @filename: test1.py +//// from . import ([|nested|]) + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + const edits = state.program.renameModule(path, combinePaths(path, 'sub'), CancellationToken.None); + assert(!edits); +}); + +test('folder move down', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] + +// @filename: test1.py +//// from . import ([|nested|]) + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + const edits = state.program.renameModule(path, combinePaths(path, '..'), CancellationToken.None); + assert(!edits); +}); + +test('folder rename - from import', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// from . 
import ([|nested|]) +//// [|nested|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - from ', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// from [|nested|] import foo + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - import ', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// import [|nested|] +//// [|nested|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - import dotted name', () => { + const code = ` +// @filename: nested1/__init__.py +//// # empty + +// @filename: nested1/nested2/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// import nested1.[|nested2|] +//// nested1.[|nested2|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested2', 'sub'); +}); + +test('folder rename - multiple files', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: nested/module1.py +//// def foo1(): +//// pass + +// @filename: nested/module2.py +//// def foo2(): +//// pass + +// @filename: test1.py +//// from [|nested|] 
import foo, module1 +//// from [|nested|].module2 import foo2 +//// module1.foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - from alias', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// from . import [|nested|] as [|nested|] +//// [|nested|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - import alias', () => { + const code = ` +// @filename: nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// import [|nested|] as [|nested|] +//// [|nested|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - import dotted name alias', () => { + const code = ` +// @filename: nested/__init__.py +//// # empty + +// @filename: nested/nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// import nested.[|nested|] as [|nested|] +//// [|nested|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - reexport', () => { + const code = ` +// @filename: nested/__init__.py +//// from . 
import [|nested|] +//// [|nested|].foo() + +// @filename: nested/nested/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: nested/nested/module.py +//// from ..[|nested|] import foo + +// @filename: nested/nested/reexport.py +//// from .. import [|nested|] as [|nested|] + +// @filename: test1.py +//// import nested.[|nested|] as [|nested|] +//// [|nested|].foo() + +// @filename: test2.py +//// import nested.[|nested|].reexport +//// nested.[|nested|].reexport.[|nested|].foo() + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); + +test('folder rename - middle folder', () => { + const code = ` +// @filename: nested/__init__.py +//// # empty + +// @filename: nested/nested/__init__.py +//// [|/*marker*/|] + +// @filename: nested/nested/nested/__init__.py +//// # empty + +// @filename: test1.py +//// import nested.[|nested|].nested as nested +//// nested.foo() + +// @filename: test2.py +//// from nested.[|nested|] import nested + `; + + const state = parseAndGetTestState(code).state; + const path = getDirectoryPath(state.getMarkerByName('marker').fileName); + + testRenameModule(state, path, `${combinePaths(path, '..', 'sub')}`, 'nested', 'sub'); +}); diff --git a/packages/pyright-internal/src/tests/renameModule.fromImports.test.ts b/packages/pyright-internal/src/tests/renameModule.fromImports.test.ts new file mode 100644 index 000000000000..378a0dee0393 --- /dev/null +++ b/packages/pyright-internal/src/tests/renameModule.fromImports.test.ts @@ -0,0 +1,952 @@ +/* + * renameModule.fromImports.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests Program.RenameModule + */ + +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { testRenameModule } from './renameModuleTestUtils'; + +test('rename just file name', () => { + const code = ` +// @filename: common/__init__.py +//// from io2 import tools +//// from io2.tools import [|{|"r":"renamedModule"|}pathUtils|] as [|{|"r":"renamedModule"|}pathUtils|] + +// @filename: io2/__init__.py +//// # empty + +// @filename: io2/tools/__init__.py +//// # empty + +// @filename: io2/tools/empty.py +//// # empty + +// @filename: io2/tools/pathUtils.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import * +//// +//// [|{|"r":"renamedModule"|}pathUtils|].getFilename("c") + +// @filename: test3.py +//// from .io2.tools import [|{|"r":"renamedModule"|}pathUtils|] as p +//// +//// p.getFilename("c") + +// @filename: test4.py +//// from common import tools, [|{|"r":"renamedModule"|}pathUtils|] +//// +//// [|{|"r":"renamedModule"|}pathUtils|].getFilename("c") + +// @filename: test5.py +//// from io2.tools import [|{|"r":""|}pathUtils as pathUtils, |]empty[|{|"r":", renamedModule as renamedModule"|}|] +//// +//// [|{|"r":"renamedModule"|}pathUtils|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('from module - move file to nested folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|module|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + 
`${combinePaths(getDirectoryPath(fileName), 'common', 'moduleRenamed.py')}`, + 'module', + 'common.moduleRenamed' + ); +}); + +test('from module - move file to parent folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test.py +//// from [|common.module|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`, + 'common.module', + 'moduleRenamed' + ); +}); + +test('from module - move file to sibling folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: common1/__init__.py +//// # empty + +// @filename: test.py +//// from [|common.module|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'common1', 'moduleRenamed.py')}`, + 'common.module', + 'common1.moduleRenamed' + ); +}); + +test('import name - move file to nested folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|{|"r":"common.sub"|}common|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import name 
- move file to parent folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|{|"r":"common"|}common.sub|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`); +}); + +test('import name - move file to sibling folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common1/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|{|"r":"common1"|}common|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'common1', 'moduleRenamed.py')}` + ); +}); + +test('import alias - different name', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|{|"r":"common.sub"|}common|] import [|{|"r":"moduleRenamed"|}module|] as m +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import alias - same name', () => { + const code = ` +// @filename: common/__init__.py +//// # 
empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|{|"r":"common"|}common.sub|] import [|{|"r":"moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`); +}); + +test('import multiple names', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module, |]sub[|{|"r":"!n!from common.sub import moduleRenamed"|}|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with multiple deletions - edge case', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import sub[|{|"r":""|}, module, module|][|{|"r":"!n!from common.sub import moduleRenamed"|}|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test2.py +//// from common import [|{|"r":""|}module, |]sub[|{|"r":""|}, module|][|{|"r":"!n!from common.sub import moduleRenamed"|}|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test3.py +//// [|{|"r":""|}from common import module, module[|{|"r":"!n!from common.sub import moduleRenamed"|}|] 
+//// |][|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with alias 1', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module as m, |]sub[|{|"r":"!n!from common.sub import moduleRenamed as m"|}|] +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with alias 2', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module as module, |]sub[|{|"r":"!n!from common.sub import moduleRenamed as moduleRenamed"|}|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with existing from import statement', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import 
[|{|"r":""|}module, |]sub +//// from common.sub import existing[|{|"r":", moduleRenamed"|}|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with existing from import statement with multiple deletion - edge case', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module, module, |]sub +//// from common.sub import existing[|{|"r":", moduleRenamed"|}|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test2.py +//// [|{|"r":""|}from common import module, module +//// |]from common.sub import existing[|{|"r":", moduleRenamed"|}|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with existing from import statement with alias 1', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module as m, |]sub +//// from common.sub import existing[|{|"r":", moduleRenamed as m"|}|] +//// +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, 
`${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('import multiple names with existing from import statement with alias 2', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module as module, |]sub +//// from common.sub import existing[|{|"r":", moduleRenamed as moduleRenamed"|}|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from module multiple import names', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def getFilename(path): +//// [|/*marker*/pass|] +//// +//// def foo(): +//// pass + +// @filename: test.py +//// from [|common.module|] import getFilename, foo + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`, + 'common.module', + 'moduleRenamed' + ); +}); + +test('from module relative path - same folder', () => { + const code = ` +// @filename: module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test.py +//// from . 
import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moduleRenamed.py')}`); +}); + +test('from module relative path - nested folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test.py +//// from [|{|"r":".common"|}.|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'moduleRenamed.py')}`); +}); + +test('from module relative path - parent folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test.py +//// from [|{|"r":"."|}.common|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`); +}); + +test('from module relative path - sibling folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common1/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test.py +//// from [|{|"r":".common1"|}.common|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = 
state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'common1', 'moduleRenamed.py')}` + ); +}); + +test('from module relative path - more complex', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: base/nested/test.py +//// from [|{|"r":"...common.sub"|}...common|] import [|{|"r":"moduleRenamed"|}module|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from module relative path with multiple import names', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: base/nested/test.py +//// [|{|"r":"from ...common.sub import moduleRenamed!n!"|}|]from ...common import [|{|"r":""|}module, |]sub +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from module relative path with multiple import names and alias 1', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: 
base/nested/test.py +//// [|{|"r":"from ...common.sub import moduleRenamed as m!n!"|}|]from ...common import [|{|"r":""|}module as m, |]sub +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from module relative path with multiple import names and alias 2', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: base/nested/test.py +//// [|{|"r":"from ...common.sub import moduleRenamed as moduleRenamed!n!"|}|]from ...common import [|{|"r":""|}module as module, |]sub +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from module relative path with merging with existing import', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: base/nested/test.py +//// from ...common import [|{|"r":""|}module, |]sub +//// from ...common.sub import existing[|{|"r":", moduleRenamed"|}|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 
'moduleRenamed.py')}`); +}); + +test('from module relative path with merging with existing import with alias 1', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: base/nested/test.py +//// from ...common import [|{|"r":""|}module as m, |]sub +//// from ...common.sub import existing[|{|"r":", moduleRenamed as m"|}|] +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from module relative path with merging with existing import with alias 2', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// # empty + +// @filename: common/sub/existing.py +//// # empty + +// @filename: base/nested/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: base/nested/test.py +//// from ...common import [|{|"r":""|}module as module, |]sub +//// from ...common.sub import existing[|{|"r":", moduleRenamed as moduleRenamed"|}|] +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('from import move to current folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|{|"r":"."|}common|] import 
([|{|"r":"renamedModule"|}module|]) +//// +//// [|{|"r":"renamedModule"|}module|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`); +}); + +test('re-exported symbols', () => { + const code = ` +// @filename: common/__init__.py +//// from [|{|"r":"common"|}common.io.nest|] import [|{|"r":"renamedModule"|}module|] as [|{|"r":"renamedModule"|}module|] + +// @filename: common/io/__init__.py +//// from [|{|"r":".."|}.nest|] import [|{|"r":"renamedModule"|}module|] as [|{|"r":"renamedModule"|}module|] + +// @filename: common/io/nest/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: reexport.py +//// from common import [|{|"r":"renamedModule"|}module|] +//// __all__ = ["[|{|"r":"renamedModule"|}module|]"] + +// @filename: test1.py +//// from common import [|{|"r":"renamedModule"|}module|] +//// [|{|"r":"renamedModule"|}module|].foo() + +// @filename: test2.py +//// from common.io import [|{|"r":"renamedModule"|}module|] +//// [|{|"r":"renamedModule"|}module|].foo() + +// @filename: test3.py +//// from reexport import [|{|"r":"renamedModule"|}module|] +//// [|{|"r":"renamedModule"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', '..', 'renamedModule.py')}`); +}); + +test('new import with existing import with wildcard', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/sub/__init__.py +//// class A: ... 
+//// __all__ = ["A"] + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from common import [|{|"r":""|}module, |]sub +//// from common.sub import *[|{|"r":"!n!from common.sub import moduleRenamed"|}|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`); +}); + +test('simple rename of relative module', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: common/test1.py +//// from [|.module|] import foo + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'moduleRenamed.py')}`, + '.module', + '.moduleRenamed' + ); +}); + +test('relative module move', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: common/test1.py +//// from [|.module|] import foo + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'sub', 'moduleRenamed.py')}`, + '.module', + '.sub.moduleRenamed' + ); +}); + +test('__init__ relative module move', () => { + const code = ` +// @filename: common/__init__.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|.common|] import foo + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + 
`${combinePaths(getDirectoryPath(fileName), 'moved', '__init__.py')}`, + '.common', + '.common.moved' + ); +}); + +test('__init__ relative module rename', () => { + const code = ` +// @filename: common/__init__.py +//// def foo(): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from [|.common|] import foo + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'moved', '__init__.py')}`, + '.common', + '.moved' + ); +}); diff --git a/packages/pyright-internal/src/tests/renameModule.imports.test.ts b/packages/pyright-internal/src/tests/renameModule.imports.test.ts new file mode 100644 index 000000000000..ec112227fd85 --- /dev/null +++ b/packages/pyright-internal/src/tests/renameModule.imports.test.ts @@ -0,0 +1,450 @@ +/* + * renameModule.imports.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests Program.RenameModule + */ + +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { testRenameModule } from './renameModuleTestUtils'; + +test('rename just file name', () => { + const code = ` +// @filename: empty.py +//// # empty + +// @filename: pathUtils.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// import [|pathUtils|] as p +//// +//// p.getFilename("c") + +// @filename: test2.py +//// import [|pathUtils|] +//// +//// [|pathUtils|].getFilename("c") + +// @filename: test3.py +//// import [|pathUtils|] as [|pathUtils|], empty +//// +//// [|pathUtils|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'pathUtils', + 'renamedModule' + ); +}); + +test('import - move file to nested folder', () => { + const code = ` +// @filename: common/__init__.py +//// def foo(): +//// pass + +// @filename: module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// import [|{|"r":"common.moduleRenamed as moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'moduleRenamed.py')}`); +}); + +test('import - move file to parent folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test.py +//// import [|common.module|] +//// +//// [|common.module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; 
+ + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`, + 'common.module', + 'moduleRenamed' + ); +}); + +test('import - move file to sibling folder', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: common/module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: common1/__init__.py +//// # empty + +// @filename: test.py +//// import [|common.module|] +//// +//// [|common.module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'common1', 'moduleRenamed.py')}`, + 'common.module', + 'common1.moduleRenamed' + ); +}); + +test('import alias move up file', () => { + const code = ` +// @filename: common/__init__.py +//// def foo(): +//// pass + +// @filename: module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// import [|{|"r":"common.moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test1.py +//// import [|{|"r":"common.moduleRenamed"|}module|] as m +//// +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'common', 'moduleRenamed.py')}`, + 'module' + ); +}); + +test('import alias move down file', () => { + const code = ` +// @filename: common/__init__.py +//// def foo(): +//// pass + +// @filename: common/module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// import [|{|"r":"moduleRenamed"|}common.module|] as [|{|"r":"moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test1.py +//// import [|{|"r":"moduleRenamed"|}common.module|] as m +//// +//// 
m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`); +}); + +test('import alias rename file', () => { + const code = ` +// @filename: module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// import [|{|"r":"moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test1.py +//// import [|{|"r":"moduleRenamed"|}module|] as m +//// +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moduleRenamed.py')}`, 'module'); +}); + +test('import alias move sibling file', () => { + const code = ` +// @filename: common1/__init__.py +//// def foo(): +//// pass + +// @filename: common2/__init__.py +//// def foo(): +//// pass + +// @filename: common1/module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// import [|{|"r":"common2.moduleRenamed"|}common1.module|] as [|{|"r":"moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test1.py +//// import [|{|"r":"common2.moduleRenamed"|}common1.module|] as m +//// +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'common2', 'moduleRenamed.py')}` + ); +}); + +test('re-export import alias through __all__', () => { + const code = ` +// @filename: common1/__init__.py +//// import [|{|"r":"common2.moduleRenamed as moduleRenamed"|}module|] +//// __all__ = ["[|{|"r":"moduleRenamed"|}module|]"] + +// @filename: common2/__init__.py +//// def foo(): +//// pass + +// @filename: 
module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// from common1 import [|{|"r":"moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test1.py +//// import [|{|"r":"common2.moduleRenamed"|}module|] as m +//// +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common2', 'moduleRenamed.py')}`); +}); + +test('re-export import alias', () => { + const code = ` +// @filename: common1/__init__.py +//// import [|{|"r":"common2.moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] + +// @filename: common2/__init__.py +//// def foo(): +//// pass + +// @filename: module.py +//// [|/*marker*/|] +//// # empty + +// @filename: test.py +//// from common1 import [|{|"r":"moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] +//// +//// [|{|"r":"moduleRenamed"|}module|].foo() + +// @filename: test1.py +//// import [|{|"r":"common2.moduleRenamed"|}module|] as m +//// +//// m.foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common2', 'moduleRenamed.py')}`); +}); + +test('update module symbol exposed through call 1', () => { + const code = ` +// @filename: lib.py +//// import reexport +//// +//// def foo(): +//// return reexport + +// @filename: reexport.py +//// import [|{|"r":"moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] + +// @filename: module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test.py +//// from lib import foo +//// +//// foo().[|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + 
testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moduleRenamed.py')}`); +}); + +test('update module symbol exposed through call 2', () => { + const code = ` +// @filename: lib.py +//// import reexport +//// +//// def foo(): +//// return reexport + +// @filename: reexport.py +//// import [|{|"r":"common.moduleRenamed"|}module|] as [|{|"r":"moduleRenamed"|}module|] + +// @filename: module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test.py +//// from lib import foo +//// +//// foo().[|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'moduleRenamed.py')}`); +}); + +test('update module symbol exposed through __all__ 1', () => { + const code = ` +// @filename: lib.py +//// import reexport +//// +//// def foo(): +//// return reexport + +// @filename: reexport.py +//// import [|{|"r":"moduleRenamed"|}module|] +//// __all__ = ["[|{|"r":"moduleRenamed"|}module|]"] + +// @filename: module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test.py +//// from lib import foo +//// +//// foo().[|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moduleRenamed.py')}`); +}); + +test('update module symbol exposed through __all__ 2', () => { + const code = ` +// @filename: lib.py +//// import reexport +//// +//// def foo(): +//// return reexport + +// @filename: reexport.py +//// import [|{|"r":"common.moduleRenamed as moduleRenamed"|}module|] +//// __all__ = ["[|{|"r":"moduleRenamed"|}module|]"] + +// @filename: module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test.py +//// from lib import foo +//// 
+//// foo().[|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'moduleRenamed.py')}`); +}); + +test('update module symbol exposed through __all__ 3', () => { + const code = ` +// @filename: lib.py +//// import reexport +//// +//// def foo(): +//// return reexport + +// @filename: reexport.py +//// import [|{|"r":"moduleRenamed"|}common.module|] as [|{|"r":"moduleRenamed"|}module|] +//// __all__ = ["[|{|"r":"moduleRenamed"|}module|]"] + +// @filename: common/module.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test.py +//// from lib import foo +//// +//// foo().[|{|"r":"moduleRenamed"|}module|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'moduleRenamed.py')}`); +}); diff --git a/packages/pyright-internal/src/tests/renameModule.misc.test.ts b/packages/pyright-internal/src/tests/renameModule.misc.test.ts new file mode 100644 index 000000000000..8010a23127eb --- /dev/null +++ b/packages/pyright-internal/src/tests/renameModule.misc.test.ts @@ -0,0 +1,765 @@ +/* + * renameModule.misc.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tests Program.RenameModule + */ + +import { CancellationToken } from 'vscode-languageserver'; + +import { assert } from '../common/debug'; +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { testRenameModule } from './renameModuleTestUtils'; + +test('from import with paren', () => { + const code = ` +// @filename: module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from . 
import ([|module|]) +//// +//// [|module|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'module', + 'renamedModule' + ); +}); + +test('from import with paren with alias', () => { + const code = ` +// @filename: module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: test1.py +//// from . import ([|module|] as [|module|]) +//// +//// [|module|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'module', + 'renamedModule' + ); +}); + +test('from import with paren multiple import names', () => { + const code = ` +// @filename: common/__init__.py +//// # empty + +// @filename: module.py +//// def getFilename(path): +//// [|/*marker*/pass|] + +// @filename: module2.py +//// # empty + +// @filename: test1.py +//// [|{|"r":"from .common import renamedModule as renamedModule!n!"|}|]from . import ([|{|"r":""|}module as module, |]module2) +//// +//// [|{|"r":"renamedModule"|}module|].getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'renamedModule.py')}`); +}); + +test('rename - circular references', () => { + const code = ` +// @filename: module1.py +//// from . 
import [|mySelf|] as [|mySelf|] + +// @filename: mySelf.py +//// from module1 import * +//// [|/*marker*/mySelf|].foo() +//// +//// def foo(): +//// pass + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'mySelf', + 'renamedModule' + ); +}); + +test('move - circular references', () => { + const code = ` +// @filename: module1.py +//// from [|{|"r":".common"|}.|] import [|{|"r":"renamedModule"|}mySelf|] as [|{|"r":"renamedModule"|}mySelf|] + +// @filename: common/__init__.py +//// # empty + +// @filename: mySelf.py +//// [|/*marker*/|] +//// from module1 import * +//// [|{|"r":"renamedModule"|}mySelf|].foo() +//// def foo(): +//// pass + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'renamedModule.py')}`); +}); + +test('py and pyi file update', () => { + const code = ` +// @filename: module.py +//// def getFilename(path): +//// pass + +// @filename: module.pyi +//// [|/*marker*/|] +//// def getFilename(path): ... + +// @filename: test1.py +//// from . import [|module|] as [|module|] +//// +//// [|module|].getFilename("c") + +// @filename: test1.pyi +//// from . import [|module|] as [|module|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.pyi')}`, + 'module', + 'renamedModule' + ); +}); + +test('py and pyi file update from py', () => { + // No reference. if both py and pyi exist, then given file must point to pyi not py. 
+ const code = ` +// @filename: module.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: module.pyi +//// def getFilename(path): ... + +// @filename: test1.py +//// from . import module +//// +//// module.getFilename("c") + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('handle __all__ reference', () => { + const code = ` +// @filename: module.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from . import [|module|] +//// +//// __all__ = [ "[|module|]" ] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'module', + 'renamedModule' + ); +}); + +test('handle __all__ re-export', () => { + const code = ` +// @filename: module.py +//// [|/*marker*/|] +//// def foo(path): +//// pass + +// @filename: common/__init__.py +//// # empty + +// @filename: test1.py +//// from [|{|"r":".common"|}.|] import [|{|"r":"renamedModule"|}module|] +//// +//// __all__ = [ "[|{|"r":"renamedModule"|}module|]" ] + +// @filename: test2.py +//// from test1 import [|{|"r":"renamedModule"|}module|] +//// +//// [|renamedModule|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'common', 'renamedModule.py')}`); +}); + +test('__init__.py rename', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from [|common|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = 
state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'common', + 'common.renamedModule' + ); +}); + +test('__init__.py rename import', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] + +// @filename: test1.py +//// import [|common|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`, + 'common', + 'common.renamedModule as renamedModule' + ); +}); + +test('__init__.py move to nested folder', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from [|common|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'nested', 'renamedModule.py')}`, + 'common', + 'common.nested.renamedModule' + ); +}); + +test('__init__.py move to nested folder with same name', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from [|common|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), 'nested', '__init__.py')}`, + 'common', + 'common.nested' + ); +}); + +test('__init__.py move to parent folder', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from [|common|] import getFilename + `; + + const state = parseAndGetTestState(code).state; 
+ const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule( + state, + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`, + 'common', + 'renamedModule' + ); +}); + +test('__init__.py move to parent folder with same name 1', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from [|common|] import getFilename + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + const edits = state.program.renameModule( + fileName, + `${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`, + CancellationToken.None + ); + assert(!edits); +}); + +test('__init__.py with alias', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// from [|{|"r":".common"|}.|] import [|{|"r":"renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('__init__.py import with alias', () => { + const code = ` +// @filename: common/__init__.py +//// [|/*marker*/|] +//// def getFilename(path): +//// pass + +// @filename: test1.py +//// import [|{|"r":"common.renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('__init__.py rename complex', () => { + const code = ` +// @filename: common/__init__.py +//// import [|{|"r":"common.nested.renamedModule"|}common.nested.lib|] as [|{|"r":"renamedModule"|}lib|] 
+//// __all__ = ["[|{|"r":"renamedModule"|}lib|]"] + +// @filename: reexport.py +//// from common import [|{|"r":"renamedModule"|}lib|] as [|{|"r":"renamedModule"|}lib|] + +// @filename: common/nested/__init__.py +//// # empty + +// @filename: common/nested/lib/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// import common +//// common.[|{|"r":"renamedModule"|}lib|].foo() + +// @filename: test2.py +//// from reexport import [|{|"r":"renamedModule"|}lib|] +//// [|{|"r":"renamedModule"|}lib|].foo() + +// @filename: test3.py +//// from common import * +//// [|{|"r":"renamedModule"|}lib|].foo() + +// @filename: test4.py +//// from reexport import * +//// [|{|"r":"renamedModule"|}lib|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`); +}); + +test('__init__.py moved to parent folder with same name 2', () => { + const code = ` +// @filename: common/__init__.py +//// import [|{|"r":"common.nested"|}common.nested.lib|] as [|{|"r":"nested"|}lib|] +//// __all__ = ["[|{|"r":"nested"|}lib|]"] + +// @filename: reexport.py +//// from common import [|{|"r":"nested"|}lib|] as [|{|"r":"nested"|}lib|] + +// @filename: common/nested/lib/__init__.py +//// [|/*marker*/|] +//// def foo(): +//// pass + +// @filename: test1.py +//// import common +//// common.[|{|"r":"nested"|}lib|].foo() + +// @filename: test2.py +//// from reexport import [|{|"r":"nested"|}lib|] +//// [|{|"r":"nested"|}lib|].foo() + +// @filename: test3.py +//// from common import * +//// [|{|"r":"nested"|}lib|].foo() + +// @filename: test4.py +//// from reexport import * +//// [|{|"r":"nested"|}lib|].foo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, 
`${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`); +}); + +test('__init__.py changes middle of dotted name', () => { + const code = ` +// @filename: common/__init__.py +//// # empty [|/*marker*/|] +//// from common.nested import lib as lib + +// @filename: common/nested/lib.py +//// def libFoo(): +//// pass + +// @filename: common/nested/__init__.py +//// def nestedFoo(): +//// pass + +// @filename: test1.py +//// import common.nested.lib +//// common.nested.lib.libFoo() + +// @filename: test2.py +//// from common import nested +//// nested.nestedFoo() + +// @filename: test3.py +//// from [|{|"r":"common.renamedModule"|}common|] import * +//// lib.libFoo() + +// @filename: test4.py +//// from [|{|"r":"common.renamedModule"|}common|] import lib +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('__init__.py - split from import statement', () => { + const code = ` +// @filename: common/__init__.py +//// # empty [|/*marker*/|] +//// from common.nested import lib as lib + +// @filename: common/nested/lib.py +//// def libFoo(): +//// pass + +// @filename: common/nested/__init__.py +//// def nestedFoo(): +//// pass + +// @filename: test1.py +//// from common import nested[|{|"r":""|}, lib|][|{|"r":"!n!from common.renamedModule import lib"|}|] +//// nested.nestedFoo() +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('__init__.py - split from import statement with multiple names', () => { + const code = ` +// @filename: common/__init__.py +//// # empty [|/*marker*/|] +//// from common.nested import lib as lib +//// def commonFoo(): +//// pass + +// 
@filename: common/nested/lib.py +//// def libFoo(): +//// pass + +// @filename: common/nested/__init__.py +//// def nestedFoo(): +//// pass + +// @filename: test1.py +//// from common import nested[|{|"r":""|}, lib, commonFoo|][|{|"r":"!n!from common.renamedModule import commonFoo, lib"|}|] +//// nested.nestedFoo() +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('__init__.py - merge from import statement with multiple names', () => { + const code = ` +// @filename: common/nested/__init__.py +//// # empty [|/*marker*/|] +//// from common.nested2 import lib as lib +//// def commonFoo(): +//// pass + +// @filename: common/nested/sub.py +//// # empty + +// @filename: common/empty.py +//// # empty + +// @filename: common/nested2/lib.py +//// def libFoo(): +//// pass + +// @filename: test1.py +//// from common.nested import [|{|"r":""|}commonFoo, lib, |]sub +//// from common import [|{|"r":"commonFoo, "|}|]empty[|{|"r":", lib"|}|] +//// +//// nested.commonFoo() +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`); +}); + +test('__init__.py - split from import statement with multiple names with circular reference', () => { + const code = ` +// @filename: common/__init__.py +//// # empty +//// from common.nested import lib as lib +//// from common.nested import [|/*marker*/{|"r":"renamedModule"|}common|] as [|{|"r":"renamedModule"|}common|] +//// +//// def commonFoo(): +//// pass + +// @filename: common/nested/lib.py +//// def libFoo(): +//// pass + +// @filename: common/nested/__init__.py +//// from [|{|"r":".."|}...|] import [|{|"r":"renamedModule"|}common|] as 
[|{|"r":"renamedModule"|}common|] + +// @filename: test1.py +//// from common import nested[|{|"r":""|}, lib, common|][|{|"r":"!n!from common.renamedModule import lib, renamedModule"|}|] +//// nested.[|{|"r":"renamedModule"|}common|].commonFoo() +//// [|{|"r":"renamedModule"|}common|].commonFoo() +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'renamedModule.py')}`); +}); + +test('__init__.py - merge from import statement with multiple names with circular reference', () => { + const code = ` +// @filename: common/nested/__init__.py +//// # empty +//// from common.nested2 import lib as lib +//// from common.nested2 import [|/*marker*/{|"r":"common"|}nested|] as [|{|"r":"common"|}nested|] +//// +//// def commonFoo(): +//// pass + +// @filename: common/nested/sub.py +//// # empty + +// @filename: common/empty.py +//// # empty + +// @filename: common/nested2/__init__.py +//// from [|{|"r":"..."|}..|] import [|{|"r":"common"|}nested|] as [|{|"r":"common"|}nested|] + +// @filename: common/nested2/lib.py +//// def libFoo(): +//// pass + +// @filename: test1.py +//// from common.nested import [|{|"r":""|}nested, lib, |]sub +//// from common import [|{|"r":"common, "|}|]empty[|{|"r":", lib"|}|] +//// +//// [|{|"r":"common"|}nested|].commonFoo() +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', '__init__.py')}`); +}); + +test('__init__.py - merge from import statement with multiple names with circular reference with only name change', () => { + const code = ` +// @filename: common/nested/__init__.py +//// # empty +//// from common.nested2 import lib as lib +//// from common.nested2 import [|/*marker*/{|"r":"renamedModule"|}nested|] as 
[|{|"r":"renamedModule"|}nested|] +//// +//// def commonFoo(): +//// pass + +// @filename: common/nested/sub.py +//// # empty + +// @filename: common/empty.py +//// # empty + +// @filename: common/nested2/__init__.py +//// from .. import [|{|"r":"renamedModule"|}nested|] as [|{|"r":"renamedModule"|}nested|] + +// @filename: common/nested2/lib.py +//// def libFoo(): +//// pass + +// @filename: test1.py +//// from common.nested import [|{|"r":""|}nested, lib, |]sub[|{|"r":"!n!from common.renamedModule import lib, renamedModule"|}|] +//// +//// [|{|"r":"renamedModule"|}nested|].commonFoo() +//// lib.libFoo() + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'renamedModule.py')}`); +}); + +test('add and remove consecutive edits', () => { + const code = ` +// @filename: a1.py +//// # empty [|/*marker*/|] + +// @filename: a3.py +//// # empty + +// @filename: test1.py +//// from . import [|{|"r":"a2"|}a1|], a3 + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'a2.py')}`); +}); + +test('add and remove consecutive edits with alias 1', () => { + const code = ` +// @filename: a1.py +//// # empty [|/*marker*/|] + +// @filename: a3.py +//// # empty + +// @filename: test1.py +//// from . import [|{|"r":"a2"|}a1|] as [|{|"r":"a2"|}a1|], a3 + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'a2.py')}`); +}); + +test('add and remove consecutive edits with alias 2', () => { + const code = ` +// @filename: a1.py +//// # empty [|/*marker*/|] + +// @filename: a3.py +//// # empty + +// @filename: test1.py +//// from . 
import [|{|"r":"a2"|}a1|] as a, a3 + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'a2.py')}`); +}); diff --git a/packages/pyright-internal/src/tests/renameModule.relativePath.test.ts b/packages/pyright-internal/src/tests/renameModule.relativePath.test.ts new file mode 100644 index 000000000000..be4a05667251 --- /dev/null +++ b/packages/pyright-internal/src/tests/renameModule.relativePath.test.ts @@ -0,0 +1,277 @@ +/* + * renameModule.misc.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. + * + * Tests Program.RenameModule + */ + +import { combinePaths, getDirectoryPath } from '../common/pathUtils'; +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { testRenameModule } from './renameModuleTestUtils'; + +test('relative path for self', () => { + const code = ` +// @filename: self.py +//// from .self import foo +//// def foo(): +//// [|/*marker*/pass|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path for self - different name', () => { + const code = ` +// @filename: self.py +//// from [|{|"r":".renamedModule"|}.self|] import foo +//// def foo(): +//// [|/*marker*/pass|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'renamedModule.py')}`); +}); + +test('relative path for self - __init__', () => { + const code = ` +// @filename: common/__init__.py +//// from . 
import foo +//// def foo(): +//// [|/*marker*/pass|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', '__init__.py')}`); +}); + +test('relative path for self - __init__ different name', () => { + const code = ` +// @filename: common/__init__.py +//// from [|{|"r":".renamedModule"|}.|] import foo +//// def foo(): +//// [|/*marker*/pass|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'renamedModule.py')}`); +}); + +test('relative path for self - __init__ folder name', () => { + const code = ` +// @filename: common/__init__.py +//// from [|{|"r":"."|}..common|] import foo +//// def foo(): +//// [|/*marker*/pass|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', '__init__.py')}`); +}); + +test('relative path for self - __init__ different folder name', () => { + const code = ` +// @filename: common/__init__.py +//// from [|{|"r":".renamedModule"|}..common|] import foo +//// def foo(): +//// [|/*marker*/pass|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'renamedModule.py')}`); +}); + +test('relative path for self - import name', () => { + const code = ` +// @filename: self.py +//// from . 
import self +//// [|/*marker*/|] + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path for modules', () => { + const code = ` +// @filename: self.py +//// from [|{|"r":".."|}.|] import module +//// [|/*marker*/|] + +// @filename: module.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path to self with multiple import names', () => { + const code = ` +// @filename: common/self.py +//// [|{|"r":"from . import self!n!"|}|]from [|{|"r":".."|}.|] import [|{|"r":""|}self, |]module, foo +//// [|/*marker*/|] + +// @filename: common/module.py +//// # empty + +// @filename: common/__init__.py +//// def foo(): +//// pass + + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path to module - move up', () => { + const code = ` +// @filename: common/test.py +//// from [|{|"r":"...sub.foo"|}..sub.foo|] import bar +//// [|/*marker*/|] + +// @filename: sub/foo.py +//// def bar(): +//// pass + +// @filename: sub/__init__.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path to module - move down', () => { + const code = ` +// @filename: common/test.py +//// from [|{|"r":".sub.foo"|}..sub.foo|] import bar +//// [|/*marker*/|] + +// @filename: sub/foo.py +//// def bar(): +//// 
pass + +// @filename: sub/__init__.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'self.py')}`); +}); + +test('relative path to module - sibling', () => { + const code = ` +// @filename: common/test.py +//// from ..sub.foo import bar +//// [|/*marker*/|] + +// @filename: sub/foo.py +//// def bar(): +//// pass + +// @filename: sub/__init__.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), '..', 'moved', 'self.py')}`); +}); + +test('relative path to self __init__ with sub modules and symbol with dots', () => { + const code = ` +// @filename: common/__init__.py +//// [|{|"r":"from .self import bar!n!"|}|]from [|{|"r":".."|}.|] import module[|{|"r":""|}, bar|] +//// [|/*marker*/|] +//// def bar(): +//// pass + +// @filename: common/module.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path to self __init__ with sub modules and symbol with dotted name', () => { + const code = ` +// @filename: common/__init__.py +//// [|{|"r":"from common.moved.self import bar!n!"|}|]from [|{|"r":".."|}..common|] import module[|{|"r":""|}, bar|] +//// [|/*marker*/|] +//// def bar(): +//// pass + +// @filename: common/module.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', 'self.py')}`); +}); + +test('relative path to self __init__ with sub modules and symbol with 
dots to __init__', () => { + const code = ` +// @filename: common/__init__.py +//// [|{|"r":"from . import bar!n!"|}|]from [|{|"r":".."|}.|] import module[|{|"r":""|}, bar|] +//// [|/*marker*/|] +//// def bar(): +//// pass + +// @filename: common/module.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', '__init__.py')}`); +}); + +test('relative path to self __init__ with sub modules and symbol with dotted name to __init__', () => { + const code = ` +// @filename: common/__init__.py +//// [|{|"r":"from common.moved import bar!n!"|}|]from [|{|"r":".."|}..common|] import module[|{|"r":""|}, bar|] +//// [|/*marker*/|] +//// def bar(): +//// pass + +// @filename: common/module.py +//// # empty + `; + + const state = parseAndGetTestState(code).state; + const fileName = state.getMarkerByName('marker').fileName; + + testRenameModule(state, fileName, `${combinePaths(getDirectoryPath(fileName), 'moved', '__init__.py')}`); +}); diff --git a/packages/pyright-internal/src/tests/renameModuleTestUtils.ts b/packages/pyright-internal/src/tests/renameModuleTestUtils.ts new file mode 100644 index 000000000000..d267e7b0f26e --- /dev/null +++ b/packages/pyright-internal/src/tests/renameModuleTestUtils.ts @@ -0,0 +1,205 @@ +/* + * renameModule.fromImports.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests Program.RenameModule + */ + +import assert from 'assert'; +import { CancellationToken } from 'vscode-languageserver'; + +import { createMapFromItems } from '../common/collectionUtils'; +import { assertNever } from '../common/debug'; +import { Diagnostic } from '../common/diagnostic'; +import { DiagnosticRule } from '../common/diagnosticRules'; +import { FileEditAction, FileEditActions, FileOperations } from '../common/editAction'; +import { getDirectoryPath, isFile } from '../common/pathUtils'; +import { convertRangeToTextRange } from '../common/positionUtils'; +import { Position, rangesAreEqual, TextRange } from '../common/textRange'; +import { Range } from './harness/fourslash/fourSlashTypes'; +import { TestState } from './harness/fourslash/testState'; + +export function testMoveSymbolAtPosition( + state: TestState, + filePath: string, + newFilePath: string, + position: Position, + text?: string, + replacementText?: string +) { + const actions = state.program.moveSymbolAtPosition(filePath, newFilePath, position, CancellationToken.None); + assert(actions); + + const ranges: Range[] = []; + if (text !== undefined) { + ranges.push(...state.getRangesByText().get(text)!); + } else { + ranges.push(...state.getRanges().filter((r) => !!r.marker?.data)); + } + + assert.strictEqual(actions.edits.length, ranges.length); + + _verifyFileOperations(state, actions, ranges, replacementText); +} + +export function testRenameModule( + state: TestState, + filePath: string, + newFilePath: string, + text?: string, + replacementText?: string +) { + const edits = state.program.renameModule(filePath, newFilePath, CancellationToken.None); + assert(edits); + + const ranges: Range[] = []; + if (text !== undefined) { + ranges.push(...state.getRangesByText().get(text)!); + } else { + ranges.push(...state.getRanges().filter((r) => !!r.marker?.data)); + } + + assert.strictEqual(edits.length, ranges.length); + + const fileOperations: FileOperations[] = []; + fileOperations.push({ 
kind: 'rename', oldFilePath: filePath, newFilePath }); + + // Make sure we don't have missing imports on the original state. + _verifyFileOperations(state, { edits, fileOperations }, ranges, replacementText); +} + +function _verifyFileOperations( + state: TestState, + fileEditActions: FileEditActions, + ranges: Range[], + replacementText: string | undefined +) { + const editsPerFileMap = createMapFromItems(fileEditActions.edits, (e) => e.filePath); + + _verifyMissingImports(); + + _verifyEdits(state, fileEditActions, ranges, replacementText); + + _applyFileOperations(state, fileEditActions); + + // Make sure we don't have missing imports after the change. + _verifyMissingImports(); + + function _verifyMissingImports() { + for (const editFileName of editsPerFileMap.keys()) { + const sourceFile = state.program.getBoundSourceFile(editFileName)!; + _verifyMissingImportsDiagnostics(sourceFile.getDiagnostics(state.configOptions)); + } + } +} + +function _verifyEdits( + state: TestState, + fileEditActions: FileEditActions, + ranges: Range[], + replacementText: string | undefined +) { + for (const edit of fileEditActions.edits) { + assert( + ranges.some((r) => { + const data = r.marker?.data as { r: string } | undefined; + const expectedText = replacementText ?? data?.r ?? 'N/A'; + const expectedRange = state.convertPositionRange(r); + return ( + r.fileName === edit.filePath && + rangesAreEqual(expectedRange, edit.range) && + expectedText.replace(/!n!/g, '\n') === edit.replacementText + ); + }), + `can't find '${replacementText ?? 
edit.replacementText}'@'${edit.filePath}:(${edit.range.start.line},${ + edit.range.start.character + })'` + ); + } +} + +function _applyFileOperations(state: TestState, fileEditActions: FileEditActions) { + // Apply changes + // First, apply text changes + const editsPerFileMap = createMapFromItems(fileEditActions.edits, (e) => e.filePath); + + for (const [editFileName, editsPerFile] of editsPerFileMap) { + const result = _applyEdits(state, editFileName, editsPerFile); + state.testFS.writeFileSync(editFileName, result.text, 'utf8'); + + // Update open file content if the file is in opened state. + if (result.version) { + let openedFilePath = editFileName; + const renamed = fileEditActions.fileOperations.find( + (o) => o.kind === 'rename' && o.oldFilePath === editFileName + ); + if (renamed?.kind === 'rename') { + openedFilePath = renamed.newFilePath; + state.program.setFileClosed(renamed.oldFilePath); + } + + state.program.setFileOpened(openedFilePath, result.version + 1, [{ text: result.text }]); + } + } + + // Second, apply filename change to disk or rename directory. + for (const fileOperation of fileEditActions.fileOperations) { + switch (fileOperation.kind) { + case 'create': { + state.testFS.mkdirpSync(getDirectoryPath(fileOperation.filePath)); + state.testFS.writeFileSync(fileOperation.filePath, ''); + break; + } + case 'rename': { + if (isFile(state.testFS, fileOperation.oldFilePath)) { + state.testFS.mkdirpSync(getDirectoryPath(fileOperation.newFilePath)); + state.testFS.renameSync(fileOperation.oldFilePath, fileOperation.newFilePath); + + // Add new file as tracked file + state.program.addTrackedFile(fileOperation.newFilePath); + } else { + state.testFS.renameSync(fileOperation.oldFilePath, fileOperation.newFilePath); + } + break; + } + case 'delete': { + state.testFS.rimrafSync(fileOperation.filePath); + break; + } + default: + assertNever(fileOperation); + } + } + + // And refresh program. 
+ state.importResolver.invalidateCache(); + state.program.markAllFilesDirty(true); +} + +function _verifyMissingImportsDiagnostics(diagnostics: Diagnostic[] | undefined) { + assert( + !diagnostics || diagnostics.filter((d) => d.getRule() === DiagnosticRule.reportMissingImports).length === 0, + JSON.stringify(diagnostics!.map((d) => d.message)) + ); +} + +function _applyEdits(state: TestState, filePath: string, edits: FileEditAction[]) { + const sourceFile = state.program.getBoundSourceFile(filePath)!; + const parseResults = sourceFile.getParseResults()!; + + const editsWithOffset = edits + .map((e) => ({ + range: convertRangeToTextRange(e.range, parseResults.tokenizerOutput.lines)!, + text: e.replacementText, + })) + .sort((e1, e2) => e2.range.start - e1.range.start); + + // Apply change in reverse order. + let current = parseResults.text; + for (const change of editsWithOffset) { + current = current.substr(0, change.range.start) + change.text + current.substr(TextRange.getEnd(change.range)); + } + + return { version: sourceFile.getClientVersion(), text: current }; +} diff --git a/packages/pyright-internal/src/tests/samples/abstractClass2.py b/packages/pyright-internal/src/tests/samples/abstractClass2.py index fa01618754e2..3cc5fa402625 100644 --- a/packages/pyright-internal/src/tests/samples/abstractClass2.py +++ b/packages/pyright-internal/src/tests/samples/abstractClass2.py @@ -13,9 +13,9 @@ def a(self) -> None: print('MixinA.a') class InterfaceAB(InterfaceA): - @abc.abstractmethod - def b(self) -> None: - print('InterfaceAB.b') + @abc.abstractmethod + def b(self) -> None: + print('InterfaceAB.b') class ClassAB(InterfaceAB, MixinA): def b(self) -> None: diff --git a/packages/pyright-internal/src/tests/samples/annotated1.py b/packages/pyright-internal/src/tests/samples/annotated1.py index 5948c17a8a65..61f765b2aac9 100644 --- a/packages/pyright-internal/src/tests/samples/annotated1.py +++ b/packages/pyright-internal/src/tests/samples/annotated1.py @@ -1,7 +1,8 @@ 
# This sample tests handling of the Python 3.9 "Annotated" feature # described in PEP 593. -from typing import Annotated, TypeVar +from typing import Annotated, TypeVar, ClassVar, Final +from dataclasses import InitVar, dataclass class struct2: @@ -53,3 +54,19 @@ def func2(a: TypeWithStringArg): Param = Annotated[_T, "x"] x: Param[int] = 3 + + +class A: + classvar: Annotated[ClassVar[int], (2, 5)] = 4 + const: Annotated[Final[int], "metadata"] = 4 + + +@dataclass +class B: + x: Annotated[InitVar[int], "metadata"] + + +d1 = B(x=4) + +# This should generate an error because x is not an actual member. +d1.x diff --git a/packages/pyright-internal/src/tests/samples/annotations1.py b/packages/pyright-internal/src/tests/samples/annotations1.py index 679da5f9574d..d742c54defaa 100644 --- a/packages/pyright-internal/src/tests/samples/annotations1.py +++ b/packages/pyright-internal/src/tests/samples/annotations1.py @@ -27,7 +27,8 @@ def func3(self) -> "Optional[ClassC]": def func4(self) -> Optional["ClassC"]: return None - # This should generate an error. + # This should generate an error for Python versions 3.9 + # and earlier. def func5(self) -> "Optional"[int]: return None @@ -49,4 +50,29 @@ def func10(): int, str ] -""" \ No newline at end of file +""" + + +class ClassD: + ClassA: "ClassA" + + # This should generate an error because ClassF refers + # to itself, and there is no ClassF declared at the module + # level. + ClassF: "ClassF" + + str: "str" + + def int(self): + ... + + foo: "int" + + # This should generate an error because it refers to the local + # "int" symbol rather than the builtins "int". + bar: int + + +# This should generate an error because modules are not allowed in +# type annotations. 
+z: typing diff --git a/packages/pyright-internal/src/tests/samples/annotations3.py b/packages/pyright-internal/src/tests/samples/annotations3.py index c38bc7e5a18f..50d35c449a94 100644 --- a/packages/pyright-internal/src/tests/samples/annotations3.py +++ b/packages/pyright-internal/src/tests/samples/annotations3.py @@ -29,6 +29,15 @@ def func3(self) -> "Optional[ClassC]": def func4(self) -> Optional["ClassC"]: return None + def func5(self, x: ClassA): + x.func0() + + class ClassA: + ... + + def func6(self, x: ClassC): + x.my_int + class ClassC: - pass + my_int: int diff --git a/packages/pyright-internal/src/tests/samples/annotations6.py b/packages/pyright-internal/src/tests/samples/annotations6.py new file mode 100644 index 000000000000..62cbaff9106c --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/annotations6.py @@ -0,0 +1,29 @@ +# This sample verifies that the Type[] and type[] annotations work +# as expected when the type argument is Any. + +from typing import Type, Any + + +def is_type1(x: object, y: Type[Any]) -> bool: + return isinstance(x, y) + + +is_type1(1, int) + +# This should generate an error. +is_type1(1, 1) + + +def is_type2(x: object, y: type[Any]) -> bool: + return isinstance(x, y) + + +is_type2(1, int) + +# This should generate an error. +is_type2(1, 1) + + +def func1(v1: Type[Any], v2: type[Any]): + reveal_type(v1, expected_text="type") + reveal_type(v2, expected_text="type") diff --git a/packages/pyright-internal/src/tests/samples/assignment10.py b/packages/pyright-internal/src/tests/samples/assignment10.py new file mode 100644 index 000000000000..e1e5acdf90c0 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/assignment10.py @@ -0,0 +1,14 @@ +# This sample tests some cases where types are narrowed on assignment. 
+ + +class A: + instance: "A" | None + + def __init__(self) -> None: + self.foo: bool + + @classmethod + def method1(cls) -> bool: + if cls.instance is None: + cls.instance = cls() + return cls.instance.foo diff --git a/packages/pyright-internal/src/tests/samples/assignment2.py b/packages/pyright-internal/src/tests/samples/assignment2.py index db5ec428ed08..13a57907573b 100644 --- a/packages/pyright-internal/src/tests/samples/assignment2.py +++ b/packages/pyright-internal/src/tests/samples/assignment2.py @@ -1,7 +1,7 @@ # This sample tests assignments to indexed expressions # where the base is a specialized object. -from typing import List, Dict, Literal, Union +from typing import List, Dict, Union v1: List[int] = [1, 2, 3, 4, 5] @@ -16,15 +16,15 @@ v3: List[Union[int, str]] = ["a"] v3[0] = 3 -t3: Literal["Literal[3]"] = reveal_type(v3[0]) +reveal_type(v3[0], expected_text="Literal[3]") v4: Dict[str, Union[int, str]] = {} v4["aaa"] = 3 v4["bbb"] = "bbb" -t4_0: Literal["Literal[3]"] = reveal_type(v4["aaa"]) -t4_1: Literal["Literal['bbb']"] = reveal_type(v4["bbb"]) -t4_2: Literal["int | str"] = reveal_type(v4["ccc"]) +reveal_type(v4["aaa"], expected_text="Literal[3]") +reveal_type(v4["bbb"], expected_text="Literal['bbb']") +reveal_type(v4["ccc"], expected_text="int | str") class Assymetric: @@ -37,4 +37,4 @@ def __getitem__(self, i: int) -> int: v5 = Assymetric() v5[0] = 3 -t5: Literal["int"] = reveal_type(v5[0]) +reveal_type(v5[0], expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/assignment3.py b/packages/pyright-internal/src/tests/samples/assignment3.py index 386c26d5c261..9558347eb2cb 100644 --- a/packages/pyright-internal/src/tests/samples/assignment3.py +++ b/packages/pyright-internal/src/tests/samples/assignment3.py @@ -2,7 +2,7 @@ # there is an expected type, so bidirectional type # inference is used. 
-from typing import Callable, Dict, Literal, Protocol, Tuple +from typing import Callable, Dict, Protocol, Tuple f1: Callable[[int, int], int] = lambda a, b: a + b @@ -43,4 +43,4 @@ def __call__(self, x: int, y: Dict[str, int]) -> int: v1: Adder = lambda x, y: x + y["hi"] -t1: Literal["(x: int, y: Dict[str, int]) -> int"] = reveal_type(v1) +reveal_type(v1, expected_text="(x: int, y: Dict[str, int]) -> int") diff --git a/packages/pyright-internal/src/tests/samples/assignmentExpr2.py b/packages/pyright-internal/src/tests/samples/assignmentExpr2.py index 5021c5219bc3..0e50a9e296c1 100644 --- a/packages/pyright-internal/src/tests/samples/assignmentExpr2.py +++ b/packages/pyright-internal/src/tests/samples/assignmentExpr2.py @@ -41,3 +41,7 @@ def f(x: float): foo(x = y := f(25)) # INVALID foo(x=(y := f(25))) # Valid, though probably confusing + # This should generate an error. + [y for x in [0, 1] if y := x - 1] + + [y for x in [0, 1] if (y := x - 1)] diff --git a/packages/pyright-internal/src/tests/samples/assignmentExpr3.py b/packages/pyright-internal/src/tests/samples/assignmentExpr3.py index a8e588017d78..3c9f9550a72f 100644 --- a/packages/pyright-internal/src/tests/samples/assignmentExpr3.py +++ b/packages/pyright-internal/src/tests/samples/assignmentExpr3.py @@ -5,7 +5,7 @@ def foo1(x: float): ... -p =3 +p = 3 # This should generate an error. def foo2(answer = p := 42): # INVALID diff --git a/packages/pyright-internal/src/tests/samples/assignmentExpr9.py b/packages/pyright-internal/src/tests/samples/assignmentExpr9.py index 3e6aeab36a0d..eeccc317b695 100644 --- a/packages/pyright-internal/src/tests/samples/assignmentExpr9.py +++ b/packages/pyright-internal/src/tests/samples/assignmentExpr9.py @@ -1,7 +1,7 @@ # This sample tests the case where an assignment expression target # is found within a function decorator or a function default value expression. 
-from typing import Any, Callable, List, Literal, TypeVar +from typing import Any, Callable, List, TypeVar _T = TypeVar("_T") @@ -24,5 +24,5 @@ def decorated( pass -t1: Literal["set[int]"] = reveal_type(walrus_target_1) -t2: Literal["list[str]"] = reveal_type(walrus_target_2) +reveal_type(walrus_target_1, expected_text="set[int]") +reveal_type(walrus_target_2, expected_text="list[str]") diff --git a/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py b/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py index 74c021f7ac7f..a4f07c5c7c05 100644 --- a/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py +++ b/packages/pyright-internal/src/tests/samples/augmentedAssignment1.py @@ -2,16 +2,32 @@ # augmented assignments (combining a binary operator # with an assignment). + a = 1 b = 3.4 a += b +reveal_type(a, expected_text="float") + a -= b +reveal_type(a, expected_text="float") + a *= b +reveal_type(a, expected_text="float") + a /= b +reveal_type(a, expected_text="float") + a //= b +reveal_type(a, expected_text="float") + a %= b +reveal_type(a, expected_text="float") + a **= b +reveal_type(a, expected_text="Any") + +a = 1 # This should generate an error because # matrix multiply isn't supported by int. diff --git a/packages/pyright-internal/src/tests/samples/augmentedAssignment3.py b/packages/pyright-internal/src/tests/samples/augmentedAssignment3.py new file mode 100644 index 000000000000..c4ac8f1f1212 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/augmentedAssignment3.py @@ -0,0 +1,8 @@ +# This sample tests the case where an instance variable +# is assigned using only augmented assignment expressions. + + +class ClassA: + def a(self): + # This should generate an error. 
+ self.val1 += 3 diff --git a/packages/pyright-internal/src/tests/samples/await1.py b/packages/pyright-internal/src/tests/samples/await1.py new file mode 100644 index 000000000000..0f739fa4058d --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/await1.py @@ -0,0 +1,23 @@ +# This sample validates that the await keyword participates in +# bidirectional type inference. + +from typing import Callable, TypeVar, Generic + +T = TypeVar("T") +AnyMsg = TypeVar("AnyMsg", bound="Msg") + + +class Msg(Generic[T]): + body: T + + +class Request: + id: int + + +async def func1(check: "Callable[[AnyMsg], bool]") -> AnyMsg: + ... + + +async def main(): + _: Msg[Request] = await func1(check=lambda msg: (msg.body.id == 12345)) diff --git a/packages/pyright-internal/src/tests/samples/call1.py b/packages/pyright-internal/src/tests/samples/call1.py new file mode 100644 index 000000000000..e5e89074a243 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/call1.py @@ -0,0 +1,93 @@ +# This sample tests various function type checking +# behavior including arg/param matching. + +from typing import Callable + +# ------------------------------------------------------ +# Test function type matching + + +class FooBase: + pass + + +class Foo(FooBase): + pass + + +class Bar(Foo): + pass + + +def needs_function1(callback: Callable[[Foo], Foo]): + pass + + +def callback1(): + pass + + +def callback2(a: Foo) -> Foo: + return Foo() + + +def callback3(a: Foo) -> str: + return "1" + + +def callback4(a: Foo, b: Foo) -> Foo: + return Foo() + + +def callback5(a: Foo, b: int = 3) -> Foo: + return Foo() + + +def callback6(*a) -> Foo: + return Foo() + + +def callback7(a: str) -> Foo: + return Foo() + + +def callback8(a: Bar) -> Foo: + return Foo() + + +def callback9(a: FooBase) -> Foo: + return Foo() + + +# This should generate an error because callback1 +# takes no parameters. 
+needs_function1(callback1) + +needs_function1(callback2) + +# This should generate an error because the return +# type of callback3 doesn't match. +needs_function1(callback3) + +# This should generage an error because callback4 +# takes too many parameters. +needs_function1(callback4) + +needs_function1(callback5) +needs_function1(callback6) + +# This should fail because the parameter is the +# wrong type. +needs_function1(callback7) + +# This should fail because the parameter is the +# wrong type. +needs_function1(callback8) + +needs_function1(callback9) + + +import typing + +# This should generate an error because modules are not callable. +typing() diff --git a/packages/pyright-internal/src/tests/samples/call2.py b/packages/pyright-internal/src/tests/samples/call2.py new file mode 100644 index 000000000000..9de623ddb745 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/call2.py @@ -0,0 +1,117 @@ +# This sample tests function parameter matching logic. + + +from typing import Any, Dict, List + + +def func1(a: int, *b: int): + pass + + +func1(3) +func1(3, 4) +func1(3, *[1, 2, 3]) + +# This should generate an error +func1(3, "hello") + +# This should generate an error +func1(3, 5, 2, "str") + +# This should generate an error +func1("hello", 3) + +# This should generate an error +str_list = ["he", "2", "3"] +func1(3, *str_list) + + +def func2(a: str, **b: int): + pass + + +func2("hi") +func2("hi", b=3, c=4, d=5) + +str_dict = {"a": "3", "b": "2"} +func2("hi", **str_dict) + + +# This should generate a type error +func2("hi", 3) + +# This should generate a type error +func2("hi", b="hi") + + +def func4(*args: int): + pass + + +def func5(a: int, *args): + pass + + +tuple1 = (2, 3) +func4(*tuple1) +func5(*tuple1) + +# This should generate an error because a is assigned twice. +func2(a="", a="") + +# This should generate an error because c is assigned twice. 
+func2("", c=4, d=5, c=5) + + +def func6(param1: int, param2: str): + pass + + +def func7(*args: Any, param0: int, param1: int, param2: str): + func6(*args, param1=param1, param2=param2) + + func6(param0, param2=param2) + + # This should generate an error because param0 has no match. + func6(param0, param1=param1) + + +def func8( + y: str, + z: bool = ..., +) -> None: + ... + + +kwargs1: Dict[str, int] = {} +# This should generate an error because int is not compatible with str. +func8(z=False, **kwargs1) + + +class MyStr(str): + ... + + +kwargs2: Dict[MyStr, MyStr] = {} +func8(z=False, **kwargs2) + + +def func9( + x: int, + y: str, + *, + a: str = ..., + b: str, + c: str, +) -> None: + ... + + +kwargs3: Dict[str, str] = {} +func9(0, "", **kwargs3) + +args4: List[str] = ["hi"] +func9(0, *args4, **kwargs3) + +# This should generate an error +func9(*args4, **kwargs3) diff --git a/packages/pyright-internal/src/tests/samples/call3.py b/packages/pyright-internal/src/tests/samples/call3.py new file mode 100644 index 000000000000..5c471ae2cd48 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/call3.py @@ -0,0 +1,168 @@ +# This sample tests the Python 3.8 "positional-only parameter" feature. + +from typing import Any, Dict, Protocol, Tuple + + +def f0(a: int, b: int): + return 3 + + +def f1(a: int, b: int, /): + return 3 + +# This should generate an error because only one +# '/' parameter is allowed. +def f2(a: int, /, b: int, /): + return 3 + +def f3(a: int, /, b: int): + return 3 + +def f4(a: int, /, b: int, *, c: int): + return 3 + +# This should generate an error because a '/' +# parameter shouldn't appear after '*'. +def f5(a: int, *, b: int, /, c: int): + return 3 + +# This should generate an error because a '/' +# parameter cannot be the first in a param list. +def f6(/, a: int, *, b: int): + return 3 + + +f0(2, 3) + +f1(2, 3) + +# This should generate an error because b +# is a position-only parameter. 
+f1(2, b=3) + +# This should generate an error because a and b +# are position-only parameters. +f1(a=2, b=3) + +f2(2, 3) + +# This should generate an error. +f2(a=2, b=3) + +f3(2, 3) +f3(2, b=3) + +# This should generate 1 error because a is a +# position-only parameter. +f3(a=2, b=3) + +f4(1, 2, c=3) +f4(1, b=2, c=3) + +# This should generate an error because c is a +# keyword-only parameter. +f4(1, 2, 3) + +# This should generate an error because a is a +# positional-only parameter. +f4(a=1, b=2, c=3) + +# This will an error because of the bad +# declaration. Test to make sure we don't crash. +f5(1, b=2, c=3) + +f6(1, b=2) +f6(a=1, b=2) + +class A: + def f(self, g: bool = False, /, **kwargs) -> None: + ... + +a = A() + +a.f(hello="world") + + +def f7(name: str, /, **kwargs: Any): + return 3 + +f7("hi", name=3) + +# This should generate an error +f7("hi", name=3, name=4) + + +class P1(Protocol): + def f(self, x: Any, /): + ... + + +class C1: + def f( + self, + y: Any, + ): + ... + + +c1: P1 = C1() + + +class P2(Protocol): + def f(self, x: Any): + ... + + +class C2: + def f(self, y: Any, /): + ... + + +# This should generate an error +c2: P2 = C2() + + +def f8(a: int, b: int = 3, /): + ... + + +kwargs: Dict[str, Any] = {} + +# This should generate an error +f8() + +# This should generate an error +f8(**kwargs) + + +f8(0, **kwargs) + +def f9(*, c: int): + pass + +# This should generate an error because it is missing a keyword +# argument for keyword parameter "c". +f9(*[1, 2, 3]) + + +# This should generate an error because "/" cannot be used after "*args" +def f10(x, *args, /, y): + pass + +# This should generate an error because "*" cannot be used after "*args" +def f11(x, *args, *, y): + pass + + + +def f12(a: int, b: str, /): + ... + + +def f13(v: Tuple[int, str]): + f12(*v) + +def f14(v: Tuple[int]): + # This should generate an error because parameter "b" has + # no corresponding argument. 
+ f12(*v) diff --git a/packages/pyright-internal/src/tests/samples/call4.py b/packages/pyright-internal/src/tests/samples/call4.py new file mode 100644 index 000000000000..b9976045083c --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/call4.py @@ -0,0 +1,13 @@ +# This sample tests that the TypeVar matching logic for +# functions is working correctly. + +from typing import List + +a: List[str] = ["a", "bc"] + +# This should work because the "sorted" is defined +# with the first parameter of Iterable[_T] and the +# 'key' parameter Callable[[_T], Any]. Since "len" +# is a function that takes a "Sized" and "str" is +# a "Sized", the result of this should be List[str]. +b: List[str] = sorted(a, key=len) diff --git a/packages/pyright-internal/src/tests/samples/call5.py b/packages/pyright-internal/src/tests/samples/call5.py new file mode 100644 index 000000000000..366bccd55825 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/call5.py @@ -0,0 +1,95 @@ +# This sample tests handling of unpack operators used +# for arguments that are of a specified length (specifically, +# tuples with a specified list of elements types). + +from typing import NamedTuple, List, Tuple + +X = NamedTuple("X", [("a", int), ("b", str), ("c", str)]) + +q0: List[Tuple[int, str, str]] = [(1, "", ""), (2, "", "")] + +[X(*item) for item in q0] + + +q1: List[Tuple[int, str, float]] = [(1, "a", 3), (2, "b", 4), (3, "c", 5)] + +# This should generate an error because the items in q1 are not the +# right type for the X constructor. +[X(*item) for item in q1] + + +q2: List[Tuple[int, str]] = [(1, "1"), (2, "2"), (3, "3")] + +# This should generate an error because the items in q2 contain only +# two elements, and we need three to populate all three parameters +# in the X constructor. 
+[X(*item) for item in q2] + + +q3: List[Tuple[int, str, str, float]] = [ + (1, "a", "3", 4), + (2, "b", "4", 5), + (3, "c", "5", 6), +] + +# This should generate an error because the items in q3 contain +# four elements, and we need three to populate all parameters +# in the X constructor. +[X(*item) for item in q3] + + +q4: List[Tuple[int, ...]] = [ + (1, 3), + (2, 5), + (3, 6), +] + +# This should generate two errors because int isn't assignable to parameter +# b or c. +[X(*item) for item in q4] + + +Y = NamedTuple("Y", [("a", str), ("b", str), ("c", str)]) + +q5: List[Tuple[str, ...]] = [ + ("a", "b"), + ("a", "b"), +] + +[Y(*item) for item in q5] + + +class Z(NamedTuple): + a: list[str] + b: list[int] + + +q6 = Z(["1"], [3]) + +for a, b in zip(*q6): + reveal_type(a, expected_text="str") + reveal_type(b, expected_text="int") + + +def func1(a: list[str], c: list[int]): + ... + + +func1(*q6) + + +class ABC(NamedTuple): + a: float + b: float + c: float + + def to_rgba(self) -> "ABC": + return ABC(*self) + + +class AB(NamedTuple): + a: float + b: float + + def to_abc(self) -> ABC: + return ABC(*self, 1) diff --git a/packages/pyright-internal/src/tests/samples/call6.py b/packages/pyright-internal/src/tests/samples/call6.py new file mode 100644 index 000000000000..4ec95f69d97b --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/call6.py @@ -0,0 +1,35 @@ +# This sample tests the handling of unpack operators +# used in argument expressions when used in conjunction with +# Tuples and *args parameters. + + +from typing import Tuple + + +def foo1(a: int, b: int): + pass + + +def foo2(*args: int): + pass + + +fixed_tuple_0 = () +foo1(*fixed_tuple_0, 2) +foo2(*fixed_tuple_0, 2) + +fixed_tuple_1 = (1,) +foo1(*fixed_tuple_1, 2) +foo2(*fixed_tuple_1, 2) + +fixed_tuple_3 = (1, 3, 5) + +# This should generate an error because there +# are too many parameters. +foo1(*fixed_tuple_3, 2) +foo2(*fixed_tuple_3, 2) + +homogen_tuple: Tuple[int, ...] 
= (1, 5, 3) + +foo2(*homogen_tuple) +foo2(*homogen_tuple, 2) diff --git a/packages/pyright-internal/src/tests/samples/function16.py b/packages/pyright-internal/src/tests/samples/call7.py similarity index 100% rename from packages/pyright-internal/src/tests/samples/function16.py rename to packages/pyright-internal/src/tests/samples/call7.py diff --git a/packages/pyright-internal/src/tests/samples/callable1.py b/packages/pyright-internal/src/tests/samples/callable1.py index 8e18e891a1fb..3c4022d03d26 100644 --- a/packages/pyright-internal/src/tests/samples/callable1.py +++ b/packages/pyright-internal/src/tests/samples/callable1.py @@ -1,7 +1,7 @@ # This sample tests the type checker's handling of the # builtin "Callable" class. -from typing import Callable, Literal +from typing import Callable # Test forward declaration Callable1 = Callable[["A"], None] @@ -48,6 +48,6 @@ def func6(a: Callable6): def func7(a: Callable): - t_a: Literal["(*args: Unknown, **kwargs: Unknown) -> Unknown"] = reveal_type(a) + reveal_type(a, expected_text="(...) 
-> Unknown") b = a(3, 4, 5) - t_b: Literal["Unknown"] = reveal_type(b) + reveal_type(b, expected_text="Unknown") diff --git a/packages/pyright-internal/src/tests/samples/callable2.py b/packages/pyright-internal/src/tests/samples/callable2.py index 2d0155528580..525c76870cd8 100644 --- a/packages/pyright-internal/src/tests/samples/callable2.py +++ b/packages/pyright-internal/src/tests/samples/callable2.py @@ -3,7 +3,7 @@ from asyncio.futures import Future from asyncio.tasks import ensure_future -from typing import Any, Awaitable, Callable, Iterable, Literal, Sequence, TypeVar +from typing import Any, Awaitable, Callable, Iterable, Sequence, TypeVar _T1 = TypeVar("_T1") @@ -55,4 +55,4 @@ def callback(done: Continuation[int]) -> None: pass -t1: Literal["Awaitable[int]"] = reveal_type(from_continuation(callback)) +reveal_type(from_continuation(callback), expected_text="Awaitable[int]") diff --git a/packages/pyright-internal/src/tests/samples/callable3.py b/packages/pyright-internal/src/tests/samples/callable3.py index d3c99b477b1b..7d8679645c1a 100644 --- a/packages/pyright-internal/src/tests/samples/callable3.py +++ b/packages/pyright-internal/src/tests/samples/callable3.py @@ -2,7 +2,7 @@ # callable type as an input parameter, and the latter callable # contains generic types. 
-from typing import Callable, Generic, Literal, Optional, Tuple, TypeVar +from typing import Callable, Generic, Optional, Tuple, TypeVar Msg = TypeVar("Msg") Reply = TypeVar("Reply") @@ -25,4 +25,4 @@ def post_and_async_reply( ] = lambda r: (42, r) ret = agent.post_and_async_reply(build_message) -t1: Literal["str | None"] = reveal_type(ret) +reveal_type(ret, expected_text="str | None") diff --git a/packages/pyright-internal/src/tests/samples/callable4.py b/packages/pyright-internal/src/tests/samples/callable4.py index 9fe821020279..c622d52dc42b 100644 --- a/packages/pyright-internal/src/tests/samples/callable4.py +++ b/packages/pyright-internal/src/tests/samples/callable4.py @@ -1,7 +1,7 @@ # This sample tests the case where a callable type within a function # signature contains a generic return type within a union. -from typing import Literal, Optional, TypeVar +from typing import Optional, TypeVar from collections.abc import Callable @@ -12,10 +12,10 @@ def g(f: Callable[[T], Optional[U]], x: T) -> U: y = f(x) - t_y1: Literal["U@g | None"] = reveal_type(y) + reveal_type(y, expected_text="U@g | None") if y is not None: - t_y2: Literal["U@g"] = reveal_type(y) + reveal_type(y, expected_text="U@g") return y raise ValueError() diff --git a/packages/pyright-internal/src/tests/samples/callable5.py b/packages/pyright-internal/src/tests/samples/callable5.py new file mode 100644 index 000000000000..87a4a0012d14 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/callable5.py @@ -0,0 +1,40 @@ +# This sample covers the case where a function accepts a generic callable +# as a parameter along with another parameter that uses the same type variable +# and a caller provides an overloaded function as an argument. + +from typing import Any, Callable, TypeVar, Union, overload + +T = TypeVar("T") + + +@overload +def ff1(real: float): + ... + + +@overload +def ff1(real: str): + ... + + +def ff1(real: Union[float, str]) -> None: + ... 
+ + +def fun(f: Callable[[T], Any], p: T): + return f(p) + + +fun(ff1, 4) +fun(ff1, "4") + +# This should generate an error because a "bytes" argument +# doesn't match any of the overloads. +fun(ff1, b"") + + +map(complex, ["3j", "4"]) + +# This should generate an error because a "bytes" argument +# doesn't match any of the overloads in the "complex" constructor. +map(complex, [b"3j"]) diff --git a/packages/pyright-internal/src/tests/samples/callable6.py b/packages/pyright-internal/src/tests/samples/callable6.py new file mode 100644 index 000000000000..85760c3e5656 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/callable6.py @@ -0,0 +1,83 @@ +# This sample tests the use of unpacked tuples in a Callable, as described +# in PEP 646. + +from typing import Callable, Tuple, TypeVar, Union +from typing_extensions import TypeVarTuple, Unpack + +_T = TypeVar("_T", bound=int) + +TA1 = Callable[[_T, Unpack[Tuple[int, ...]], Tuple[int, int, str], str], _T] + +# This should generate an error +TA2 = Callable[[int, Unpack[Tuple[int, ...]], Unpack[Tuple[int, int, str]], str], int] + +TA3 = Callable[[int, Unpack[Tuple[int, int]], str], int] + + +def func1(x: TA1): + r1 = x(3, 4, 5, (1, 2, "hi"), "hi") + reveal_type(r1, expected_text="int") + + x(3, (1, 2, "hi"), "hi") + + # This should generage an error because the first argument is not an int. + x(None, (1, 2, "hi"), "hi") + + y = [1, 2, 3] + x(1, *y, (1, 2, "hi"), "hi") + + +def func2(x: TA3): + x(3, 4, 5, "hi") + + # This should generate an error. + x(3, 4, "hi") + + # This should generate an error. + x(3, 4, "hi", "hi") + + +Ts = TypeVarTuple("Ts") + + +def func3( + path: str, *args: Unpack[Tuple[Unpack[Ts], str]] +) -> Union[Unpack[Tuple[Unpack[Ts], int]]]: + ... + + +v3 = func3("", 1, "2", 3.3, None, "") +reveal_type(v3, expected_text="int | str | float | None") + +func3("", "") + +# This should generate an error because the type of the first arg is wrong. 
+func3(1, "") + +# This should generate an error because the type of the last arg is wrong. +func3("", 1) + +# This should generate an error because the type of the last arg is wrong. +func3("", 1, 2, 3, "hi", 1) + + +def func4( + path: str, *args: Unpack[Tuple[Unpack[Ts], str]] +) -> Tuple[Unpack[Ts], complex]: + ... + + +v4 = func4("", 1, "2", 3.3, None, "") +reveal_type(v4, expected_text="Tuple[int, str, float, None, complex]") + + +def func5(path: str, *args: Unpack[Tuple[str, ...]]) -> None: + ... + + +# This should generate an errors. +func5("", 1, "2", "") +func5("", "1", "2", "3.3", "None", "") + +# This should generate one error. +func5("", "1", "2", "3.3", "None", 3) diff --git a/packages/pyright-internal/src/tests/samples/callbackProtocol1.py b/packages/pyright-internal/src/tests/samples/callbackProtocol1.py index f78f0cc3f807..8d645488ef83 100644 --- a/packages/pyright-internal/src/tests/samples/callbackProtocol1.py +++ b/packages/pyright-internal/src/tests/samples/callbackProtocol1.py @@ -94,6 +94,7 @@ def func5(x: int) -> None: pass +# This should generate an error. var4: TestClass4 = func5 @@ -107,3 +108,12 @@ def func6(a: int, b: str) -> int: f: TestClass5 = func6 + + +class TestClass6: + def __call__(self, *vals: bytes, maxlen: Optional[int] = None) -> List[bytes]: + return [] + + +# This should generate an error because TestClass6 is not a protocol class. +var6: TestClass6 = good_cb diff --git a/packages/pyright-internal/src/tests/samples/callbackProtocol5.py b/packages/pyright-internal/src/tests/samples/callbackProtocol5.py index 88b59c14da93..f3eb475ee934 100644 --- a/packages/pyright-internal/src/tests/samples/callbackProtocol5.py +++ b/packages/pyright-internal/src/tests/samples/callbackProtocol5.py @@ -1,30 +1,66 @@ # This sample tests the case where a callback protocol defines additional # attributes. 
-from typing import Protocol +from typing import Any, Callable, Protocol, TypeVar, cast +from typing_extensions import ParamSpec -class SomeFunc(Protocol): +P = ParamSpec("P") +R = TypeVar("R", covariant=True) + + +class SomeFunc1(Protocol[P, R]): __name__: str other_attribute: int - def __call__(self) -> str: + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: ... -def other_func(f: SomeFunc): - print(f.__name__) +def other_func1(f: Callable[P, R]) -> SomeFunc1[P, R]: + converted = cast(SomeFunc1, f) + + print(converted.__name__) - f.other_attribute = 1 + converted.other_attribute = 1 # This should generate an error - f.other_attribute = "str" + converted.other_attribute = "str" # This should generate an error - f.xxx = 3 + converted.xxx = 3 + return converted -@other_func -def some_func() -> str: + +@other_func1 +def some_func1(x: int) -> str: ... + + +reveal_type(some_func1, expected_text="SomeFunc1[(x: int), str]") + +some_func1.other_attribute + +# This should generate an error +some_func1.other_attribute2 + +some_func1(x=3) + + +class SomeFunc2(Protocol): + __name__: str + __module__: str + __qualname__: str + __annotations__: dict[str, Any] + + def __call__(self) -> None: + ... + + +def some_func2() -> None: + ... + + +v: SomeFunc2 = some_func2 diff --git a/packages/pyright-internal/src/tests/samples/callbackProtocol6.py b/packages/pyright-internal/src/tests/samples/callbackProtocol6.py new file mode 100644 index 000000000000..81cb9842f273 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/callbackProtocol6.py @@ -0,0 +1,66 @@ +# This sample tests the case where a callback protocol uses a default argument +# but the corresponding callable does not or vice versa. + +from typing import Protocol, Any + +# Callback with positional parameter with default arg value. +class Callback1(Protocol): + def __call__(self, path: str = ...) -> str: + ... + + +# Callback with positional parameter without default arg value. 
+class Callback2(Protocol): + def __call__(self, path: str) -> str: + ... + + +def func1_1(path: str = "") -> str: + ... + + +def func1_2(path: str) -> str: + ... + + +val1_1: Callback1 = func1_1 + +# This should generate an error. +val1_2: Callback1 = func1_2 + + +# This should generate an error. +val2_1: Callback2 = func1_1 + +val2_2: Callback2 = func1_2 + + +# Callback with keyword parameter with default arg value. +class Callback3(Protocol): + def __call__(self, *, path: str = ...) -> str: + ... + + +# Callback with keyword parameter without default arg value. +class Callback4(Protocol): + def __call__(self, *, path: str) -> str: + ... + + +def func3_1(*, path: str = "") -> str: + ... + + +def func3_2(*, path: str) -> str: + ... + + +val3_1: Callback3 = func3_1 + +# This should generate an error. +val3_2: Callback3 = func3_2 + +# This should generate an error. +val4_1: Callback4 = func3_1 + +val4_2: Callback4 = func3_2 diff --git a/packages/pyright-internal/src/tests/samples/callbackProtocol7.py b/packages/pyright-internal/src/tests/samples/callbackProtocol7.py new file mode 100644 index 000000000000..ae7d882e9784 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/callbackProtocol7.py @@ -0,0 +1,16 @@ +# This sample tests the case where a callback protocol uses position-only +# parameters. + +from typing import Any, Protocol + + +def f1(x: int, /, y: str, z: None = None) -> Any: + ... + + +class X(Protocol): + def __call__(self, x: int, /, y: str) -> Any: + ... + + +x: X = f1 diff --git a/packages/pyright-internal/src/tests/samples/callbackProtocol8.py b/packages/pyright-internal/src/tests/samples/callbackProtocol8.py new file mode 100644 index 000000000000..4025689c28c6 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/callbackProtocol8.py @@ -0,0 +1,16 @@ +# This sample tests the case where a callback protocol contains an *args +# and some keyword parameters. 
+ +from typing import Any, Protocol + + +class P(Protocol): + def __call__(self, *args: Any, kwarg0: Any, kwarg1: Any) -> None: + ... + + +def f(*args: Any, kwarg0: Any, kwarg1: Any) -> None: + ... + + +p: P = f diff --git a/packages/pyright-internal/src/tests/samples/capturedVariable1.py b/packages/pyright-internal/src/tests/samples/capturedVariable1.py new file mode 100644 index 000000000000..7d6ce0fc853a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/capturedVariable1.py @@ -0,0 +1,92 @@ +# This sample tests the code flow analysis used to determine +# whether it is safe to narrow the type of a captured variable. + +from typing import Optional + + +def get_optional_int() -> Optional[int]: + ... + + +v1 = get_optional_int() +if v1 is not None: + lambda: v1 + 5 + +v2 = get_optional_int() +if v2 is not None: + # This should generate an error because v2 + # is reassigned after capture. + lambda: v2 + 5 +v2 = None + +v3 = get_optional_int() +if v3 is not None: + lambda: v3 + 5 +else: + v3 = None + +# This should generate an error because v4 is +# not bound prior to the capture. +lambda: v4 + 5 +v4 = get_optional_int() + + +def func1(v1: Optional[int]): + if v1 is not None: + lambda: v1 + 5 + + +def func2(v1: Optional[int]): + if v1 is not None: + + def func2_inner1(): + v1 + 5 + + def func2_inner2(): + lambda: v1 + 5 + + func2_inner2() + + func2_inner1() + + +def func3(): + v1: Optional[int] = 3 + lambda: v1 + 5 + + +def func4(): + v1: Optional[int] = 3 + # This should generate an error because v1 + # is reassigned after capture. + lambda: v1 + 5 + v1 = None + + +def func5(): + v1: Optional[int] = 3 + + while True: + lambda: v1 + 5 + + +def func6(): + v1: Optional[int] = 3 + + while True: + if v1 is not None: + # This should generate an error because + # v1 is reassigned on a code path that is + # reachable from the lambda. 
+ lambda: v1 + 5 + else: + v1 = None + + +def func7(): + while True: + # This should generate an error because v1 is + # potentially unbound prior to capture. + lambda: v1 + 5 + + v1: Optional[int] = 3 diff --git a/packages/pyright-internal/src/tests/samples/circular1.py b/packages/pyright-internal/src/tests/samples/circular1.py index afac92bad39c..c00bfed9dcd6 100644 --- a/packages/pyright-internal/src/tests/samples/circular1.py +++ b/packages/pyright-internal/src/tests/samples/circular1.py @@ -5,7 +5,7 @@ class Example1: # This should not generate an error because "int" # is not forward-declared. - str: str = 4 + str: str = "" int = int @@ -13,5 +13,4 @@ class Example1: class Example2: - # This should generate an error because it's forward-declared. int: "int" = 4 diff --git a/packages/pyright-internal/src/tests/samples/classGetItem1.py b/packages/pyright-internal/src/tests/samples/classGetItem1.py index 3201d9c76a59..bc8c2edfd9f9 100644 --- a/packages/pyright-internal/src/tests/samples/classGetItem1.py +++ b/packages/pyright-internal/src/tests/samples/classGetItem1.py @@ -2,7 +2,7 @@ # __class_getitem__ class method. -from typing import Generic, Literal, TypeVar +from typing import Generic, TypeVar class Foo: @@ -13,7 +13,7 @@ def __class_getitem__(self, args: tuple[int, ...]) -> None: ... -t1: Literal["Type[Foo]"] = reveal_type(Foo[10, 63]) +reveal_type(Foo[10, 63], expected_text="Type[Foo]") _T = TypeVar("_T") @@ -27,4 +27,4 @@ def __class_getitem__(cls, args: tuple[int, ...]) -> None: ... 
-t2: Literal["Type[Bar[int, str]]"] = reveal_type(Bar[int, str]) +reveal_type(Bar[int, str], expected_text="Type[Bar[int, str]]") diff --git a/packages/pyright-internal/src/tests/samples/classVar1.py b/packages/pyright-internal/src/tests/samples/classVar1.py index 29b998cd0102..b75633ecf3a6 100644 --- a/packages/pyright-internal/src/tests/samples/classVar1.py +++ b/packages/pyright-internal/src/tests/samples/classVar1.py @@ -1,7 +1,7 @@ # This sample tests the type checker's handling of ClassVar # as described in PEP 526. -from typing import Any, ClassVar, Dict +from typing import Any, ClassVar, Dict, Optional class MyDescriptor: @@ -18,7 +18,7 @@ class Starship: stats: ClassVar[Dict[str, int]] = {} desc: ClassVar[MyDescriptor] = MyDescriptor() - def __init__(self, damage: int, captain: str = None): + def __init__(self, damage: int, captain: Optional[str] = None): self.damage = damage if captain: self.captain = captain # Else keep the default diff --git a/packages/pyright-internal/src/tests/samples/classVar2.py b/packages/pyright-internal/src/tests/samples/classVar2.py index 7ed36e5600aa..9cf89aeeff0f 100644 --- a/packages/pyright-internal/src/tests/samples/classVar2.py +++ b/packages/pyright-internal/src/tests/samples/classVar2.py @@ -2,11 +2,13 @@ # used within a Protocol, as specified in PEP 544. import typing as t +from typing import ClassVar as _ClassVar class Proto(t.Protocol): var1: t.ClassVar[str] var2: t.ClassVar[str] + var3: _ClassVar = ["hi"] class ProtoImpl: @@ -19,3 +21,9 @@ def __init__(self) -> None: # This should generate an error because var2 # is not a class variable. 
a: Proto = ProtoImpl() + + +def func1(x: Proto): + reveal_type(x.var1, expected_text="str") + reveal_type(x.var2, expected_text="str") + reveal_type(x.var3, expected_text="list[str]") diff --git a/packages/pyright-internal/src/tests/samples/classVar3.py b/packages/pyright-internal/src/tests/samples/classVar3.py index 11a4b7ddc490..15feec1449ba 100644 --- a/packages/pyright-internal/src/tests/samples/classVar3.py +++ b/packages/pyright-internal/src/tests/samples/classVar3.py @@ -1,13 +1,16 @@ # This sample tests the reporting of errors for ClassVar in contexts # where it is not allowed. -from typing import ClassVar, Final, List +from typing import Annotated, Any, ClassVar, Final, Generic, List, TypeVar +from typing_extensions import Self # This should generate an error. x: ClassVar[int] = 3 +T = TypeVar("T") -class Foo: + +class Foo(Generic[T]): x: ClassVar[int] = 3 # This should generate an error. @@ -16,6 +19,18 @@ class Foo: # This should generate an error. z: List[ClassVar[int]] = [] + # This should generate an error because TypeVars cannot + # be used in a ClassVar. + illegal1: ClassVar[List[T]] + + # This should generate an error because TypeVars cannot + # be used in a ClassVar. + illegal2: ClassVar[T] + + ok1: ClassVar[list] + ok2: ClassVar[List[Any]] + ok3: Annotated[ClassVar[List[Self]], ""] + # This should generate an error. def func1(self, a: ClassVar[int]): # This should generate an error. @@ -26,4 +41,4 @@ def func1(self, a: ClassVar[int]): # This should generate an error. 
def func2(self) -> ClassVar[int]: - return 3 \ No newline at end of file + return 3 diff --git a/packages/pyright-internal/src/tests/samples/classVar4.py b/packages/pyright-internal/src/tests/samples/classVar4.py new file mode 100644 index 000000000000..81355c91d105 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/classVar4.py @@ -0,0 +1,36 @@ +# This sample tests that pyright emits an error when attempting to access +# a non-ClassVar protocol attribute from a protocol class. + +from typing import ClassVar, Protocol + + +class SomeProtocol(Protocol): + x: int = 3 + y: int + z: ClassVar[int] + + @classmethod + def meth1(cls) -> None: + return None + + @staticmethod + def meth2() -> None: + return None + + +class Class(SomeProtocol): + y = 0 + z = 0 + + +def func1() -> None: + # This should generate an error because y is not a ClassVar. + x: int = Class.x + + # This should generate an error because y is not a ClassVar. + y: int = Class.y + + z: int = Class.z + + Class.meth1 + Class.meth2 diff --git a/packages/pyright-internal/src/tests/samples/classes2.py b/packages/pyright-internal/src/tests/samples/classes2.py index e4e9d0452486..475df74b0ffa 100644 --- a/packages/pyright-internal/src/tests/samples/classes2.py +++ b/packages/pyright-internal/src/tests/samples/classes2.py @@ -8,6 +8,7 @@ Iterable, List, Optional, + ParamSpec, Sequence, Type, TypedDict, @@ -19,6 +20,12 @@ T_ParentClass = TypeVar("T_ParentClass", bound="ParentClass") +P = ParamSpec("P") + + +def decorator(func: Callable[P, None]) -> Callable[P, int]: + ... + class ParentClass: def my_method1(self, a: int): @@ -82,6 +89,9 @@ def my_method19(self, a: str, b: int, c: float, d: bool) -> None: def my_method20(cls: Type[T_ParentClass], a: str) -> T_ParentClass: ... + def my_method21(self, var: int) -> None: + ... 
+ def _protected_method1(self, a: int): return 1 @@ -169,6 +179,11 @@ def my_method19(self, b: str, *args: object, **kwargs: object) -> None: def my_method20(cls: Type[T_ChildClass], a: str) -> T_ChildClass: ... + # This should generate an error. + @decorator + def my_method21(self, var: int) -> None: + ... + # This should generate an error. def _protected_method1(self): return 1 diff --git a/packages/pyright-internal/src/tests/samples/classes3.py b/packages/pyright-internal/src/tests/samples/classes3.py index 26fcc6f7814d..cfaa6b079232 100644 --- a/packages/pyright-internal/src/tests/samples/classes3.py +++ b/packages/pyright-internal/src/tests/samples/classes3.py @@ -7,6 +7,7 @@ class TestClass: print(__doc__) print(__module__) print(__name__) + print(__qualname__) base = TestClass.__base__ diff --git a/packages/pyright-internal/src/tests/samples/classes5.py b/packages/pyright-internal/src/tests/samples/classes5.py index 94d7eeeeb202..a23521082a77 100644 --- a/packages/pyright-internal/src/tests/samples/classes5.py +++ b/packages/pyright-internal/src/tests/samples/classes5.py @@ -1,7 +1,7 @@ # This sample tests the reportIncompatibleVariableOverride # configuration option. -from typing import ClassVar, Final, List, Optional, Union +from typing import ClassVar, Final, List, Optional, Protocol, Type, Union class ParentClass1: @@ -207,3 +207,22 @@ class SublassTuple1(ParentClass2): class SublassTuple2(ParentClass2): # This should generate an error. cv_decl_1, cv_decl_2, cv_decl_3 = (3, 4.5, None) + + +class ConfigBase: + ... + + +class ParentClass3(Protocol): + Config1: ClassVar[Type[ConfigBase]] + Config2: ClassVar[Type[ConfigBase]] + + +class ChildClass3(ParentClass3): + class Config1(ConfigBase): + ... + + # This should generate an error if reportIncompatibleVariableOverride + # is enabled. + class Config2: + ... 
diff --git a/packages/pyright-internal/src/tests/samples/classes8.py b/packages/pyright-internal/src/tests/samples/classes8.py new file mode 100644 index 000000000000..2773d4a65556 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/classes8.py @@ -0,0 +1,47 @@ +# This sample tests the case where a generic class declaration refers +# to itself. This case should arguably be considered an error, but +# it does appear within the stdlib typeshed stubs (see os.scandir). + +from os import DirEntry +from types import TracebackType +from typing import AnyStr, ContextManager, Iterator, Type +from typing_extensions import Self + + +class _ScandirIterator( + Iterator[DirEntry[AnyStr]], ContextManager["_ScandirIterator[AnyStr]"] +): + def __iter__(self) -> Self: + ... + + def __next__(self) -> DirEntry[AnyStr]: + ... + + def close(self) -> None: + ... + + def __enter__(self) -> Self: + ... + + def __exit__( + self, + __exc_type: Type[BaseException] | None, + __exc_value: BaseException | None, + __traceback: TracebackType | None, + ) -> bool | None: + ... + + +def scandir(path: AnyStr) -> _ScandirIterator[AnyStr]: + ... + + +def thing(value: AnyStr): + with scandir(value) as it: + for file in it: + if isinstance(file.name, str): + if file.name.endswith(".xml"): + ... + else: + if file.name.endswith(b".xml"): + ... diff --git a/packages/pyright-internal/src/tests/samples/classes9.py b/packages/pyright-internal/src/tests/samples/classes9.py new file mode 100644 index 000000000000..d0cfb4c2cbfe --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/classes9.py @@ -0,0 +1,111 @@ +# This sample tests incompatible method overrides for multiple inheritance. +# This functionality is controlled by the reportIncompatibleMethodOverride +# diagnostic rule. + + +from typing import Generic, Iterable, TypeVar + + +class A1: + def func1(self, a: int) -> str: + ... + + +class A2: + def func1(self, a: int, b: int = 3) -> str: + ... 
+ + +# This should generate an error because func1 is incompatible. +class ASub(A1, A2): + ... + + +class B1: + def func1(self) -> int: + ... + + +class B2: + def func1(self) -> float: + ... + + +class BSub(B1, B2): + ... + + +class C1: + def func1(self) -> float: + ... + + +class C2: + def func1(self) -> int: + ... + + +# This should generate an error because func1 is incompatible. +class CSub(C1, C2): + ... + + +class D1: + def func1(self, a: int) -> None: + ... + + +class D2: + def func1(self, b: int) -> None: + ... + + +# This should generate an error because func1 is incompatible. +class DSub(D1, D2): + ... + + +_T_E = TypeVar("_T_E") + + +class E1(Generic[_T_E]): + def func1(self, a: _T_E) -> None: + ... + + +class E2(Generic[_T_E]): + def func1(self, a: _T_E) -> None: + ... + + +class ESub(E1[int], E2[int]): + ... + + +_T_F = TypeVar("_T_F") + + +class F1(Generic[_T_F]): + def dostuff(self) -> Iterable[_T_F]: + ... + + +class F2(F1[_T_F]): + def dostuff(self) -> Iterable[_T_F]: + ... + + +class F3(F1[_T_F]): + ... + + +class FSub1(F3[int], F2[int]): + pass + + +class FSub2(F3[int], F1[int]): + pass + + +class FSub3(F2[int], F1[int]): + pass diff --git a/packages/pyright-internal/src/tests/samples/codeFlow4.py b/packages/pyright-internal/src/tests/samples/codeFlow4.py index 88afaa4c7a68..af2d3d906bf6 100644 --- a/packages/pyright-internal/src/tests/samples/codeFlow4.py +++ b/packages/pyright-internal/src/tests/samples/codeFlow4.py @@ -1,5 +1,5 @@ # This sample tests the handling of if/elif chains that omit an else -# statement. The "ghost" else statement should be assumed never taken if the +# statement. The "implied else" statement should be assumed never taken if the # final if/elif test expression evaluates to Never in the negative case. 
from enum import Enum @@ -81,7 +81,7 @@ def func8(color: Color) -> bool: return False -t1: Literal["bool"] = reveal_type(func8(Color.RED)) +reveal_type(func8(Color.RED), expected_text="bool") def func9(a: Union[str, int], b: Union[str, int]) -> bool: @@ -92,3 +92,18 @@ def func9(a: Union[str, int], b: Union[str, int]) -> bool: return False elif isinstance(b, int): return False + + +def func10(foo: list[str]) -> bool: + i = 0 + x: int | None = None + + while i < 5: + foo[i] + + if x is None: + return False + reveal_type(x, expected_text="Never") + i = x + + return True diff --git a/packages/pyright-internal/src/tests/samples/comparison1.py b/packages/pyright-internal/src/tests/samples/comparison1.py index 9f3126a8e377..1ceaccdfc28e 100644 --- a/packages/pyright-internal/src/tests/samples/comparison1.py +++ b/packages/pyright-internal/src/tests/samples/comparison1.py @@ -62,3 +62,11 @@ def func2( if b == e: return + + +def func3(base: type) -> None: + if base == ClassA: + ... + + if ClassA == base: + ... diff --git a/packages/pyright-internal/src/tests/samples/constructor10.py b/packages/pyright-internal/src/tests/samples/constructor10.py new file mode 100644 index 000000000000..6b8d8d52f454 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/constructor10.py @@ -0,0 +1,19 @@ +# This sample tests the handling of a __new__ method that +# is part of a generic class but uses its own type parameters. + + +from typing import Iterable, Iterator, TypeVar + + +_T_co = TypeVar("_T_co", covariant=True) +_T = TypeVar("_T") + + +class pairwise(Iterator[_T_co]): + def __new__(cls, __iterable: Iterable[_T]) -> "pairwise[tuple[_T, _T]]": + ... 
+ + +def triplewise(iterable: Iterable[_T]) -> Iterator[tuple[_T, _T, _T]]: + for (a, _), (b, c) in pairwise(pairwise(iterable)): + yield a, b, c diff --git a/packages/pyright-internal/src/tests/samples/constructor11.py b/packages/pyright-internal/src/tests/samples/constructor11.py new file mode 100644 index 000000000000..eb9a40605d07 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/constructor11.py @@ -0,0 +1,32 @@ +# This sample tests the case where a subclass of Dict uses +# a dictionary literal as an argument to the constructor call. + +from collections import Counter, defaultdict +from typing import Callable, Generic, Mapping, Optional, TypeVar + +c1 = Counter({0, 1}) +reveal_type(c1, expected_text="Counter[int]") + +for i in range(256): + c1 = Counter({0: c1[1]}) + reveal_type(c1, expected_text="Counter[int]") + +reveal_type(c1, expected_text="Counter[int]") + + +K = TypeVar("K") +V = TypeVar("V") + +MyFuncType = Callable[[Callable[[K], V]], V] + + +class MyFunc(Generic[K, V]): + def __init__(self, g: MyFuncType[K, V]) -> None: + self.g = g + + +MyFuncMapping = Mapping[K, Optional[MyFunc[K, V]]] + +my_func_defaultdict: MyFuncMapping[str, int] = defaultdict( + lambda: None, {"x": MyFunc(lambda f: f("a"))} +) diff --git a/packages/pyright-internal/src/tests/samples/constructor12.py b/packages/pyright-internal/src/tests/samples/constructor12.py new file mode 100644 index 000000000000..d22591a956e6 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/constructor12.py @@ -0,0 +1,20 @@ +# This sample tests the case where a specialized class is constructed +# from within the class implementation and uses a class TypeVar in +# the specialization. 
+ +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class Foo(Generic[T]): + def return_from_variable(self) -> "Foo[T]": + value = Foo[T]() + reveal_type(value, expected_text="Foo[T@Foo]") + return value + + +x = Foo[int]() +returned_from_variable = x.return_from_variable() + +reveal_type(returned_from_variable, expected_text="Foo[int]") diff --git a/packages/pyright-internal/src/tests/samples/constructor2.py b/packages/pyright-internal/src/tests/samples/constructor2.py index 232ad182ab62..61fcb9684bd8 100644 --- a/packages/pyright-internal/src/tests/samples/constructor2.py +++ b/packages/pyright-internal/src/tests/samples/constructor2.py @@ -52,129 +52,129 @@ def get_wingspan(self, p1: int) -> float: def s1(): b: Bear[str] = Bear() a: Animal[str, int] = b - t: Literal["Bear[str]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[str]") def s2(): a: Animal[str, int] = Bear() - t: Literal["Bear[str]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[str]") def s3(): a: Animal[str, int] = Bear() - t: Literal["Bear[str]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[str]") def s4(): a: Bear[Any] = Bear[int]() - t: Literal["Bear[Any]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[Any]") def s5(): a: Animal[Any, Any] = Bear[int]() - t: Literal["Bear[Any]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[Any]") def s6(): a: Union[Bat, Bear[str]] = Bear() - t: Literal["Bear[str]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[str]") def s7(p: Union[Bat, Bear[int]]): a: Animal[int, int] = p - t: Literal["Bat | Bear[int]"] = reveal_type(a) + reveal_type(a, expected_text="Bat | Bear[int]") def s8(): a: Animal[int, int] = Bear[int]() - t: Literal["Bear[int]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[int]") def s9(p: Dict[str, str]): a: Dict[str, Any] = p - t: Literal["Dict[str, Any]"] = reveal_type(a) + reveal_type(a, expected_text="Dict[str, Any]") def s10(p: List[str]): a: Iterable[Any] = p - t1: 
Literal["List[Any]"] = reveal_type(a) + reveal_type(a, expected_text="List[Any]") b: Iterable[str] = [] - t2: Literal["list[str]"] = reveal_type(b) + reveal_type(b, expected_text="list[str]") c: Iterable[str] = list() - t3: Literal["list[str]"] = reveal_type(c) + reveal_type(c, expected_text="list[str]") def s11(): a: Animal[Any, Any] = Donkey[int]() - t: Literal["Donkey[int]"] = reveal_type(a) + reveal_type(a, expected_text="Donkey[int]") def s12(p: Bear[_T1], b: _T1): a: Animal[Any, int] = p - t: Literal["Bear[Any]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[Any]") def s13(p: Bat): a: Flyer[int] = p - t: Literal["Bat"] = reveal_type(a) + reveal_type(a, expected_text="Bat") def s14(p: Bat): a: CaveDweller[int] = p - t: Literal["Bat"] = reveal_type(a) + reveal_type(a, expected_text="Bat") def s15(): a = Bear(1) - t1: Literal["Bear[int]"] = reveal_type(a) + reveal_type(a, expected_text="Bear[int]") b = Bear[int](1) - t2: Literal["Bear[int]"] = reveal_type(b) + reveal_type(b, expected_text="Bear[int]") c = Bear[float](1) - t3: Literal["Bear[float]"] = reveal_type(c) + reveal_type(c, expected_text="Bear[float]") d = Bear[Union[str, int]](1) - t4: Literal["Bear[str | int]"] = reveal_type(d) + reveal_type(d, expected_text="Bear[str | int]") def s16(): a: Any = Bear(1) - t: Literal["Any"] = reveal_type(a) + reveal_type(a, expected_text="Any") def s17(): a1: Iterable[object] = [2, 3, 4] - ta1: Literal["list[int]"] = reveal_type(a1) + reveal_type(a1, expected_text="list[int]") a2: List[object] = [2, 3, 4] - ta2: Literal["list[object]"] = reveal_type(a2) + reveal_type(a2, expected_text="list[object]") b1: Iterable[float] = [2, 3, 4] - tb1: Literal["list[int]"] = reveal_type(b1) + reveal_type(b1, expected_text="list[int]") b2: List[float] = [2, 3, 4] - tb2: Literal["list[float]"] = reveal_type(b2) + reveal_type(b2, expected_text="list[float]") c1: Iterable[Literal["A", "B", "C"]] = ["A", "B"] - tc1: Literal["list[Literal['A', 'B']]"] = reveal_type(c1) + 
reveal_type(c1, expected_text="list[Literal['A', 'B']]") c2: List[Literal["A", "B", "C"]] = ["A", "B"] - tc2: Literal["list[Literal['A', 'B', 'C']]"] = reveal_type(c2) + reveal_type(c2, expected_text="list[Literal['A', 'B', 'C']]") def s18(): a1: Mapping[object, object] = {"a": 3, "b": 5.6} - ta1: Literal["dict[object, float]"] = reveal_type(a1) + reveal_type(a1, expected_text="dict[object, float]") a2: Dict[object, object] = {"a": 3, "b": 5.6} - ta2: Literal["dict[object, object]"] = reveal_type(a2) + reveal_type(a2, expected_text="dict[object, object]") b1: Mapping[str, float] = {"a": 3, "b": 5} - tb1: Literal["dict[str, int]"] = reveal_type(b1) + reveal_type(b1, expected_text="dict[str, int]") b2: Dict[str, float] = {"a": 3, "b": 5} - tb2: Literal["dict[str, float]"] = reveal_type(b2) + reveal_type(b2, expected_text="dict[str, float]") c1: Mapping[Literal["A", "B"], Literal[3, 4]] = {"A": 3} - tc1: Literal["dict[Literal['A', 'B'], Literal[3]]"] = reveal_type(c1) + reveal_type(c1, expected_text="dict[Literal['A', 'B'], Literal[3]]") c2: Dict[Literal["A", "B"], Literal[3, 4]] = {"A": 3} - tc2: Literal["dict[Literal['A', 'B'], Literal[3, 4]]"] = reveal_type(c2) + reveal_type(c2, expected_text="dict[Literal['A', 'B'], Literal[3, 4]]") diff --git a/packages/pyright-internal/src/tests/samples/constructor3.py b/packages/pyright-internal/src/tests/samples/constructor3.py index 9924855b3dce..ce5b2f9ea690 100644 --- a/packages/pyright-internal/src/tests/samples/constructor3.py +++ b/packages/pyright-internal/src/tests/samples/constructor3.py @@ -6,13 +6,13 @@ from concurrent.futures import Future, wait from itertools import chain -from typing import Any, Dict, Literal +from typing import Any, Dict my_list = list(chain([0])) -t1: Literal["list[int]"] = reveal_type(my_list) +reveal_type(my_list, expected_text="list[int]") pending: Dict[Future[Any], Any] = {} done_tasks = wait(list(pending.keys())).done -t2: Literal["set[Future[Any]]"] = reveal_type(done_tasks) 
+reveal_type(done_tasks, expected_text="set[Future[Any]]") diff --git a/packages/pyright-internal/src/tests/samples/constructor4.py b/packages/pyright-internal/src/tests/samples/constructor4.py index d4299e262944..bea68fce156b 100644 --- a/packages/pyright-internal/src/tests/samples/constructor4.py +++ b/packages/pyright-internal/src/tests/samples/constructor4.py @@ -3,23 +3,23 @@ from collections import defaultdict from queue import Queue -from typing import DefaultDict, List, Literal, Type, TypeVar +from typing import DefaultDict, List, Type, TypeVar val1 = Queue() -t1: Literal["Queue[Unknown]"] = reveal_type(val1) +reveal_type(val1, expected_text="Queue[Unknown]") val2 = list() -t2: Literal["list[Unknown]"] = reveal_type(val2) +reveal_type(val2, expected_text="list[Unknown]") _T = TypeVar("_T") def foo(value: Type[_T], b: _T) -> None: val1: "DefaultDict[str, list[_T]]" = defaultdict(list) - t1: Literal["defaultdict[str, list[_T@foo]]"] = reveal_type(val1) + reveal_type(val1, expected_text="defaultdict[str, list[_T@foo]]") val2: "DefaultDict[str, list[_T]]" = defaultdict(List[_T]) - t2: Literal["defaultdict[str, list[_T@foo]]"] = reveal_type(val2) + reveal_type(val2, expected_text="defaultdict[str, list[_T@foo]]") # This should generate an error because the type is incompatible. val3: "DefaultDict[str, list[_T]]" = defaultdict(list[int]) diff --git a/packages/pyright-internal/src/tests/samples/constructor5.py b/packages/pyright-internal/src/tests/samples/constructor5.py index edecd264ed81..1f76cc6c2cef 100644 --- a/packages/pyright-internal/src/tests/samples/constructor5.py +++ b/packages/pyright-internal/src/tests/samples/constructor5.py @@ -3,17 +3,17 @@ # that is matched against a protocol in the OrderedDict # constructor. 
-from typing import Literal, OrderedDict +from typing import OrderedDict val1 = { "a": 1, "b": 0, } -t1: Literal["dict[str, int]"] = reveal_type(val1) +reveal_type(val1, expected_text="dict[str, int]") val2 = OrderedDict(val1) -t2: Literal["OrderedDict[str, int]"] = reveal_type(val2) +reveal_type(val2, expected_text="OrderedDict[str, int]") val3 = OrderedDict( @@ -22,4 +22,4 @@ "b": 0, } ) -t3: Literal["OrderedDict[str, int]"] = reveal_type(val3) +reveal_type(val3, expected_text="OrderedDict[str, int]") diff --git a/packages/pyright-internal/src/tests/samples/constructor6.py b/packages/pyright-internal/src/tests/samples/constructor6.py index 6bd6a5249ee2..b4192bab54ed 100644 --- a/packages/pyright-internal/src/tests/samples/constructor6.py +++ b/packages/pyright-internal/src/tests/samples/constructor6.py @@ -32,9 +32,9 @@ def __get__(self: "TextField[_T]", instance: Any, owner: Any) -> _T: def foo(a: bool): - t1: Literal["TextField[str]"] = reveal_type(TextField()) - t2: Literal["TextField[str | None]"] = reveal_type(TextField(null=True)) - t3: Literal["TextField[Unknown]"] = reveal_type(TextField(null=a)) + reveal_type(TextField(), expected_text="TextField[str]") + reveal_type(TextField(null=True), expected_text="TextField[str | None]") + reveal_type(TextField(null=a), expected_text="TextField[Unknown]") class Model: @@ -58,7 +58,7 @@ def __init__( ) -> None: ... - def __init__(self, to: Type[_T2], *, null: bool) -> None: + def __init__(self, to: Type[_T2], *, null: bool = False) -> None: ... 
@@ -66,5 +66,5 @@ class Author(Model): pass -t1: Literal["ForeignKey[Author]"] = reveal_type(ForeignKey(Author, null=False)) -t2: Literal["ForeignKey[Author | None]"] = reveal_type(ForeignKey(Author, null=True)) +reveal_type(ForeignKey(Author, null=False), expected_text="ForeignKey[Author]") +reveal_type(ForeignKey(Author, null=True), expected_text="ForeignKey[Author | None]") diff --git a/packages/pyright-internal/src/tests/samples/constructor7.py b/packages/pyright-internal/src/tests/samples/constructor7.py index 63e42426394c..b35934a18cc4 100644 --- a/packages/pyright-internal/src/tests/samples/constructor7.py +++ b/packages/pyright-internal/src/tests/samples/constructor7.py @@ -1,8 +1,6 @@ # This sample tests the case where a __new__ method provides # a type that differs from the class that contains it. -from typing import Literal - class HelloWorld: def __new__(cls) -> str: @@ -10,4 +8,4 @@ def __new__(cls) -> str: v1 = HelloWorld() -t_v1: Literal["str"] = reveal_type(v1) +reveal_type(v1, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/constructor8.py b/packages/pyright-internal/src/tests/samples/constructor8.py index 9927be99a65c..71587f175537 100644 --- a/packages/pyright-internal/src/tests/samples/constructor8.py +++ b/packages/pyright-internal/src/tests/samples/constructor8.py @@ -17,13 +17,13 @@ def __new__(cls, x: _T1) -> "A[_T1]": a1 = func1(A[float], 3.4) -t_a1: Literal["A[float]"] = reveal_type(a1) +reveal_type(a1, expected_text="A[float]") # This should generate an error. a2 = func1(A[int], 3.4) a3 = func1(A[int], 3) -t_a3: Literal["A[int]"] = reveal_type(a3) +reveal_type(a3, expected_text="A[int]") class B(Generic[_T1]): @@ -35,24 +35,24 @@ def __new__(cls, x: int, y: Literal[True]) -> "B[None]": def __new__(cls, x: _T1, y: bool = ...) -> "B[_T1]": ... - def __new__(cls, x: Union[_T1, int], y: bool) -> "B[Any]": + def __new__(cls, x: Union[_T1, int], y: bool = False) -> "B[Any]": ... 
b1 = func1(B[int], 3) -t_b1: Literal["B[int]"] = reveal_type(b1) +reveal_type(b1, expected_text="B[int]") # This should generate an error. b2 = func1(B[None], 3.5) b3 = func1(B[float], 3.5) -t_b3: Literal["B[float]"] = reveal_type(b3) +reveal_type(b3, expected_text="B[float]") b4 = func1(B[Union[int, str]], 3) -t_b4: Literal["B[int | str]"] = reveal_type(b4) +reveal_type(b4, expected_text="B[int | str]") b5 = func1(B[Union[int, str]], "3") -t_b5: Literal["B[int | str]"] = reveal_type(b5) +reveal_type(b5, expected_text="B[int | str]") class C(Generic[_T1]): @@ -61,13 +61,13 @@ def __init__(self: "C[_T1]", x: _T1) -> None: c1 = func1(C[float], 3.4) -t_c1: Literal["C[float]"] = reveal_type(c1) +reveal_type(c1, expected_text="C[float]") # This should generate an error. c2 = func1(C[int], 3.4) c3 = func1(C[int], 3) -t_c3: Literal["C[int]"] = reveal_type(c3) +reveal_type(c3, expected_text="C[int]") class D(Generic[_T1]): @@ -79,21 +79,21 @@ def __init__(self: "D[None]", x: int, y: Literal[True]) -> None: def __init__(self: "D[_T1]", x: _T1, y: bool = ...) -> None: ... - def __init__(self, x: Any, y: bool) -> None: + def __init__(self, x: Any, y: bool = False) -> None: ... d1 = func1(D[int], 3) -t_d1: Literal["D[int]"] = reveal_type(d1) +reveal_type(d1, expected_text="D[int]") # This should generate an error. 
d2 = func1(D[None], 3.5) d3 = func1(D[float], 3.5) -t_d3: Literal["D[float]"] = reveal_type(d3) +reveal_type(d3, expected_text="D[float]") d4 = func1(D[Union[int, str]], 3) -t_d4: Literal["D[int | str]"] = reveal_type(d4) +reveal_type(d4, expected_text="D[int | str]") d5 = func1(D[Union[int, str]], "3") -t_d5: Literal["D[int | str]"] = reveal_type(d5) +reveal_type(d5, expected_text="D[int | str]") diff --git a/packages/pyright-internal/src/tests/samples/coroutines3.py b/packages/pyright-internal/src/tests/samples/coroutines3.py new file mode 100644 index 000000000000..fd2445fb0db5 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/coroutines3.py @@ -0,0 +1,37 @@ +# This sample tests old-style (pre-await) awaitable generators. + +import asyncio +from typing import Any, AwaitableGenerator + + +@asyncio.coroutine +def old_style_coroutine1(): + yield from asyncio.sleep(1) + + +async def func1() -> None: + x = await old_style_coroutine1() + reveal_type(x, expected_text="None") + return x + + +reveal_type( + old_style_coroutine1, + expected_text="() -> AwaitableGenerator[Any, None, None, None]", +) + + +@asyncio.coroutine +def old_style_coroutine2() -> AwaitableGenerator[Any, None, None, None]: + yield from asyncio.sleep(1) + + +async def func2() -> None: + x = await old_style_coroutine2() + return x + + +reveal_type( + old_style_coroutine2, + expected_text="() -> AwaitableGenerator[Any, None, None, None]", +) diff --git a/packages/pyright-internal/src/tests/samples/dataclass12.py b/packages/pyright-internal/src/tests/samples/dataclass12.py index e4a067183a66..2ef75ba262a8 100644 --- a/packages/pyright-internal/src/tests/samples/dataclass12.py +++ b/packages/pyright-internal/src/tests/samples/dataclass12.py @@ -2,7 +2,7 @@ # dataclasses use generic types. 
from dataclasses import dataclass -from typing import Generic, Literal, TypeVar +from typing import Generic, TypeVar Key0 = TypeVar("Key0") Key1 = TypeVar("Key1") @@ -25,8 +25,8 @@ class Foo(Generic[Key2, Value]): def add(self, key: Key2, value: Value): return MapTreeNode(key=key, value=value) - def test1(self, a: int, b: str): + def test1(self, a: Key2, b: Value): v1 = self.add(a, b) - t1: Literal["MapTreeNode[int, str]"] = reveal_type(v1) - t1_key: Literal["int"] = reveal_type(v1.key) - t1_value: Literal["str"] = reveal_type(v1.value) + reveal_type(v1, expected_text="MapTreeNode[Key2@Foo, Value@Foo]") + reveal_type(v1.key, expected_text="Key2@Foo") + reveal_type(v1.value, expected_text="Value@Foo") diff --git a/packages/pyright-internal/src/tests/samples/dataclass16.py b/packages/pyright-internal/src/tests/samples/dataclass16.py new file mode 100644 index 000000000000..782e1e3490f1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclass16.py @@ -0,0 +1,21 @@ +# This sample verifies that a generic dataclass works. + +from dataclasses import dataclass +from typing import Generic, TypeVar, Union + +T = TypeVar("T") + + +@dataclass +class Foo(Generic[T]): + value: Union[str, T] + + +reveal_type(Foo(""), expected_text="Foo[str]") + + +class Bar(Foo[int]): + pass + + +reveal_type(Bar(123), expected_text="Bar") diff --git a/packages/pyright-internal/src/tests/samples/dataclass17.py b/packages/pyright-internal/src/tests/samples/dataclass17.py new file mode 100644 index 000000000000..69c7dcd52589 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclass17.py @@ -0,0 +1,36 @@ +# This sample tests the "slots" parameter for dataclasses introduced +# in Python 3.10. + +from dataclasses import dataclass + + +# This should generate an error because __slots__ is already defined. 
+@dataclass(slots=True) +class A: + x: int + + __slots__ = () + + +@dataclass(slots=True) +class B: + x: int + + def __init__(self): + self.x = 3 + + # This should generate an error because "y" is not in slots. + self.y = 3 + + +@dataclass(slots=False) +class C: + x: int + + __slots__ = ("x",) + + def __init__(self): + self.x = 3 + + # This should generate an error because "y" is not in slots. + self.y = 3 diff --git a/packages/pyright-internal/src/tests/samples/dataclass18.py b/packages/pyright-internal/src/tests/samples/dataclass18.py new file mode 100644 index 000000000000..0ce0a11a3483 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclass18.py @@ -0,0 +1,57 @@ +# This sample tests the synthesis of the __hash__ method for dataclasses. + +from dataclasses import dataclass +from typing import Hashable + + +@dataclass +class DC1: + a: int + + +# This should generate an error. +v1: Hashable = DC1(0) + + +@dataclass(eq=True, frozen=True) +class DC2: + a: int + + +v2: Hashable = DC2(0) + + +@dataclass(eq=True) +class DC3: + a: int + + +# This should generate an error. +v3: Hashable = DC3(0) + + +@dataclass(frozen=True) +class DC4: + a: int + + +v4: Hashable = DC4(0) + + +@dataclass(eq=True, unsafe_hash=True) +class DC5: + a: int + + +v5: Hashable = DC5(0) + + +@dataclass(eq=True) +class DC6: + a: int + + def __hash__(self) -> int: + return 0 + + +v6: Hashable = DC6(0) diff --git a/packages/pyright-internal/src/tests/samples/dataclass2.py b/packages/pyright-internal/src/tests/samples/dataclass2.py new file mode 100644 index 000000000000..69cd8c422b66 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclass2.py @@ -0,0 +1,30 @@ +# This sample tests the handling of Callable fields within a +# dataclass definition. 
+ +# pyright: strict + +from dataclasses import dataclass +from typing import Any, Callable, TypeVar + +CallableT = TypeVar("CallableT", bound=Callable[..., Any]) + + +def decorate(arg: CallableT) -> CallableT: + return arg + + +def f(s: str) -> int: + return int(s) + + +@dataclass +class C: + str_to_int: Callable[[str], int] = f + + +c = C() + + +reveal_type(c.str_to_int, expected_text="(str) -> int") + +c.str_to_int = decorate(f) diff --git a/packages/pyright-internal/src/tests/samples/dataclass4.py b/packages/pyright-internal/src/tests/samples/dataclass4.py index c4cbfb739ff8..b040b9cea1e9 100644 --- a/packages/pyright-internal/src/tests/samples/dataclass4.py +++ b/packages/pyright-internal/src/tests/samples/dataclass4.py @@ -44,6 +44,9 @@ class Baz1: # defaults. ccc: str + def __init__(self) -> None: + pass + @dataclass class Baz2: diff --git a/packages/pyright-internal/src/tests/samples/dataclass5.py b/packages/pyright-internal/src/tests/samples/dataclass5.py index 8a2f2bce6e60..43842a48034f 100644 --- a/packages/pyright-internal/src/tests/samples/dataclass5.py +++ b/packages/pyright-internal/src/tests/samples/dataclass5.py @@ -2,7 +2,6 @@ # with a custom __init__. 
from dataclasses import dataclass -from typing import Literal @dataclass(init=False) @@ -67,10 +66,10 @@ def __lt__(self, x: "E") -> str: foo1 = E(3) == E(3) -t1: Literal["float"] = reveal_type(foo1) +reveal_type(foo1, expected_text="float") foo2 = E(3) < E(3) -t2: Literal["str"] = reveal_type(foo2) +reveal_type(foo2, expected_text="str") @dataclass(order=True) @@ -79,4 +78,4 @@ class F: foo3 = F(3) < F(3) -t3: Literal["bool"] = reveal_type(foo3) +reveal_type(foo3, expected_text="bool") diff --git a/packages/pyright-internal/src/tests/samples/dataclass7.py b/packages/pyright-internal/src/tests/samples/dataclass7.py index b9e095970e0d..40f71bb1234e 100644 --- a/packages/pyright-internal/src/tests/samples/dataclass7.py +++ b/packages/pyright-internal/src/tests/samples/dataclass7.py @@ -1,7 +1,7 @@ # This sample tests the analyzer's ability to handle inherited # data classes. -from dataclasses import dataclass +from dataclasses import dataclass, field class C1: @@ -63,3 +63,31 @@ class DC5(DC3): # aa replaces aa in DC3, and it's ordered # before the params with default values. aa: C2 + + +@dataclass +class DC6: + a: int = 0 + + +@dataclass +class DC7(DC6): + a: int + + # This should generate an error because the default + # value for "a" is inherited from the base class. + b: str + + +@dataclass +class DC8: + a: int = field(default=0) + + +@dataclass +class DC9(DC8): + a: int + + # This should generate an error because the default + # value for "a" is inherited from the base class. + b: str diff --git a/packages/pyright-internal/src/tests/samples/dataclassPostInit1.py b/packages/pyright-internal/src/tests/samples/dataclassPostInit1.py new file mode 100644 index 000000000000..e3451e69f856 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclassPostInit1.py @@ -0,0 +1,43 @@ +# This sample tests the __post_init__ validation logic. 
+ +from dataclasses import InitVar, dataclass, field +from typing import Iterable + + +@dataclass +class A: + a: InitVar[int] + b: InitVar[str] + c: InitVar[bool] + + def __post_init__(self, x: float, y: str, z: int, xx: int = 3) -> None: + ... + + +@dataclass +class B: + items: list[int] + + # This should generate an error because the number of InitVars is zero. + def __post_init__(self, x: list[int]) -> None: + ... + + +@dataclass +class C: + iterable: InitVar[Iterable[int]] + + items: list[int] = field(init=False) + + # This should generate an error because the number of InitVars is 1. + def __post_init__(self) -> None: + ... + + +@dataclass +class D: + iterable: InitVar[Iterable[int]] + + # This should generate an error because the type is incompatible. + def __post_init__(self, iterable: Iterable[str]) -> None: + ... diff --git a/packages/pyright-internal/src/tests/samples/dataclassTransform1.py b/packages/pyright-internal/src/tests/samples/dataclassTransform1.py index 4c3d51565e40..51c114755ce6 100644 --- a/packages/pyright-internal/src/tests/samples/dataclassTransform1.py +++ b/packages/pyright-internal/src/tests/samples/dataclassTransform1.py @@ -1,29 +1,20 @@ # This sample tests the handling of the dataclass_transform mechanism # when applied to a decorator function. -from typing import Any, Callable, Tuple, TypeVar, Union, overload +from typing import Any, Callable, TypeVar, overload +from typing_extensions import dataclass_transform _T = TypeVar("_T") -def __dataclass_transform__( - *, - eq_default: bool = True, - order_default: bool = False, - kw_only_default: bool = False, - field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), -) -> Callable[[_T], _T]: - return lambda a: a - - @overload -@__dataclass_transform__(kw_only_default=True, order_default=True) +@dataclass_transform(kw_only_default=True, order_default=True) def create_model(cls: _T) -> _T: ... 
@overload -@__dataclass_transform__(kw_only_default=True, order_default=True) +@dataclass_transform(kw_only_default=True, order_default=True) def create_model( *, frozen: bool = False, diff --git a/packages/pyright-internal/src/tests/samples/dataclassTransform2.py b/packages/pyright-internal/src/tests/samples/dataclassTransform2.py index 1491c2caa7f8..712ed47aa384 100644 --- a/packages/pyright-internal/src/tests/samples/dataclassTransform2.py +++ b/packages/pyright-internal/src/tests/samples/dataclassTransform2.py @@ -1,21 +1,12 @@ # This sample tests the handling of the dataclass_transform mechanism # when applied to a metaclass. -from typing import Any, Callable, Optional, Tuple, TypeVar, Union +from typing import Any, Optional, TypeVar +from typing_extensions import dataclass_transform _T = TypeVar("_T") -def __dataclass_transform__( - *, - eq_default: bool = True, - order_default: bool = False, - kw_only_default: bool = False, - field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), -) -> Callable[[_T], _T]: - return lambda a: a - - class ModelField: def __init__(self, *, init: bool = True, default: Optional[Any] = None) -> None: ... @@ -27,7 +18,7 @@ def model_field( ... -@__dataclass_transform__( +@dataclass_transform( kw_only_default=True, field_descriptors=(ModelField, model_field), ) diff --git a/packages/pyright-internal/src/tests/samples/dataclassTransform3.py b/packages/pyright-internal/src/tests/samples/dataclassTransform3.py index cc8341510b4e..8f097cc62426 100644 --- a/packages/pyright-internal/src/tests/samples/dataclassTransform3.py +++ b/packages/pyright-internal/src/tests/samples/dataclassTransform3.py @@ -1,87 +1,83 @@ -# This sample tests the case where a field descriptor has an implicit -# "init" parameter type based on an overload. 
- -from typing import ( - Any, - Callable, - Literal, - Optional, - Tuple, - Type, - TypeVar, - Union, - overload, -) +# This sample tests the handling of the dataclass_transform mechanism +# when applied to a class. + +from typing import Any, Callable, Optional, Tuple, TypeVar, Union + +_T = TypeVar("_T") -T = TypeVar("T") + +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: + return lambda a: a class ModelField: - def __init__( - self, - *, - default: Optional[Any] = ..., - init: Optional[bool] = True, - **kwargs: Any, - ) -> None: + def __init__(self, *, init: bool = True, default: Optional[Any] = None) -> None: ... -@overload -def field( - *, - default: Optional[str] = None, - resolver: Callable[[], Any], - init: Literal[False] = False, +def model_field( + *, init: bool = True, default: Optional[Any] = None, alias: Optional[str] = None ) -> Any: ... -@overload -def field( - *, - default: Optional[str] = None, - resolver: None = None, - init: Literal[True] = True, -) -> Any: - ... +@__dataclass_transform__( + kw_only_default=True, + field_descriptors=(ModelField, model_field), +) +class ModelBase: + def __init_subclass__( + cls, + *, + frozen: bool = False, + kw_only: bool = True, + order: bool = True, + ) -> None: + ... -def field( - *, - default: Optional[str] = None, - resolver: Optional[Callable[[], Any]] = None, - init: bool = True, -) -> Any: - ... +class Customer1(ModelBase, frozen=True): + id: int = model_field() + name: str = model_field() + name2: str = model_field(alias="other_name", default="None") -def __dataclass_transform__( - *, - eq_default: bool = True, - order_default: bool = False, - kw_only_default: bool = False, - field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] 
= (()), -) -> Callable[[T], T]: - # If used within a stub file, the following implementation can be - # replaced with "...". - return lambda a: a +class Customer1Subclass(Customer1, frozen=False): + salary: float = model_field() -@__dataclass_transform__(kw_only_default=True, field_descriptors=(field,)) -def create_model(*, init: bool = True) -> Callable[[Type[T]], Type[T]]: - ... +class Customer2(ModelBase, order=True): + id: int + name: str = model_field(default="None") + + +c1_1 = Customer1(id=3, name="Sue", other_name="Susan") + +# This should generate an error because the class is frozen. +c1_1.id = 4 + +# This should generate an error because the class is kw_only. +c1_2 = Customer1(3, "Sue") + +# This should generate an error because other_name is missing. +c1_3 = Customer1(id=3, name="John") +# This should generate an error because comparison methods are +# not synthesized. +v1 = c1_1 < c1_2 -@create_model() -class CustomerModel: - id: int = field(resolver=lambda: 0) - name: str = field(default="Voldemort") +c2_1 = Customer2(id=0, name="John") +c2_2 = Customer2(id=1) -CustomerModel() -CustomerModel(name="hi") +v2 = c2_1 < c2_2 -# This should generate an error because "id" is not -# supposed to be part of the init function. -CustomerModel(id=1, name="hi") +# This should generate an error because Customer2 supports +# keyword-only parameters for its constructor. +c2_3 = Customer2(0, "John") diff --git a/packages/pyright-internal/src/tests/samples/dataclassTransform4.py b/packages/pyright-internal/src/tests/samples/dataclassTransform4.py new file mode 100644 index 000000000000..cc8341510b4e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclassTransform4.py @@ -0,0 +1,87 @@ +# This sample tests the case where a field descriptor has an implicit +# "init" parameter type based on an overload. 
+ +from typing import ( + Any, + Callable, + Literal, + Optional, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +T = TypeVar("T") + + +class ModelField: + def __init__( + self, + *, + default: Optional[Any] = ..., + init: Optional[bool] = True, + **kwargs: Any, + ) -> None: + ... + + +@overload +def field( + *, + default: Optional[str] = None, + resolver: Callable[[], Any], + init: Literal[False] = False, +) -> Any: + ... + + +@overload +def field( + *, + default: Optional[str] = None, + resolver: None = None, + init: Literal[True] = True, +) -> Any: + ... + + +def field( + *, + default: Optional[str] = None, + resolver: Optional[Callable[[], Any]] = None, + init: bool = True, +) -> Any: + ... + + +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[T], T]: + # If used within a stub file, the following implementation can be + # replaced with "...". + return lambda a: a + + +@__dataclass_transform__(kw_only_default=True, field_descriptors=(field,)) +def create_model(*, init: bool = True) -> Callable[[Type[T]], Type[T]]: + ... + + +@create_model() +class CustomerModel: + id: int = field(resolver=lambda: 0) + name: str = field(default="Voldemort") + + +CustomerModel() +CustomerModel(name="hi") + +# This should generate an error because "id" is not +# supposed to be part of the init function. +CustomerModel(id=1, name="hi") diff --git a/packages/pyright-internal/src/tests/samples/dataclassTransform5.py b/packages/pyright-internal/src/tests/samples/dataclassTransform5.py new file mode 100644 index 000000000000..0c16081eac6e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/dataclassTransform5.py @@ -0,0 +1,72 @@ +# This sample tests the "transform_descriptor_types" parameter of a +# dataclass_transform. 
+ +from typing import Any, Callable, Generic, Tuple, TypeVar, Union + +T = TypeVar("T") + + +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + transform_descriptor_types: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[T], T]: + return lambda a: a + + +def mapped_field( + *, + default: Any, +) -> Any: + ... + + +class Descriptor(Generic[T]): + def __get__(self, instance: object, owner: Any) -> T: + ... + + def __set__(self, instance: object, value: T) -> None: + ... + + +@__dataclass_transform__( + transform_descriptor_types=True, field_descriptors=(mapped_field,) +) +class ModelBaseDescriptorTransform: + ... + + +class UserModel1(ModelBaseDescriptorTransform): + name: Descriptor[str] + age: Descriptor[int | None] = mapped_field(default=None) + + +reveal_type( + UserModel1.__init__, + expected_text="(self: UserModel1, name: str, age: int | None = None) -> None", +) + +um1 = UserModel1(name="hi", age=1) + + +@__dataclass_transform__( + transform_descriptor_types=False, field_descriptors=(mapped_field,) +) +class ModelBaseNoDescriptorTransform: + ... + + +class UserModel2(ModelBaseNoDescriptorTransform): + name: Descriptor[str] + + +reveal_type( + UserModel2.__init__, + expected_text="(self: UserModel2, name: Descriptor[str]) -> None", +) + +# This should generate an error because "hi" is not a descriptor instance. 
+um2 = UserModel2(name="hi") diff --git a/packages/pyright-internal/src/tests/samples/decorator4.py b/packages/pyright-internal/src/tests/samples/decorator4.py index a21095299544..022cee5fde4d 100644 --- a/packages/pyright-internal/src/tests/samples/decorator4.py +++ b/packages/pyright-internal/src/tests/samples/decorator4.py @@ -3,7 +3,6 @@ # pyright: reportMissingImports=false -from typing import Literal import my_module @@ -26,7 +25,7 @@ def __init__(self, a, b, c): v1 = Class1(1, 2, 3) -t1: Literal["Class1"] = reveal_type(v1) +reveal_type(v1, expected_text="Class1") @my_decorator @@ -35,4 +34,4 @@ def func1() -> int: v2 = func1() -t2: Literal["int"] = reveal_type(v2) +reveal_type(v2, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/decorator5.py b/packages/pyright-internal/src/tests/samples/decorator5.py index 2444da4b91cf..760c4258b18e 100644 --- a/packages/pyright-internal/src/tests/samples/decorator5.py +++ b/packages/pyright-internal/src/tests/samples/decorator5.py @@ -2,8 +2,6 @@ # an unannotated decorator should allow the decorated # function type (and docstring) to pass through unmodified. 
-from typing import Literal - def simple_decorator(method): def wrapper(*args, **kw): @@ -18,7 +16,7 @@ def function(var: str, kvar: str): return -t1: Literal["(var: str, kvar: str) -> None"] = reveal_type(function) +reveal_type(function, expected_text="(var: str, kvar: str) -> None") class Foo: @@ -27,5 +25,5 @@ def method(self, var: str, kvar: str): return -t2: Literal["(var: str, kvar: str) -> None"] = reveal_type(Foo().method) -t3: Literal["(self: Foo, var: str, kvar: str) -> None"] = reveal_type(Foo.method) +reveal_type(Foo().method, expected_text="(var: str, kvar: str) -> None") +reveal_type(Foo.method, expected_text="(self: Foo, var: str, kvar: str) -> None") diff --git a/packages/pyright-internal/src/tests/samples/decorator6.py b/packages/pyright-internal/src/tests/samples/decorator6.py new file mode 100644 index 000000000000..4e06939a865e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/decorator6.py @@ -0,0 +1,34 @@ +# This sample tests that function decorators can be combined with +# staticmethod or classmethod. 
+ +from typing import Callable, TypeVar +import functools + +_T = TypeVar("_T") + + +def decorator(func: Callable[[_T, str], None]) -> Callable[[_T, str], None]: + @functools.wraps(func) + def func_wrapper(firstarg: _T, secondarg: str) -> None: + return func(firstarg, secondarg) + + return func_wrapper + + +class Test: + def __init__(self): + self.test1(1, "a") + self.test2("hi") + + @staticmethod + @decorator + def test1(firstarg: int, secondarg: str) -> None: + print(secondarg) + + @classmethod + @decorator + def test2(cls, secondarg: str) -> None: + print(secondarg) + + +Test() diff --git a/packages/pyright-internal/src/tests/samples/deprecated1.py b/packages/pyright-internal/src/tests/samples/deprecated1.py new file mode 100644 index 000000000000..316158871b2f --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/deprecated1.py @@ -0,0 +1,37 @@ +# This sample tests the detection of deprecated classes from the typing +# module. + + +from typing import ( + ChainMap, + Counter, + DefaultDict, + Deque, + Dict, + FrozenSet, + List, + Optional, + OrderedDict, + Set, + Tuple, + Type, + Union, +) + + +# These should be marked deprecated for Python >= 3.9 +v1: List[int] = [1, 2, 3] +v2: Dict[int, str] = {} +v3: Set[int] = set() +v4: Tuple[int] = (3,) +v5: FrozenSet[int] = frozenset() +v6: Type[int] = int +v7 = Deque() +v8 = DefaultDict() +v9 = OrderedDict() +v10 = Counter() +v11 = ChainMap() + +# These should be marked deprecated for Python >= 3.10 +v20: Union[int, str] +v21: Optional[int] diff --git a/packages/pyright-internal/src/tests/samples/descriptor1.py b/packages/pyright-internal/src/tests/samples/descriptor1.py new file mode 100644 index 000000000000..7b18c2ef9047 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/descriptor1.py @@ -0,0 +1,126 @@ +# This sample tests the detection and handling of asymmetric descriptors +# and properties. Type narrowing should be disabled in these cases. 
+ +from typing import Any, Literal, Optional + + +class A: + @property + def prop1(self) -> Optional[int]: + ... + + @prop1.setter + def prop1(self, val: Optional[int]) -> None: + ... + + @property + def prop2(self) -> Optional[int]: + ... + + @prop2.setter + def prop2(self, val: int) -> None: + ... + + @property + def prop3(self) -> int: + ... + + @prop3.setter + def prop3(self, val: Optional[int]) -> None: + ... + + +def func1(obj: A) -> Literal[3]: + obj.prop1 = None + + b: None = obj.prop1 + + obj.prop1 = 3 + + obj.prop1 + 1 + return obj.prop1 + + +def func2(obj: A) -> Literal[3]: + obj.prop2 = 3 + + # This should generate an error because prop2 isn't + # narrowed in this case. + b: int = obj.prop2 + + # This should generate an error because prop2 isn't + # narrowed in this case. + return obj.prop2 + + +def func3(obj: A) -> Literal[3]: + obj.prop3 = 3 + + b: int = obj.prop3 + + # This should generate an error because prop2 isn't + # narrowed in this case. + return obj.prop3 + + +class Descriptor1: + def __get__(self, instance: Any, owner: Any) -> Optional[int]: + ... + + def __set__(self, owner: Any, value: Optional[int]) -> None: + ... + + +class Descriptor2: + def __get__(self, instance: Any, owner: Any) -> Optional[int]: + ... + + def __set__(self, owner: Any, value: int) -> None: + ... + + +class Descriptor3: + def __get__(self, instance: Any, owner: Any) -> int: + ... + + def __set__(self, owner: Any, value: Optional[int]) -> None: + ... + + +class B: + desc1: Descriptor1 + desc2: Descriptor2 + desc3: Descriptor3 + + +def func4(obj: B) -> Literal[3]: + obj.desc1 = None + + b: None = obj.desc1 + + obj.desc1 = 3 + + obj.desc1 + 1 + return obj.desc1 + + +def func5(obj: B) -> Literal[3]: + obj.desc2 = 3 + + # This should generate an error because desc2 isn't + # narrowed in this case. + b: int = obj.desc2 + + # This should generate an error because desc2 isn't + # narrowed in this case. 
+ return obj.desc2 + + +def func6(obj: B) -> Literal[3]: + obj.desc3 = 3 + + b: int = obj.desc3 + + # This should generate an error because prop2 isn't + # narrowed in this case. + return obj.desc3 diff --git a/packages/pyright-internal/src/tests/samples/dictionary1.py b/packages/pyright-internal/src/tests/samples/dictionary1.py index 73aa44bd8d57..d57de409b267 100644 --- a/packages/pyright-internal/src/tests/samples/dictionary1.py +++ b/packages/pyright-internal/src/tests/samples/dictionary1.py @@ -26,7 +26,7 @@ def wantsIntDict(a: Dict[int, int]): d2 = {"hi": 3} d3 = {**d2, "": 4} -td3: Literal["dict[str, int]"] = reveal_type(d3) +reveal_type(d3, expected_text="dict[str, int]") LitChoices = Literal["ab", "bcd"] diff --git a/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py b/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py index 6184d25cb089..4e335aaa789e 100644 --- a/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py +++ b/packages/pyright-internal/src/tests/samples/duplicateDeclaration1.py @@ -1,7 +1,7 @@ # This sample tests the detection of duplicate (overwritten) symbols. -from typing import overload +from typing import Callable, overload class C: @@ -64,3 +64,52 @@ def b(): b: int = 3 + + +def func1(cond: bool): + if cond: + + def a() -> int: + return 3 + + # This should generate an error because its inferred return + # type differs from b above. + def b(): + return 3 + + # This should generate an error because the parameter names don't match. + def c(a: int, b: str) -> None: + return None + + # This should generate an error because the parameter is positional-only. + def d(a: int) -> None: + return None + + def e(a: int, /) -> None: + return None + + # This should generate an error because the parameter is not positional-only. 
+ f: Callable[[int], None] = lambda a: None + + g: Callable[[int], None] = lambda a: None + + else: + + def a() -> int: + return 2 + + def b(): + return 2 + + def c(a: int, c: str) -> None: + return None + + d: Callable[[int], None] = lambda a: None + + e: Callable[[int], None] = lambda a: None + + def f(a: int) -> None: + return None + + def g(a: int, /) -> None: + return None diff --git a/packages/pyright-internal/src/tests/samples/emptyContainers1.py b/packages/pyright-internal/src/tests/samples/emptyContainers1.py index 30a8e4fcbf1c..e1cdfa64e496 100644 --- a/packages/pyright-internal/src/tests/samples/emptyContainers1.py +++ b/packages/pyright-internal/src/tests/samples/emptyContainers1.py @@ -2,7 +2,7 @@ # pyright: reportUnknownVariableType=true, reportUnknownArgumentType=true -from typing import List, Literal +from typing import List def func1(a: bool): @@ -11,29 +11,30 @@ def func1(a: bool): if a: val1 = [2, 3] - t_val1: Literal["list[int]"] = reveal_type(val1) + reveal_type(val1, expected_text="list[int]") if a: val2 = [] else: val2 = [] - t_val2: Literal["list[Unknown]"] = reveal_type(val2) + reveal_type(val2, expected_text="list[Unknown]") # This should generate an error because val2 is partially unknown. val2 += [3] val3 = val2 - # This should generate an error because val3 is partially unknown. + # This would normally generate an error, but because it comes from + # a [] expression, it's allowed. 
print(val3) - t_val3_1: Literal["list[Unknown]"] = reveal_type(val3) + reveal_type(val3, expected_text="list[Unknown]") if a: val3 = [3.4] print(val3) - t_val3_2: Literal["list[float]"] = reveal_type(val3) + reveal_type(val3, expected_text="list[float]") def func2(a: bool): @@ -42,29 +43,30 @@ def func2(a: bool): if a: val1 = {"a": 2} - t_val1: Literal["dict[str, int]"] = reveal_type(val1) + reveal_type(val1, expected_text="dict[str, int]") if a: val2 = {} else: val2 = {} - t_val2: Literal["dict[Unknown, Unknown]"] = reveal_type(val2) + reveal_type(val2, expected_text="dict[Unknown, Unknown]") # This should generate an error because val2 is partially unknown. val2.pop() val3 = val2 - # This should generate an error because val3 is partially unknown. + # This would normally generate an error, but because it comes from + # a {} expression, it's allowed. print(val3) - t_val3_1: Literal["dict[Unknown, Unknown]"] = reveal_type(val3) + reveal_type(val3, expected_text="dict[Unknown, Unknown]") if a: val3 = {"b": 3.4} print(val3) - t_val3_2: Literal["dict[str, float]"] = reveal_type(val3) + reveal_type(val3, expected_text="dict[str, float]") class A: @@ -78,9 +80,9 @@ def method2(self): self.val2 = {"a": 1} def method3(self): - t_val1: Literal["list[float]"] = reveal_type(self.val1) - t_val2: Literal["dict[str, int]"] = reveal_type(self.val2) - t_val3: Literal["list[Unknown]"] = reveal_type(self.val3) + reveal_type(self.val1, expected_text="list[float]") + reveal_type(self.val2, expected_text="dict[str, int]") + reveal_type(self.val3, expected_text="list[Unknown]") def method4(self) -> List[int]: # This should generate an error because of a type mismatch. 
diff --git a/packages/pyright-internal/src/tests/samples/enums1.py b/packages/pyright-internal/src/tests/samples/enums1.py index 9e9daf59c2bc..9c4a5ebb440c 100644 --- a/packages/pyright-internal/src/tests/samples/enums1.py +++ b/packages/pyright-internal/src/tests/samples/enums1.py @@ -1,7 +1,6 @@ # This sample tests the type checker's handling of Enum. from enum import Enum, IntEnum -from typing import Literal TestEnum1 = Enum("TestEnum1", "A B C D") @@ -39,15 +38,15 @@ class TestEnum3(Enum): # Test that enum classes are iterable. list1 = list(TestEnum3) -t1: Literal["list[TestEnum3]"] = reveal_type(list1) +reveal_type(list1, expected_text="list[TestEnum3]") list2 = [i for i in TestEnum3] -t2: Literal["list[TestEnum3]"] = reveal_type(list2) +reveal_type(list2, expected_text="list[TestEnum3]") num_items_in_enum3 = len(TestEnum3) -t3: Literal["int"] = reveal_type(num_items_in_enum3) +reveal_type(num_items_in_enum3, expected_text="int") -t4: Literal["Literal['A']"] = reveal_type(TestEnum3.A.name) -t5: Literal["Literal['A']"] = reveal_type(TestEnum3.A._name_) -t6: Literal["Literal[0]"] = reveal_type(TestEnum3.A.value) -t7: Literal["Literal[0]"] = reveal_type(TestEnum3.A._value_) +reveal_type(TestEnum3.A.name, expected_text="Literal['A']") +reveal_type(TestEnum3.A._name_, expected_text="Literal['A']") +reveal_type(TestEnum3.A.value, expected_text="Literal[0]") +reveal_type(TestEnum3.A._value_, expected_text="Literal[0]") diff --git a/packages/pyright-internal/src/tests/samples/enums4.py b/packages/pyright-internal/src/tests/samples/enums4.py index ea993f01007d..1cee93dd75bc 100644 --- a/packages/pyright-internal/src/tests/samples/enums4.py +++ b/packages/pyright-internal/src/tests/samples/enums4.py @@ -2,7 +2,6 @@ # they are not named as such. 
from enum import Enum -from typing import Literal class Status(Enum): @@ -18,7 +17,7 @@ def __init__(self): myobj = Myclass() -t1: Literal["Status"] = reveal_type(myobj.status) +reveal_type(myobj.status, expected_text="Status") myobj.status = Status.good -t2: Literal["Literal[Status.good]"] = reveal_type(myobj.status) +reveal_type(myobj.status, expected_text="Literal[Status.good]") diff --git a/packages/pyright-internal/src/tests/samples/enums5.py b/packages/pyright-internal/src/tests/samples/enums5.py index 4e2d4a6d8509..a7139fa68711 100644 --- a/packages/pyright-internal/src/tests/samples/enums5.py +++ b/packages/pyright-internal/src/tests/samples/enums5.py @@ -1,7 +1,6 @@ # This sample tests logical operators on enums. import enum -from typing import Literal class CustomFlags(enum.Flag): @@ -11,7 +10,7 @@ class CustomFlags(enum.Flag): flags1 = CustomFlags.A | CustomFlags.B -t1: Literal["CustomFlags"] = reveal_type(flags1) +reveal_type(flags1, expected_text="CustomFlags") flags2 = CustomFlags.A & CustomFlags.B -t2: Literal["CustomFlags"] = reveal_type(flags2) +reveal_type(flags2, expected_text="CustomFlags") diff --git a/packages/pyright-internal/src/tests/samples/enums6.py b/packages/pyright-internal/src/tests/samples/enums6.py index 917f6ad47cd3..1afd9ef7df00 100644 --- a/packages/pyright-internal/src/tests/samples/enums6.py +++ b/packages/pyright-internal/src/tests/samples/enums6.py @@ -3,7 +3,6 @@ # They should not be treated as enum objects. 
from enum import Enum -from typing import Literal class Descriptor: @@ -27,8 +26,8 @@ def __init__(self, foo: int, bar: str) -> None: baz = 123 + MyEnum.ENTRY.foo -t_baz: Literal["int"] = reveal_type(baz) +reveal_type(baz, expected_text="int") -t_exempt: Literal["int"] = reveal_type(MyEnum._exempt_) +reveal_type(MyEnum._exempt_, expected_text="int") -t_desc: Literal["complex"] = reveal_type(MyEnum.desc) +reveal_type(MyEnum.desc, expected_text="complex") diff --git a/packages/pyright-internal/src/tests/samples/enums7.py b/packages/pyright-internal/src/tests/samples/enums7.py new file mode 100644 index 000000000000..eb4b4f2a086e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/enums7.py @@ -0,0 +1,55 @@ +# This sample tests error detection of duplicate enum members and +# an attempt to subclass an enum. + +from enum import Enum + + +class Color(Enum): + red = "red" + blue = "blue" + yellow = "yellow" + + # This should generate an error because the enum member + # already exists. + blue = "blue" + + def __init__(self, value: str): + if value == "blue": + self.foo = False + else: + self.foo = True + + +class NonEnum: + ... + + +# This should generate an error because enums cannot +# be subclassed. +class ExtraColor(NonEnum, Color): + pass + + +# This should generate an error because reassignment of enum +# values is not allowed. +# Note: A change to typeshed's enum.pyi breaks this test. +Color.red = "new" + + +class EnumWithoutValue(Enum): + def do_something(self): + pass + + @property + def y(self) -> None: + pass + + +class EnumWithValue(EnumWithoutValue): + x = 0 + + +# This should generate an error because enums with values +# cannot be subclassed. 
+class EnumSubclass(EnumWithValue): + y: int diff --git a/packages/pyright-internal/src/tests/samples/enums8.py b/packages/pyright-internal/src/tests/samples/enums8.py new file mode 100644 index 000000000000..2f2d98074dcb --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/enums8.py @@ -0,0 +1,14 @@ +# This sample tests the custom __call__ method on the EnumMeta class. + +from enum import Enum + + +class Foo(Enum): + A = 1 + B = 2 + + +Foo(1) + +# This should generate an error. +Foo(1, 2, 3, 4) diff --git a/packages/pyright-internal/src/tests/samples/expressions3.py b/packages/pyright-internal/src/tests/samples/expressions3.py index 34b464034ce4..83faa2a40e91 100644 --- a/packages/pyright-internal/src/tests/samples/expressions3.py +++ b/packages/pyright-internal/src/tests/samples/expressions3.py @@ -5,8 +5,6 @@ def returnsFloat1() -> float: a = 1 b = not a - # This should generate an error because bool - # cannot be assigned to a float. return b diff --git a/packages/pyright-internal/src/tests/samples/expressions6.py b/packages/pyright-internal/src/tests/samples/expressions6.py index 55df759d4f56..b42b28a9b7fc 100644 --- a/packages/pyright-internal/src/tests/samples/expressions6.py +++ b/packages/pyright-internal/src/tests/samples/expressions6.py @@ -1,14 +1,14 @@ # This sample tests that binary operations "or" and "and" # properly handle bidirectional type inference. 
-from typing import Any, Dict, Literal, Optional +from typing import Any, Dict, Optional def func_or(a: Optional[Dict[str, Any]]): a = a or dict() - t1: Literal["Dict[str, Any]"] = reveal_type(a) + reveal_type(a, expected_text="Dict[str, Any]") def func_and(): a: Optional[Dict[str, Any]] = True and dict() - t1: Literal["dict[str, Any]"] = reveal_type(a) + reveal_type(a, expected_text="dict[str, Any]") diff --git a/packages/pyright-internal/src/tests/samples/expressions7.py b/packages/pyright-internal/src/tests/samples/expressions7.py index b7313d385eda..c1fe4f808463 100644 --- a/packages/pyright-internal/src/tests/samples/expressions7.py +++ b/packages/pyright-internal/src/tests/samples/expressions7.py @@ -1,7 +1,5 @@ # This sample tests various conditions with AND and OR operators. -from typing import Literal - def foo() -> bool: ... @@ -15,15 +13,15 @@ def baz() -> str: ... -t1: Literal["int | Literal[False]"] = reveal_type(foo() and bar()) -t2: Literal["str | Literal[False]"] = reveal_type(foo() and baz()) -t3: Literal["int | bool"] = reveal_type(bar() and foo()) -t4: Literal["str | bool"] = reveal_type(baz() and foo()) +reveal_type(foo() and bar(), expected_text="int | Literal[False]") +reveal_type(foo() and baz(), expected_text="str | Literal[False]") +reveal_type(bar() and foo(), expected_text="int | bool") +reveal_type(baz() and foo(), expected_text="str | bool") -t5: Literal["int | Literal[True]"] = reveal_type(foo() or bar()) -t6: Literal["str | Literal[True]"] = reveal_type(foo() or baz()) -t7: Literal["int | bool"] = reveal_type(bar() or foo()) -t8: Literal["str | bool"] = reveal_type(baz() or foo()) +reveal_type(foo() or bar(), expected_text="int | Literal[True]") +reveal_type(foo() or baz(), expected_text="str | Literal[True]") +reveal_type(bar() or foo(), expected_text="int | bool") +reveal_type(baz() or foo(), expected_text="str | bool") class Foo: @@ -35,8 +33,8 @@ class Bar: def func2(a: Foo and Bar): - t1: Literal["Bar"] = reveal_type(a) + 
reveal_type(a, expected_text="Bar") def func3(a: Foo or Bar): - t1: Literal["Foo"] = reveal_type(a) + reveal_type(a, expected_text="Foo") diff --git a/packages/pyright-internal/src/tests/samples/expressions8.py b/packages/pyright-internal/src/tests/samples/expressions8.py index 8b33d243c166..cdf02196b271 100644 --- a/packages/pyright-internal/src/tests/samples/expressions8.py +++ b/packages/pyright-internal/src/tests/samples/expressions8.py @@ -10,7 +10,7 @@ class ComparableTo(Protocol[_T_contra]): @abstractmethod - def __lt__(self, x: _T_contra) -> bool: + def __lt__(self, __x: _T_contra) -> bool: pass diff --git a/packages/pyright-internal/src/tests/samples/final3.py b/packages/pyright-internal/src/tests/samples/final3.py index fdb831192acd..c0de861544f2 100644 --- a/packages/pyright-internal/src/tests/samples/final3.py +++ b/packages/pyright-internal/src/tests/samples/final3.py @@ -2,7 +2,7 @@ # introduced in Python 3.8. import typing -from typing import Final, List, Literal +from typing import Any, Final, List foo1: typing.Final = 3 @@ -27,7 +27,7 @@ foo4: Final = 5 -t_4: Literal["Literal[5]"] = reveal_type(foo4) +reveal_type(foo4, expected_text="Literal[5]") class Foo: @@ -78,6 +78,10 @@ def another_method(self): self.member7: Final = 6 +reveal_type(Foo.member1, expected_text="Literal[4]") +reveal_type(Foo(True).member1, expected_text="Literal[4]") + + class Bar(Foo): # This should generate an error because we are overriding # a member that is marked Final in the parent class. @@ -98,10 +102,78 @@ def __init__(self): # This should generate an error because Final isn't allowed for # function parameters. -def bar(a: Final[int]): +def func1(a: Final[int]): pass # This should generate an error because Final must the outermost # type in assignments. b: List[Final[int]] = [] + + +class ClassA: + member1: Final = 3 + member2: Final + + def __init__(self): + # This should generate an error. 
+ self.member1 = 5 + + self.member2 = "hi" + + self.member3: Final = "hi" + + def other(self): + # This should generate an error. + self.member1 = 5 + + # This should generate an error. + self.member2 = "hi" + + # This should generate an error. + self.member3 = "hi" + + +a = ClassA() + +# This should generate an error. +a.member1 = 4 + +# This should generate an error. +a.member3 = "x" + + +def func2(): + x: Final[Any] = 3 + + # This should generate an error because x is Final. + x += 1 + + # This should generate an error because x is Final. + a = (x := 4) + + # This should generate an error because x is Final. + for x in [1, 2, 3]: + pass + + # This should generate an error because x is Final. + with open("Hi") as x: + pass + + try: + pass + # This should generate an error because x is Final. + except ModuleNotFoundError as x: + pass + + # This should generate an error because x is Final. + (a, x) = (1, 2) + + +class ClassB: + def __init__(self): + self.x: Final = 1 + + def method1(self): + # This should generate an error because x is Final. + self.x += 1 diff --git a/packages/pyright-internal/src/tests/samples/final5.py b/packages/pyright-internal/src/tests/samples/final5.py new file mode 100644 index 000000000000..91497428b89a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/final5.py @@ -0,0 +1,18 @@ +# This sample tests that instance variables declared as Final within +# a dataclass do not need to have an explicit assignment because +# the generated __init__ method will assign them. 
+ +from dataclasses import dataclass +from typing import Final + + +class Foo1: + x: Final[int] + + def __init__(self, x: int) -> None: + self.x = x + + +@dataclass +class Foo2: + x: Final[int] diff --git a/packages/pyright-internal/src/tests/samples/function1.py b/packages/pyright-internal/src/tests/samples/function1.py index ca8265600952..9e10672aa2c1 100644 --- a/packages/pyright-internal/src/tests/samples/function1.py +++ b/packages/pyright-internal/src/tests/samples/function1.py @@ -1,87 +1,12 @@ -# This sample tests various function type checking -# behavior including arg/param matching. +# This sample tests that args and kwargs parameters are +# properly typed. -from typing import Callable +from typing import Tuple, Dict, List -# ------------------------------------------------------ -# Test function type matching +def function_with_args(*args: str) -> Tuple[str, ...]: + return args -class FooBase: - pass - -class Foo(FooBase): - pass - - -class Bar(Foo): - pass - - -def needs_function1(callback: Callable[[Foo], Foo]): - pass - - -def callback1(): - pass - - -def callback2(a: Foo) -> Foo: - return Foo() - - -def callback3(a: Foo) -> str: - return "1" - - -def callback4(a: Foo, b: Foo) -> Foo: - return Foo() - - -def callback5(a: Foo, b: int = 3) -> Foo: - return Foo() - - -def callback6(*a) -> Foo: - return Foo() - - -def callback7(a: str) -> Foo: - return Foo() - - -def callback8(a: Bar) -> Foo: - return Foo() - - -def callback9(a: FooBase) -> Foo: - return Foo() - - -# This should generate an error because callback1 -# takes no parameters. -needs_function1(callback1) - -needs_function1(callback2) - -# This should generate an error because the return -# type of callback3 doesn't match. -needs_function1(callback3) - -# This should generage an error because callback4 -# takes too many parameters. -needs_function1(callback4) - -needs_function1(callback5) -needs_function1(callback6) - -# This should fail because the parameter is the -# wrong type. 
-needs_function1(callback7) - -# This should fail because the parameter is the -# wrong type. -needs_function1(callback8) - -needs_function1(callback9) +def function_with_kwargs(**kwargs: List[str]) -> Dict[str, List[str]]: + return kwargs diff --git a/packages/pyright-internal/src/tests/samples/function10.py b/packages/pyright-internal/src/tests/samples/function10.py index 4ec95f69d97b..1956db8a8557 100644 --- a/packages/pyright-internal/src/tests/samples/function10.py +++ b/packages/pyright-internal/src/tests/samples/function10.py @@ -1,35 +1,20 @@ -# This sample tests the handling of unpack operators -# used in argument expressions when used in conjunction with -# Tuples and *args parameters. +# This sample tests bidirectional type inference for a function when +# a union includes a "bare" TypeVar and another (non-generic) type. +from dataclasses import dataclass +from typing import Generic, Sequence, TypeVar, Union -from typing import Tuple +T = TypeVar("T") -def foo1(a: int, b: int): - pass +@dataclass +class Container(Generic[T]): + values: Sequence[Union[float, T]] -def foo2(*args: int): - pass +def create_container(values: Sequence[Union[float, T]]) -> Container[T]: + return Container(values) -fixed_tuple_0 = () -foo1(*fixed_tuple_0, 2) -foo2(*fixed_tuple_0, 2) - -fixed_tuple_1 = (1,) -foo1(*fixed_tuple_1, 2) -foo2(*fixed_tuple_1, 2) - -fixed_tuple_3 = (1, 3, 5) - -# This should generate an error because there -# are too many parameters. -foo1(*fixed_tuple_3, 2) -foo2(*fixed_tuple_3, 2) - -homogen_tuple: Tuple[int, ...] 
= (1, 5, 3) - -foo2(*homogen_tuple) -foo2(*homogen_tuple, 2) +arg: Sequence[Union[float, int]] = (1, 2.0) +x: Container[int] = create_container(arg) diff --git a/packages/pyright-internal/src/tests/samples/function11.py b/packages/pyright-internal/src/tests/samples/function11.py index f182e07099c9..d8c85da48695 100644 --- a/packages/pyright-internal/src/tests/samples/function11.py +++ b/packages/pyright-internal/src/tests/samples/function11.py @@ -1,17 +1,18 @@ -# This sample tests the ability of the type checker to -# deal with circular references in return types. +# This sample tests the case where a function type is assigned to another +# and the source contains parameters that are annotated as literals and +# the destination has corresponding TypeVars. +from typing import Callable, TypeVar, Literal -class Foo1: - # This should generate an error because 'dict' is - # a forward reference, so it refers to the function - # itself. - def dict(self) -> "dict": - # This should generate an error because the return - # type doesn't match. - return {} +_A = TypeVar("_A") -class Foo2: - def dict(self) -> dict: - return {} +def wrapper(fn: Callable[[_A], int]) -> _A: + ... + + +def f3(a: Literal[0]) -> int: + ... + + +wrapper(f3) diff --git a/packages/pyright-internal/src/tests/samples/function14.py b/packages/pyright-internal/src/tests/samples/function14.py deleted file mode 100644 index 624a881990b1..000000000000 --- a/packages/pyright-internal/src/tests/samples/function14.py +++ /dev/null @@ -1,12 +0,0 @@ -# This sample tests the case where a generic function return type -# is handled correctly when its result is assigned to a LHS with -# an expected type that is a union (in this case, "msg" has a type -# of Union[str, None] and "get" returns the type Union[_VT_co, _T]. 
- -from typing import Optional - - -def f(key: str, msg: Optional[str]) -> str: - if msg is None: - msg = {"a": "b"}.get(key, "c") - return msg diff --git a/packages/pyright-internal/src/tests/samples/function15.py b/packages/pyright-internal/src/tests/samples/function15.py deleted file mode 100644 index 6861efb6eed2..000000000000 --- a/packages/pyright-internal/src/tests/samples/function15.py +++ /dev/null @@ -1,13 +0,0 @@ -# This test validates that a function can be treated as an object -# for type checking purposes. - -from typing import Hashable - - -def func1(a: int) -> int: - return a - - -x: object = func1 - -y: Hashable = func1 diff --git a/packages/pyright-internal/src/tests/samples/function2.py b/packages/pyright-internal/src/tests/samples/function2.py index 9de623ddb745..7c1091de4f54 100644 --- a/packages/pyright-internal/src/tests/samples/function2.py +++ b/packages/pyright-internal/src/tests/samples/function2.py @@ -1,117 +1,7 @@ -# This sample tests function parameter matching logic. +# This sample tests the case where a param with no default +# arg value can follow a param with a default arg value +# if they are both followed by a vararg param. 
-from typing import Any, Dict, List - - -def func1(a: int, *b: int): - pass - - -func1(3) -func1(3, 4) -func1(3, *[1, 2, 3]) - -# This should generate an error -func1(3, "hello") - -# This should generate an error -func1(3, 5, 2, "str") - -# This should generate an error -func1("hello", 3) - -# This should generate an error -str_list = ["he", "2", "3"] -func1(3, *str_list) - - -def func2(a: str, **b: int): - pass - - -func2("hi") -func2("hi", b=3, c=4, d=5) - -str_dict = {"a": "3", "b": "2"} -func2("hi", **str_dict) - - -# This should generate a type error -func2("hi", 3) - -# This should generate a type error -func2("hi", b="hi") - - -def func4(*args: int): - pass - - -def func5(a: int, *args): - pass - - -tuple1 = (2, 3) -func4(*tuple1) -func5(*tuple1) - -# This should generate an error because a is assigned twice. -func2(a="", a="") - -# This should generate an error because c is assigned twice. -func2("", c=4, d=5, c=5) - - -def func6(param1: int, param2: str): +def f(*a, b=1, c): pass - - -def func7(*args: Any, param0: int, param1: int, param2: str): - func6(*args, param1=param1, param2=param2) - - func6(param0, param2=param2) - - # This should generate an error because param0 has no match. - func6(param0, param1=param1) - - -def func8( - y: str, - z: bool = ..., -) -> None: - ... - - -kwargs1: Dict[str, int] = {} -# This should generate an error because int is not compatible with str. -func8(z=False, **kwargs1) - - -class MyStr(str): - ... - - -kwargs2: Dict[MyStr, MyStr] = {} -func8(z=False, **kwargs2) - - -def func9( - x: int, - y: str, - *, - a: str = ..., - b: str, - c: str, -) -> None: - ... 
- - -kwargs3: Dict[str, str] = {} -func9(0, "", **kwargs3) - -args4: List[str] = ["hi"] -func9(0, *args4, **kwargs3) - -# This should generate an error -func9(*args4, **kwargs3) diff --git a/packages/pyright-internal/src/tests/samples/function3.py b/packages/pyright-internal/src/tests/samples/function3.py index 15100f00e84c..c417f7ab170a 100644 --- a/packages/pyright-internal/src/tests/samples/function3.py +++ b/packages/pyright-internal/src/tests/samples/function3.py @@ -1,155 +1,82 @@ -# This sample tests the Python 3.8 "positional-only parameter" feature. +# This sample tests that the type checker properly handles +# types of args and kwargs correctly. -from typing import Any, Dict, Protocol, Tuple +from typing import Any, Dict, Hashable, Mapping, Protocol, Tuple -def f0(a: int, b: int): - return 3 - - -def f1(a: int, b: int, /): - return 3 - -# This should generate an error because only one -# '/' parameter is allowed. -def f2(a: int, /, b: int, /): - return 3 - -def f3(a: int, /, b: int): - return 3 - -def f4(a: int, /, b: int, *, c: int): - return 3 - -# This should generate an error because a '/' -# parameter shouldn't appear after '*'. -def f5(a: int, *, b: int, /, c: int): - return 3 - -# This should generate an error because a '/' -# parameter cannot be the first in a param list. -def f6(/, a: int, *, b: int): - return 3 - - -f0(2, 3) - -f1(2, 3) - -# This should generate an error because b -# is a position-only parameter. -f1(2, b=3) - -# This should generate an error because a and b -# are position-only parameters. -f1(a=2, b=3) - -f2(2, 3) - -# This should generate an error. -f2(a=2, b=3) - -f3(2, 3) -f3(2, b=3) - -# This should generate 1 error because a is a -# position-only parameter. -f3(a=2, b=3) - -f4(1, 2, c=3) -f4(1, b=2, c=3) - -# This should generate an error because c is a -# keyword-only parameter. -f4(1, 2, 3) - -# This should generate an error because a is a -# positional-only parameter. 
-f4(a=1, b=2, c=3) - -# This will an error because of the bad -# declaration. Test to make sure we don't crash. -f5(1, b=2, c=3) - -f6(1, b=2) -f6(a=1, b=2) - -class A: - def f(self, g: bool = False, /, **kwargs) -> None: - ... - -a = A() +def requires_hashable_tuple(p1: Tuple[Hashable, ...]): + ... -a.f(hello="world") +def requires_hashable_dict(p1: Dict[str, Hashable]): + ... -def f7(name: str, /, **kwargs: Any): - return 3 -f7("hi", name=3) +def test_args(*args: Hashable): + if args: + aaa = list(args) + bbb = tuple(aaa) + args = bbb + requires_hashable_tuple(args) -# This should generate an error -f7("hi", name=3, name=4) +def test_kwargs(**kwargs: Hashable): + requires_hashable_dict(kwargs) -class P1(Protocol): - def f(self, x: Any, /): - ... +class StrSubclass(str): + ... -class C1: - def f( - self, - y: Any, - ): - ... +def test_kwargs2( + a: Mapping[str, Any], + b: Mapping[Any, Hashable], + c: Dict[StrSubclass, Hashable], + d: int, + e: Mapping[int, Hashable], + f: Tuple[str, ...], +): + test_kwargs(**a) + test_kwargs(**b) + test_kwargs(**c) -c1: P1 = C1() + # This should generate an error + test_kwargs(**d) + # This should generate an error + test_kwargs(**e) -class P2(Protocol): - def f(self, x: Any): - ... + # This should generate an error + test_kwargs(**f) -class C2: - def f(self, y: Any, /): +class Callback1(Protocol): + def __call__(self) -> None: ... -# This should generate an error -c2: P2 = C2() - - -def f8(a: int, b: int = 3, /): +def func1( + value: str = ..., + *args: object, +) -> None: ... -kwargs: Dict[str, Any] = {} - -# This should generate an error -f8() - -# This should generate an error -f8(**kwargs) - - -f8(0, **kwargs) - -def f9(*, c: int): - pass - -# This should generate an error because it is missing a keyword -# argument for keyword parameter "c". -f9(*[1, 2, 3]) +def func2( + value: str = ..., + **kwargs: object, +) -> None: + ... 
-# This should generate an error because "/" cannot be used after "*args" -def f10(x, *args, /, y): - pass +def func3( + value: str = ..., + *args: object, + **kwargs: object, +) -> None: + ... -# This should generate an error because "*" cannot be used after "*args" -def f11(x, *args, *, y): - pass +v1: Callback1 = func1 +v2: Callback1 = func2 +v3: Callback1 = func3 diff --git a/packages/pyright-internal/src/tests/samples/function4.py b/packages/pyright-internal/src/tests/samples/function4.py index b9976045083c..ede2a43dc8f5 100644 --- a/packages/pyright-internal/src/tests/samples/function4.py +++ b/packages/pyright-internal/src/tests/samples/function4.py @@ -1,13 +1,13 @@ -# This sample tests that the TypeVar matching logic for -# functions is working correctly. +# This sample tests assignment of a function that uses +# a synthesized TypeVar type for the "self" parameter. -from typing import List +from typing import Callable -a: List[str] = ["a", "bc"] -# This should work because the "sorted" is defined -# with the first parameter of Iterable[_T] and the -# 'key' parameter Callable[[_T], Any]. Since "len" -# is a function that takes a "Sized" and "str" is -# a "Sized", the result of this should be List[str]. -b: List[str] = sorted(a, key=len) +class TestClass: + def method(self) -> None: + pass + + +# This should generate an error. +func1: Callable[[float], None] = TestClass.method diff --git a/packages/pyright-internal/src/tests/samples/function5.py b/packages/pyright-internal/src/tests/samples/function5.py deleted file mode 100644 index 9e10672aa2c1..000000000000 --- a/packages/pyright-internal/src/tests/samples/function5.py +++ /dev/null @@ -1,12 +0,0 @@ -# This sample tests that args and kwargs parameters are -# properly typed. 
- -from typing import Tuple, Dict, List - - -def function_with_args(*args: str) -> Tuple[str, ...]: - return args - - -def function_with_kwargs(**kwargs: List[str]) -> Dict[str, List[str]]: - return kwargs diff --git a/packages/pyright-internal/src/tests/samples/function6.py b/packages/pyright-internal/src/tests/samples/function6.py index 7c1091de4f54..5a39fc30f481 100644 --- a/packages/pyright-internal/src/tests/samples/function6.py +++ b/packages/pyright-internal/src/tests/samples/function6.py @@ -1,7 +1,24 @@ -# This sample tests the case where a param with no default -# arg value can follow a param with a default arg value -# if they are both followed by a vararg param. +# This sample tests the case where a generic function return type +# is handled correctly when its result is assigned to a LHS with +# an expected type that is a union (in this case, "msg" has a type +# of Union[str, None] and "get" returns the type Union[_VT_co, _T]. +from typing import Optional -def f(*a, b=1, c): - pass + +def f(key: str, msg: Optional[str]): + if msg is None: + my_dict = {"a": "b"} + msg = my_dict.get(key, "c") + + # Without bidirectional type inference, the + # revealed type will be "str", but since "msg" + # has a declared type, it will be used in this + # case to inform the type "str | None", which + # is a valid solution for the constraint solver. + # Unfortunately, it's probably not the answer + # the user expects in this case. 
+ reveal_type(msg, expected_text="str | None") + + x = my_dict.get(key, "c") + reveal_type(x, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/function7.py b/packages/pyright-internal/src/tests/samples/function7.py index 6d888d5185ec..6861efb6eed2 100644 --- a/packages/pyright-internal/src/tests/samples/function7.py +++ b/packages/pyright-internal/src/tests/samples/function7.py @@ -1,95 +1,13 @@ -# This sample tests handling of unpack operators used -# for arguments that are of a specified length (specifically, -# tuples with a specified list of elements types). +# This test validates that a function can be treated as an object +# for type checking purposes. -from typing import Literal, NamedTuple, List, Tuple +from typing import Hashable -X = NamedTuple("X", [("a", int), ("b", str), ("c", str)]) -q0: List[Tuple[int, str, str]] = [(1, "", ""), (2, "", "")] +def func1(a: int) -> int: + return a -[X(*item) for item in q0] +x: object = func1 -q1: List[Tuple[int, str, float]] = [(1, "a", 3), (2, "b", 4), (3, "c", 5)] - -# This should generate an error because the items in q1 are not the -# right type for the X constructor. -[X(*item) for item in q1] - - -q2: List[Tuple[int, str]] = [(1, "1"), (2, "2"), (3, "3")] - -# This should generate an error because the items in q2 contain only -# two elements, and we need three to populate all three parameters -# in the X constructor. -[X(*item) for item in q2] - - -q3: List[Tuple[int, str, str, float]] = [ - (1, "a", "3", 4), - (2, "b", "4", 5), - (3, "c", "5", 6), -] - -# This should generate an error because the items in q3 contain -# four elements, and we need three to populate all parameters -# in the X constructor. -[X(*item) for item in q3] - - -q4: List[Tuple[int, ...]] = [ - (1, 3), - (2, 5), - (3, 6), -] - -# This should generate two errors because int isn't assignable to parameter -# b or c. 
-[X(*item) for item in q4] - - -Y = NamedTuple("Y", [("a", str), ("b", str), ("c", str)]) - -q5: List[Tuple[str, ...]] = [ - ("a", "b"), - ("a", "b"), -] - -[Y(*item) for item in q5] - - -class Z(NamedTuple): - a: list[str] - b: list[int] - - -q6 = Z(["1"], [3]) - -for a, b in zip(*q6): - t1: Literal["str"] = reveal_type(a) - t2: Literal["int"] = reveal_type(b) - - -def func1(a: list[str], c: list[int]): - ... - - -func1(*q6) - - -class ABC(NamedTuple): - a: float - b: float - c: float - - def to_rgba(self) -> "ABC": - return ABC(*self) - - -class AB(NamedTuple): - a: float - b: float - - def to_abc(self) -> ABC: - return ABC(*self, 1) +y: Hashable = func1 diff --git a/packages/pyright-internal/src/tests/samples/function8.py b/packages/pyright-internal/src/tests/samples/function8.py index c417f7ab170a..839f5f38eaeb 100644 --- a/packages/pyright-internal/src/tests/samples/function8.py +++ b/packages/pyright-internal/src/tests/samples/function8.py @@ -1,82 +1,38 @@ -# This sample tests that the type checker properly handles -# types of args and kwargs correctly. +# This sample verifies that functions are treated as though they +# derive from object. -from typing import Any, Dict, Hashable, Mapping, Protocol, Tuple +from typing import Callable, Union, overload -def requires_hashable_tuple(p1: Tuple[Hashable, ...]): +@overload +def func1(a: str) -> str: ... -def requires_hashable_dict(p1: Dict[str, Hashable]): +@overload +def func1(a: int) -> int: ... -def test_args(*args: Hashable): - if args: - aaa = list(args) - bbb = tuple(aaa) - args = bbb - requires_hashable_tuple(args) - - -def test_kwargs(**kwargs: Hashable): - requires_hashable_dict(kwargs) - - -class StrSubclass(str): +def func1(a: Union[str, int]) -> Union[str, int]: ... 
-def test_kwargs2( - a: Mapping[str, Any], - b: Mapping[Any, Hashable], - c: Dict[StrSubclass, Hashable], - d: int, - e: Mapping[int, Hashable], - f: Tuple[str, ...], -): - test_kwargs(**a) - test_kwargs(**b) - test_kwargs(**c) - - # This should generate an error - test_kwargs(**d) - - # This should generate an error - test_kwargs(**e) - - # This should generate an error - test_kwargs(**f) - - -class Callback1(Protocol): - def __call__(self) -> None: - ... - - -def func1( - value: str = ..., - *args: object, -) -> None: +def func2(a: Union[str, int]) -> Union[str, int]: ... -def func2( - value: str = ..., - **kwargs: object, -) -> None: +def takes_object(val: object) -> None: ... -def func3( - value: str = ..., - *args: object, - **kwargs: object, -) -> None: - ... +takes_object(func1) +takes_object(func2) + +def func3(b: Callable[[str], bool]) -> None: + if b == func1: + pass -v1: Callback1 = func1 -v2: Callback1 = func2 -v3: Callback1 = func3 + if b != func2: + pass diff --git a/packages/pyright-internal/src/tests/samples/function9.py b/packages/pyright-internal/src/tests/samples/function9.py index ede2a43dc8f5..230e4ec23c9b 100644 --- a/packages/pyright-internal/src/tests/samples/function9.py +++ b/packages/pyright-internal/src/tests/samples/function9.py @@ -1,13 +1,75 @@ -# This sample tests assignment of a function that uses -# a synthesized TypeVar type for the "self" parameter. +# This sample tests the case where a function type is compared to another +# function type where one contains a positional-only marker and the +# other does not. -from typing import Callable +from typing import Protocol -class TestClass: - def method(self) -> None: +class _Writer1(Protocol): + def write(self, a: str, b: str) -> object: pass -# This should generate an error. 
-func1: Callable[[float], None] = TestClass.method +class Writer1: + def write(self, a: str, /, b: str): + pass + + +def make_writer1(w: _Writer1): + pass + + +# This should generate an error because the source function is positional-only. +make_writer1(Writer1()) + + +class _Writer2(Protocol): + def write(self, a: str, /, b: str) -> object: + pass + + +class Writer2: + def write(self, a: str, b: str): + pass + + +def make_writer2(w: _Writer2): + pass + + +make_writer2(Writer2()) + + +class _Writer3(Protocol): + def write(self, a: str, b: str) -> object: + pass + + +class Writer3: + def write(self, __a: str, b: str): + pass + + +def make_writer3(w: _Writer3): + pass + + +# This should generate an error because the source function is positional-only. +make_writer3(Writer3()) + + +class _Writer4(Protocol): + def write(self, __a: str, b: str) -> object: + pass + + +class Writer4: + def write(self, a: str, b: str): + pass + + +def make_writer4(w: _Writer4): + pass + + +make_writer4(Writer4()) diff --git a/packages/pyright-internal/src/tests/samples/functionAnnotation1.py b/packages/pyright-internal/src/tests/samples/functionAnnotation1.py index 5f49d9d0c061..d3b807c49fbe 100644 --- a/packages/pyright-internal/src/tests/samples/functionAnnotation1.py +++ b/packages/pyright-internal/src/tests/samples/functionAnnotation1.py @@ -1,8 +1,8 @@ # This sample tests support for comment-style function annotations. -# pyright: strict +# pyright: strict, reportMissingParameterType=false -from typing import Optional +from typing import Optional, Literal as _Literal, Union def func1a(a, b): @@ -47,6 +47,15 @@ def func1f(a): class Foo: pass + def func1g(*args, **kwargs): # type: (*int, **float) -> int return sum(args) + sum(round(kwarg) for kwarg in kwargs.values()) + + +def func1h( + a, # type: _Literal["{", "}"] + b, # type: Union[_Literal["%"], _Literal["{"], _Literal["$"]] +): + # type: (...) 
-> str + return "" diff --git a/packages/pyright-internal/src/tests/samples/function13.py b/packages/pyright-internal/src/tests/samples/functionMember1.py similarity index 100% rename from packages/pyright-internal/src/tests/samples/function13.py rename to packages/pyright-internal/src/tests/samples/functionMember1.py diff --git a/packages/pyright-internal/src/tests/samples/functionMember2.py b/packages/pyright-internal/src/tests/samples/functionMember2.py new file mode 100644 index 000000000000..75e221c1fdae --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/functionMember2.py @@ -0,0 +1,45 @@ +# This sample tests the special-case handling of the __self__ +# attribute for a function when it is bound to a class or object. + +# pyright: reportFunctionMemberAccess=error + + +def func1(a: int) -> str: + ... + + +# This should generate an error because func1 isn't +# bound to a "self". +s1 = func1.__self__ + + +class A: + def method1(self) -> None: + ... + + @classmethod + def method2(cls) -> None: + ... + + @staticmethod + def method3() -> None: + ... + + +s2 = A().method1.__self__ +reveal_type(s2, expected_text="A") + +s3 = A.method2.__self__ +reveal_type(s3, expected_text="Type[A]") + +s3 = A.method2.__self__ +reveal_type(s3, expected_text="Type[A]") + +s4 = A().method2.__self__ +reveal_type(s4, expected_text="Type[A]") + +# This should generate an error because method3 is static. +s5 = A().method3.__self__ + +# This should generate an error because method3 is static. +s6 = A.method3.__self__ diff --git a/packages/pyright-internal/src/tests/samples/generators1.py b/packages/pyright-internal/src/tests/samples/generators1.py index 1ffb3a85672f..653f18a84d56 100644 --- a/packages/pyright-internal/src/tests/samples/generators1.py +++ b/packages/pyright-internal/src/tests/samples/generators1.py @@ -1,7 +1,7 @@ # This sample tests various type checking operations relating to # generator functions (those with a "yield" method). 
-from typing import Any, Generator, Dict, Iterator +from typing import Any, Generator, Dict, Iterable, Iterator, List, TypedDict class ClassA: @@ -56,7 +56,7 @@ def generator3() -> Generator[ClassA, int, Any]: yield 3 -def generator4() -> Iterator[ClassA]: +def generator4() -> Iterable[ClassA]: yield ClassA() return True @@ -91,3 +91,32 @@ def generator8() -> Iterator[Dict[str, int]]: # This tests the bidirectional type inference # of dict types. It should not generate an error. yield {"hello": 3} + + +# This should generate an error. +def generator9() -> int: + yield None + return 3 + + +# This should generate an error. +async def generator10() -> int: + yield None + return 3 + + +# This should generate an error. +def generator11() -> List[int]: + yield 3 + + +class TD1(TypedDict): + x: str + + +def generator12() -> Generator[TD1, None, None]: + yield {"x": "x"} + +def generator13() -> Generator[TD1, None, None]: + # This should generate an error. + yield {"y": "x"} diff --git a/packages/pyright-internal/src/tests/samples/generators11.py b/packages/pyright-internal/src/tests/samples/generators11.py index 8fc4bc77a7b9..01f024809b38 100644 --- a/packages/pyright-internal/src/tests/samples/generators11.py +++ b/packages/pyright-internal/src/tests/samples/generators11.py @@ -1,6 +1,6 @@ # This sample tests the return type inference for a generator. -from typing import Generator, Literal +from typing import Generator def func1() -> Generator[int, None, str]: @@ -14,7 +14,14 @@ def func2() -> Generator[int, int, None]: x = [(yield from func1()) for lel in range(5)] v1 = yield from func1() - t_v1: Literal["str"] = reveal_type(v1) + reveal_type(v1, expected_text="str") v2 = yield 4 - t_v2: Literal["int"] = reveal_type(v2) + reveal_type(v2, expected_text="int") + + +def func3(): + [x for x in (yield [[[1]], [[2]], [[3]]]) for y in x] + + # This should generate an error. 
+ [x for x in [[[1]], [[2]], [[3]]] for y in (yield x)] diff --git a/packages/pyright-internal/src/tests/samples/generators12.py b/packages/pyright-internal/src/tests/samples/generators12.py index 5be486eb2a45..0f2777ebd773 100644 --- a/packages/pyright-internal/src/tests/samples/generators12.py +++ b/packages/pyright-internal/src/tests/samples/generators12.py @@ -1,7 +1,7 @@ # This sample tests the inference of types relating to # "yield from" statements. -from typing import Generator, Literal +from typing import Generator class Yielder: @@ -21,5 +21,5 @@ def collect1() -> Generator[str, None, bool]: def collect2(): y = Yielder() z = yield from y - t_z: Literal["bool"] = reveal_type(z) + reveal_type(z, expected_text="bool") return z diff --git a/packages/pyright-internal/src/tests/samples/generators13.py b/packages/pyright-internal/src/tests/samples/generators13.py index f38a0522f12d..7260309d1128 100644 --- a/packages/pyright-internal/src/tests/samples/generators13.py +++ b/packages/pyright-internal/src/tests/samples/generators13.py @@ -1,7 +1,7 @@ # This sample tests async generator and non-generator functions. 
import asyncio -from typing import AsyncGenerator, AsyncIterator, List, Literal +from typing import AsyncGenerator, AsyncIterator, List async def get_data() -> List[int]: @@ -18,14 +18,14 @@ async def generate(nums: List[int]) -> AsyncGenerator[str, None]: async def get_generator1() -> AsyncGenerator[str, None]: data = await get_data() v1 = generate(data) - t_v1: Literal["AsyncGenerator[str, None]"] = reveal_type(v1) + reveal_type(v1, expected_text="AsyncGenerator[str, None]") return v1 async def get_generator2() -> AsyncIterator[str]: data = await get_data() v1 = generate(data) - t_v1: Literal["AsyncGenerator[str, None]"] = reveal_type(v1) + reveal_type(v1, expected_text="AsyncGenerator[str, None]") return v1 @@ -44,18 +44,18 @@ def get_generator4() -> AsyncGenerator[int, None]: async def demo_bug1() -> None: v1 = get_generator1() - t_v1: Literal["Coroutine[Any, Any, AsyncGenerator[str, None]]"] = reveal_type(v1) + reveal_type(v1, expected_text="Coroutine[Any, Any, AsyncGenerator[str, None]]") gen = await v1 - t_gen: Literal["AsyncGenerator[str, None]"] = reveal_type(gen) + reveal_type(gen, expected_text="AsyncGenerator[str, None]") async for s in gen: print(s) async def demo_bug2() -> None: v1 = get_generator2() - t_v1: Literal["Coroutine[Any, Any, AsyncIterator[str]]"] = reveal_type(v1) + reveal_type(v1, expected_text="Coroutine[Any, Any, AsyncIterator[str]]") gen = await v1 - t_gen: Literal["AsyncIterator[str]"] = reveal_type(gen) + reveal_type(gen, expected_text="AsyncIterator[str]") async for s in gen: print(s) diff --git a/packages/pyright-internal/src/tests/samples/generators14.py b/packages/pyright-internal/src/tests/samples/generators14.py new file mode 100644 index 000000000000..e9d5910b61fc --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/generators14.py @@ -0,0 +1,19 @@ +# This sample tests the inferred type of async and sync generators. + + +async def foo() -> int: + ... 
+ + +async def main() -> None: + v1 = (x for x in [2, 3] if x > 3) + reveal_type(v1, expected_text="Generator[int, None, None]") + + v2 = (x for x in [2, 3] if await foo()) + reveal_type(v2, expected_text="AsyncGenerator[int, None]") + + v3 = (x for x in [2, 3]) + reveal_type(v3, expected_text="Generator[int, None, None]") + + v4 = (await foo() for _ in [2, 3]) + reveal_type(v4, expected_text="AsyncGenerator[int, None]") diff --git a/packages/pyright-internal/src/tests/samples/generators15.py b/packages/pyright-internal/src/tests/samples/generators15.py new file mode 100644 index 000000000000..195b9df7af7a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/generators15.py @@ -0,0 +1,26 @@ +# This sample tests that the parser emits an error when a generator +# is used as an argument without parentheses. + + +from typing import Any + + +def func1(*x: Any) -> None: + pass + +func1(x for x in [0, 1]) + +func1((x for x in [0, 1]), 1) + +func1((x for x in [0, 1]),) + +func1(1, (x for x in [0, 1])) + +# This should generate an error. +func1(x for x in [0, 1], 1) + +# This should generate an error. +func1(x for x in [0, 1],) + +# This should generate an error. +func1(1, x for x in [0, 1]) diff --git a/packages/pyright-internal/src/tests/samples/generic1.py b/packages/pyright-internal/src/tests/samples/generic1.py new file mode 100644 index 000000000000..8a776100dfa6 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/generic1.py @@ -0,0 +1,41 @@ +# This sample tests error handling for the "Generic" special form. + +from typing import Generic, TypeVar + +T = TypeVar("T") + +# This should generate an error. +class Class1(Generic): + ... + +# This should generate two errors (a parse error and a semantic error). +class Class2(Generic[]): + ... + +# This should generate an error. +class Class3(Generic[int]): + ... + +# This should generate two errors. +class Class4(Generic[T, T, T]): + ... + + +# This should generate an error. 
+def func1(x: Generic[T]) -> T: + ... + +# This should generate an error. +def func2(x: T) -> Generic[T]: + ... + +class Class5(Generic[T]): + # This should generate an error. + x: Generic[T] + + +def func3(x: type): + if x is Generic: + return + + diff --git a/packages/pyright-internal/src/tests/samples/genericTypes10.py b/packages/pyright-internal/src/tests/samples/genericTypes10.py index dda42af67a80..e95b49d15469 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes10.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes10.py @@ -1,7 +1,7 @@ # This sample tests that a Generic base class overrides the type parameter # ordering of other type parameters. -from typing import Container, Generic, Iterable, Mapping, Protocol, TypeVar +from typing import Container, Generic, Iterable, Iterator, Mapping, Protocol, TypeVar _T1 = TypeVar("_T1") _T2 = TypeVar( @@ -16,6 +16,9 @@ def __init__(self, a: _T1, b: _T2): def foo(self, a: _T1, b: _T2) -> _T2: return b + def __iter__(self) -> Iterator[int]: + ... 
+ a: Foo[int, str] = Foo(2, "") b: str = a.foo(4, "") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes11.py b/packages/pyright-internal/src/tests/samples/genericTypes11.py index d1f35214d527..7f45d027bfd0 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes11.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes11.py @@ -4,12 +4,12 @@ # pyright: strict -from typing import Callable, Iterator, TypeVar +from typing import Callable, Iterator, Protocol, TypeVar -_T = TypeVar("_T") +_T = TypeVar("_T", covariant=True) -class Foo(Iterator[_T]): +class Foo(Iterator[_T], Protocol): pass diff --git a/packages/pyright-internal/src/tests/samples/genericTypes13.py b/packages/pyright-internal/src/tests/samples/genericTypes13.py index 4e483521492d..d2ed47a00af3 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes13.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes13.py @@ -1,7 +1,7 @@ # This sample tests the type checker's ability to do bidirectional # type inference when the expected type is defined by a bound TypeVar. -from typing import Dict, Literal, TypeVar +from typing import Dict, TypeVar class A: @@ -24,4 +24,4 @@ def testFunc(value: Dict[str, _T_A]) -> _T_A: x = testFunc({"b": B(), "c": C()}) -t1: Literal["B | C"] = reveal_type(x) +reveal_type(x, expected_text="B | C") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes15.py b/packages/pyright-internal/src/tests/samples/genericTypes15.py index b082b92cd7c0..1110c483df32 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes15.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes15.py @@ -3,11 +3,11 @@ # the "keys" method on "dict") based on the provided "self" # argument. -from typing import Dict, Literal +from typing import Dict foo: Dict[str, str] = {} # This should not result in an "Unknown", so no # error should be generated. 
result = dict.keys(foo) -t1: Literal["_dict_keys[Unknown, Unknown]"] = reveal_type(result) +reveal_type(result, expected_text="dict_keys[Unknown, Unknown]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes19.py b/packages/pyright-internal/src/tests/samples/genericTypes19.py index fb32f93537de..314a84421eb5 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes19.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes19.py @@ -6,7 +6,7 @@ # no "unknown" types remaining in this file. # pyright: strict -from typing import Generic, Literal, TypeVar +from typing import Generic, TypeVar _A = TypeVar("_A") _B = TypeVar("_B") @@ -28,5 +28,5 @@ def value_b(self): foo = Foo(27) -t_a: Literal["int"] = reveal_type(foo.value_a) -t_b: Literal["str"] = reveal_type(foo.value_b) +reveal_type(foo.value_a, expected_text="int") +reveal_type(foo.value_b, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes20.py b/packages/pyright-internal/src/tests/samples/genericTypes20.py index fa6502432c83..b4acc93a7fc9 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes20.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes20.py @@ -4,7 +4,7 @@ # We use "strict" here because we want to ensure that there are # no "unknown" types remaining in this file. -# pyright: strict, reportUnknownParameterType=false +# pyright: strict, reportUnknownParameterType=false, reportMissingParameterType=false from logging import Handler, NOTSET @@ -38,3 +38,7 @@ def test_function(a: int, b: str): str_value_2 = foo2.value_a int_value_2 = foo2.value_b test_function(int_value_2, str_value_2) + +# This should generate an error because a pseudo-generic +# class is not actually generic. 
+foo3: Foo[int, str, int] diff --git a/packages/pyright-internal/src/tests/samples/genericTypes28.py b/packages/pyright-internal/src/tests/samples/genericTypes28.py index 50bc36bbdf23..060afef69290 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes28.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes28.py @@ -1,7 +1,7 @@ # This sample tests that Optional types can be matched # to Type[T] expressions. -from typing import Generic, Literal, Optional, Type, TypeVar +from typing import Generic, Optional, Type, TypeVar _T1 = TypeVar("_T1") _T2 = TypeVar("_T2", bound=None) @@ -42,10 +42,10 @@ class Bar(Foo): def bar(value: _T1) -> Type[Foo[_T1]]: baz = Foo(value) qux = type(baz) - t1: Literal["Type[Foo[_T1@bar]]"] = reveal_type(qux) + reveal_type(qux, expected_text="Type[Foo[_T1@bar]]") return qux d = Bar.get() -t_d: Literal["Type[Bar]"] = reveal_type(d) -t_e: Literal["Type[Bar]"] = reveal_type(Bar.get()) +reveal_type(d, expected_text="Type[Bar]") +reveal_type(Bar.get(), expected_text="Type[Bar]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes29.py b/packages/pyright-internal/src/tests/samples/genericTypes29.py index ba471e741e75..319e52022b96 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes29.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes29.py @@ -2,7 +2,7 @@ # type derives from the expected type and both are # generic. -from typing import Mapping, Optional, Union +from typing import Iterable, Mapping, Optional, TypeVar, Union v0: Optional[Mapping[str, Union[int, str]]] = dict([("test1", 1), ("test2", 2)]) @@ -10,3 +10,15 @@ # This should generate an error because of a type mismatch. 
v2: Mapping[str, str] = dict([("test1", 1), ("test2", 2)]) + + +options: dict[Union[int, str], int] = {} +channel_types: dict[str, int] = {} + +keys = channel_types.keys() + +_T = TypeVar("_T") +_S = TypeVar("_S") + + +options.update(dict.fromkeys(keys, 1)) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes32.py b/packages/pyright-internal/src/tests/samples/genericTypes32.py index 2555645cf662..b9974205f44f 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes32.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes32.py @@ -4,14 +4,22 @@ # We need to validate that the type inference for lists # is not over-narrowing when matching these literals. -from typing import List, Tuple, TypeVar +from typing import Callable, List, Tuple, TypeVar -T = TypeVar("T") +_T = TypeVar("_T") -def extend_if(xs: List[T], ys: List[Tuple[T, bool]]) -> List[T]: +def extend_if(xs: List[_T], ys: List[Tuple[_T, bool]]) -> List[_T]: raise NotImplementedError() extend_if(["foo"], [("bar", True), ("baz", True)]) + + +def Return(value: _T) -> Callable[[_T], None]: + ... + + +def func1() -> Callable[[bool], None]: + return Return(True) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes34.py b/packages/pyright-internal/src/tests/samples/genericTypes34.py index 579d20e0ef4d..2dcab4411183 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes34.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes34.py @@ -1,7 +1,7 @@ # This sample tests the handling of generic type aliases # with constrained types. 
-from typing import Callable, Generic, TypeVar, Union +from typing import Callable, Generic, Type, TypeVar, Union T = TypeVar("T", str, bool, None) @@ -44,3 +44,11 @@ def f_union(val: Union[bool, str]) -> None: f_generic3(val) else: f_generic3(val) + + +def func1(v: T, t: Type[T]): + print(t) + + +def func2(v: T, t: Type[T]): + func1(v, t) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes36.py b/packages/pyright-internal/src/tests/samples/genericTypes36.py index f059ff4d66ff..1a0421adbc16 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes36.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes36.py @@ -3,7 +3,7 @@ # of another. The order of the constraints as they appear # within the TypeVar definition shouldn't matter. -from typing import Literal, TypeVar +from typing import TypeVar _T1 = TypeVar("_T1", float, str) @@ -13,11 +13,11 @@ def add1(a: _T1, b: _T1) -> _T1: a1 = add1(3, 5.5) -ta1: Literal["float"] = reveal_type(a1) +reveal_type(a1, expected_text="float") b1 = add1(3.3, 5) -tb1: Literal["float"] = reveal_type(b1) +reveal_type(b1, expected_text="float") c1 = add1("3", "5") -tc1: Literal["str"] = reveal_type(c1) +reveal_type(c1, expected_text="str") _T2 = TypeVar("_T2", float, int) @@ -28,8 +28,8 @@ def add2(a: _T2, b: _T2) -> _T2: a2 = add2(3, 5.5) -ta2: Literal["float"] = reveal_type(a2) +reveal_type(a2, expected_text="float") b2 = add2(3.3, 5) -tb2: Literal["float"] = reveal_type(b2) +reveal_type(b2, expected_text="float") c2 = add2(3, 5) -tc2: Literal["int"] = reveal_type(c2) +reveal_type(c2, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes37.py b/packages/pyright-internal/src/tests/samples/genericTypes37.py index 978927f4a710..f41056390609 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes37.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes37.py @@ -15,12 +15,11 @@ class Foo(Generic[_T]): def func1(self, a: _T): pass - def 
func2(self): + def func2(self, y: _T): x: int = 3 # This should generate an error self.func1(x) - y = Bar() self.func1(y) z: Any = 3 diff --git a/packages/pyright-internal/src/tests/samples/genericTypes39.py b/packages/pyright-internal/src/tests/samples/genericTypes39.py index 6be5b9de5894..382bf43e9094 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes39.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes39.py @@ -1,7 +1,7 @@ # This sample tests the handling of TypeVars defined by # a generic function. -from typing import Callable, Iterable, TypeVar +from typing import Callable, Dict, Generic, Iterable, List, Tuple, TypeVar T = TypeVar("T") R = TypeVar("R") @@ -17,9 +17,6 @@ def do_something( yield s -from typing import Dict, Generic, List, Tuple, TypeVar - - class Foo: pass diff --git a/packages/pyright-internal/src/tests/samples/genericTypes40.py b/packages/pyright-internal/src/tests/samples/genericTypes40.py index 5d8329f6f1e9..fd9732423317 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes40.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes40.py @@ -1,7 +1,9 @@ # This sample tests the type variable solving process when a # callable type is involved. -from typing import Literal +# pyright: strict + +from typing import Callable, Dict, Literal, TypeVar def filter_fn(value: object): @@ -9,10 +11,28 @@ def filter_fn(value: object): v1 = filter(filter_fn, [1, 2, 3]) -t1: Literal["filter[int]"] = reveal_type(v1) +reveal_type(v1, expected_text="filter[int]") v2 = filter(filter_fn, {1, 2}) -t2: Literal["filter[int]"] = reveal_type(v2) +reveal_type(v2, expected_text="filter[int]") v3 = filter(filter_fn, {1: 2}) -t3: Literal["filter[int]"] = reveal_type(v3) +reveal_type(v3, expected_text="filter[int]") + + +_T = TypeVar("_T") +Animal = Literal["cat"] + + +def func(v: Callable[[], _T]) -> _T: + ... + + +x1: Dict[Animal, int] = func(lambda: {"cat": 0}) + + +def func1(factory: Callable[[], _T]) -> _T: + ... 
+ + +x2: set[int] = func1(lambda: set()) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes42.py b/packages/pyright-internal/src/tests/samples/genericTypes42.py index 1c5467daa2a5..24a2d70d3e2b 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes42.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes42.py @@ -1,7 +1,7 @@ # This sample tests the instantiation of classes via a constructor # when the type of the class is a TypeVar. -from typing import Literal, Type, TypeVar +from typing import Type, TypeVar class Foo: @@ -17,7 +17,7 @@ def func1(cls: Type[_TFoo]) -> _TFoo: y = cls() x = cls(1, "") - t1: Literal["_TFoo@func1"] = reveal_type(x) + reveal_type(x, expected_text="_TFoo@func1") return x @@ -29,5 +29,5 @@ def func2(cls: Type[_T]) -> _T: y = cls(1, "") x = cls() - t1: Literal["_T@func2"] = reveal_type(x) + reveal_type(x, expected_text="_T@func2") return x diff --git a/packages/pyright-internal/src/tests/samples/genericTypes45.py b/packages/pyright-internal/src/tests/samples/genericTypes45.py index 59a19787f028..b59e480f345a 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes45.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes45.py @@ -3,7 +3,7 @@ # the method but is specialized implicitly via the arguments # to the method. -from typing import Generic, Literal, TypeVar +from typing import Generic, TypeVar _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") @@ -28,16 +28,16 @@ class FooSub2(Foo[int]): def test1(val_str: str, val_int: int): - t1_0: Literal["Foo[str]"] = reveal_type(Foo.func1(val_str)) - t1_1: Literal["Foo[str]"] = reveal_type(FooSub1.func1(val_str)) - t1_2: Literal["Foo[int]"] = reveal_type(FooSub2.func1(val_int)) + reveal_type(Foo.func1(val_str), expected_text="Foo[str]") + reveal_type(FooSub1.func1(val_str), expected_text="Foo[str]") + reveal_type(FooSub2.func1(val_int), expected_text="Foo[int]") # This should generate an error because the argument type doesn't match. 
FooSub2.func1(val_str) - t2_0: Literal["Foo[str]"] = reveal_type(Foo.func2(val_str)) - t2_1: Literal["Foo[str]"] = reveal_type(FooSub1.func2(val_str)) - t2_2: Literal["Foo[int]"] = reveal_type(FooSub2.func2(val_int)) + reveal_type(Foo.func2(val_str), expected_text="Foo[str]") + reveal_type(FooSub1.func2(val_str), expected_text="Foo[str]") + reveal_type(FooSub2.func2(val_int), expected_text="Foo[int]") # This should generate an error because the argument type doesn't match. FooSub2.func2(val_str) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes50.py b/packages/pyright-internal/src/tests/samples/genericTypes50.py index 78e33ce29f87..c952b9a2082c 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes50.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes50.py @@ -2,7 +2,7 @@ # generic protocols are used. from datetime import timedelta -from typing import Any, Generic, Literal, Protocol, TypeVar, overload +from typing import Any, Generic, Protocol, TypeVar, overload _X_co = TypeVar("_X_co", covariant=True) _X_contra = TypeVar("_X_contra", contravariant=True) @@ -32,12 +32,13 @@ def divmod(__x: Any, __y: Any) -> Any: ... 
-t1: Literal["Tuple[int, timedelta]"] = reveal_type( - divmod(timedelta(minutes=90), timedelta(hours=1)) +reveal_type( + divmod(timedelta(minutes=90), timedelta(hours=1)), + expected_text="tuple[int, timedelta]", ) -t2: Literal["Tuple[int, int]"] = reveal_type(divmod(3, 4)) -t3: Literal["Tuple[float, float]"] = reveal_type(divmod(3.6, 4)) -t4: Literal["Tuple[float, float]"] = reveal_type(divmod(3, 4.5)) +reveal_type(divmod(3, 4), expected_text="tuple[int, int]") +reveal_type(divmod(3.6, 4), expected_text="tuple[float, float]") +reveal_type(divmod(3, 4.5), expected_text="tuple[float, float]") class SupportsLessThan(Protocol): @@ -58,4 +59,4 @@ def min2(__arg1: SupportsLessThanT, __arg2: SupportsLessThanT) -> SupportsLessTh def func1(): x = max2(1, min2(1, 4.5)) - t_x: Literal["float"] = reveal_type(x) + reveal_type(x, expected_text="float") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes51.py b/packages/pyright-internal/src/tests/samples/genericTypes51.py index 117c60ef09dc..4e34226c4d7a 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes51.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes51.py @@ -5,7 +5,7 @@ # with open('read', 'rb') as fr, open('write', 'wb') as fw: # shutil.copyfileobj(fr, fw) -from typing import Any, AnyStr, Literal, Optional, Protocol, TypeVar, Union +from typing import Any, AnyStr, Optional, Protocol, TypeVar, Union class Array: @@ -52,4 +52,4 @@ def copyfileobj( def f(fr: BufferedReader, fw: BufferedWriter): x = copyfileobj(fr, fw) - t_x: Literal["bytes"] = reveal_type(x) + reveal_type(x, expected_text="bytes") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes52.py b/packages/pyright-internal/src/tests/samples/genericTypes52.py index 22907799761f..259ae70d7d11 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes52.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes52.py @@ -1,7 +1,7 @@ # This sample tests the case where a TypeVar is 
used in the parameter # of a callable (and is hence treated as contravariant). -from typing import Callable, Literal, Sequence, TypeVar +from typing import Callable, Sequence, TypeVar T = TypeVar("T") U = TypeVar("U") @@ -29,6 +29,6 @@ def qux( ... -t1: Literal["float"] = reveal_type(baz(1.0, foo)) -t2: Literal["Sequence[float]"] = reveal_type(qux([1.0], foo)) -t3: Literal["float"] = reveal_type(qux([1.0], bar)) +reveal_type(baz(1.0, foo), expected_text="float") +reveal_type(qux([1.0], foo), expected_text="Sequence[float]") +reveal_type(qux([1.0], bar), expected_text="float") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes53.py b/packages/pyright-internal/src/tests/samples/genericTypes53.py index c42891d6f7a8..f625e0d76c42 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes53.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes53.py @@ -24,10 +24,10 @@ def func2(arg: _T) -> List[_T]: v3: List[Literal["test"]] = func2("test") v4 = func1("test") -t_v4: Literal["str"] = reveal_type(v4) +reveal_type(v4, expected_text="str") v5 = func2("test") -t_v5: Literal["List[str]"] = reveal_type(v5) +reveal_type(v5, expected_text="List[str]") def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: @@ -36,7 +36,7 @@ def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: dicts = [{"a": "b"}, {"c": "d"}] v6 = reduce(lambda x, y: x | y, dicts) -t_v6: Literal["dict[str, str]"] = reveal_type(v6) +reveal_type(v6, expected_text="dict[str, str]") v7 = reduce(lambda x, y: {**x, **y}, dicts) -t_v7: Literal["dict[str, str]"] = reveal_type(v7) +reveal_type(v7, expected_text="dict[str, str]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes55.py b/packages/pyright-internal/src/tests/samples/genericTypes55.py index 1510ebce4307..944ae79188d9 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes55.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes55.py 
@@ -2,7 +2,7 @@ # a generic Callable that can be specialized with type variables # provided by the caller. -from typing import Callable, Literal, TypeVar +from typing import Callable, TypeVar _T = TypeVar("_T") @@ -27,19 +27,19 @@ def identity_int(x: int) -> int: v1 = identity_callable_1(identity_int) -t_v1_1: Literal["(_p0: int) -> int"] = reveal_type(v1) -t_v1_2: Literal["int"] = reveal_type(v1(0)) +reveal_type(v1, expected_text="(int) -> int") +reveal_type(v1(0), expected_text="int") v2 = identity_callable_1(identity_generic) -t_v2_1: Literal["(_p0: _T@identity_generic) -> _T@identity_generic"] = reveal_type(v2) -t_v2_2: Literal["int"] = reveal_type(v2(0)) -t_v2_3: Literal["str"] = reveal_type(v2("")) +reveal_type(v2, expected_text="(_T@identity_generic) -> _T@identity_generic") +reveal_type(v2(0), expected_text="int") +reveal_type(v2(""), expected_text="str") v3 = identity_callable_2(identity_int) -t_v3_1: Literal["(_p0: int) -> int"] = reveal_type(v3) -t_v3_2: Literal["int"] = reveal_type(v3(0)) +reveal_type(v3, expected_text="(int) -> int") +reveal_type(v3(0), expected_text="int") v4 = identity_callable_2(identity_generic) -t_v4_1: Literal["(_p0: _T@identity_generic) -> _T@identity_generic"] = reveal_type(v4) -t_v4_2: Literal["int"] = reveal_type(v4(0)) -t_v4_3: Literal["str"] = reveal_type(v4("")) +reveal_type(v4, expected_text="(_T@identity_generic) -> _T@identity_generic") +reveal_type(v4(0), expected_text="int") +reveal_type(v4(""), expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes57.py b/packages/pyright-internal/src/tests/samples/genericTypes57.py index 9b196adf259d..47863c248e06 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes57.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes57.py @@ -2,7 +2,7 @@ # are unions and are being compared using invariant constraints # and the dest type contains a type variable. 
-from typing import Literal, Pattern, Sequence, TypeVar, List, Optional, Union +from typing import Pattern, Sequence, TypeVar, List, Optional, Union _T = TypeVar("_T") @@ -16,15 +16,15 @@ def func2(v: List[Optional[Union[_T, str]]]) -> _T: ... -v1: List[Optional[Union[int]]] = [1, None] +v1: List[Optional[int]] = [1, None] r1 = func1(v1) -t_r1: Literal["int"] = reveal_type(r1) +reveal_type(r1, expected_text="int") v2: List[Optional[Union[int, str]]] = [1, None] r2_1 = func1(v2) -t_r2_1: Literal["int | str"] = reveal_type(r2_1) +reveal_type(r2_1, expected_text="int | str") r2_2 = func2(v2) -t_r2_2: Literal["int"] = reveal_type(r2_2) +reveal_type(r2_2, expected_text="int") v3: List[Union[str, Sequence[Pattern]]] = [""] diff --git a/packages/pyright-internal/src/tests/samples/genericTypes58.py b/packages/pyright-internal/src/tests/samples/genericTypes58.py index 85e9a42973d2..c480931caff7 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes58.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes58.py @@ -2,7 +2,14 @@ # within unions, where the TypeVar may not be matched during constraint # solving. -from typing import Dict, List, Literal, Optional, TypeVar, Union +from typing import ( + Awaitable, + Callable, + Generic, + List, + TypeVar, + Union, +) _T = TypeVar("_T") @@ -12,15 +19,34 @@ def func1(x: Union[str, None, _T]) -> Union[str, None, _T]: ... -t1_1: Literal["str | None"] = reveal_type(func1(None)) -t1_2: Literal["str | None"] = reveal_type(func1("hi")) -t1_3: Literal["str | int | None"] = reveal_type(func1(3)) +reveal_type(func1(None), expected_text="str | None") +reveal_type(func1("hi"), expected_text="str | None") +reveal_type(func1(3), expected_text="str | int | None") def func2(x: Union[str, None, _T]) -> List[Union[str, None, _T]]: ... 
-t2_1: Literal["List[str | None]"] = reveal_type(func2(None)) -t2_2: Literal["List[str | None]"] = reveal_type(func2("hi")) -t2_3: Literal["List[str | int | None]"] = reveal_type(func2(3)) +reveal_type(func2(None), expected_text="List[str | None]") +reveal_type(func2("hi"), expected_text="List[str | None]") +reveal_type(func2(3), expected_text="List[str | int | None]") + + +CallbackSig = Callable[..., Awaitable[None]] +CallbackSigT = TypeVar("CallbackSigT", bound="CallbackSig") + + +class UsesFoo(Generic[CallbackSigT]): + ... + + +def dec1() -> Callable[ + [Union[CallbackSigT, UsesFoo[CallbackSigT]]], UsesFoo[CallbackSigT] +]: + ... + + +@dec1() +async def bars() -> None: + ... diff --git a/packages/pyright-internal/src/tests/samples/genericTypes59.py b/packages/pyright-internal/src/tests/samples/genericTypes59.py index 9639f7bf1ccd..2ce5f5bbfb40 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes59.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes59.py @@ -3,7 +3,7 @@ # according to PEP 484, but pyright has code in place to find the # "least complex" answer. 
-from typing import Generic, List, Literal, TypeVar, Union +from typing import Any, Generic, List, TypeVar, Union T1 = TypeVar("T1") @@ -26,7 +26,7 @@ def func1a(value: List[Union[T1, List[T1]]]) -> T1: def func2a(value: List[Union[float, List[float]]]): x = func1a(value) - t_x: Literal["float"] = reveal_type(x) + reveal_type(x, expected_text="float") def func3a(value: List[Union[str, List[float]]]): @@ -36,7 +36,7 @@ def func3a(value: List[Union[str, List[float]]]): def func4a(value: List[Union[float, str, List[Union[float, str]]]]): x = func1a(value) - t_x: Literal["float | str"] = reveal_type(x) + reveal_type(x, expected_text="float | str") def func1b(value: List[Union[int, List[T1]]]) -> T1: @@ -45,9 +45,27 @@ def func1b(value: List[Union[int, List[T1]]]) -> T1: def func2b(value: List[Union[int, List[float]]]): x = func1b(value) - t_x: Literal["float"] = reveal_type(x) + reveal_type(x, expected_text="float") def func3b(value: List[Union[str, List[float]]]): # This should generate an error func1b(value) + + +def ensure_list(value: Union[T1, List[T1]]) -> List[T1]: + ... 
+ + +def func4( + v1: list, v2: List[Any], v3: List[None], v4: Any, v5: int, v6: T1, v7: List[T1] +) -> T1: + reveal_type(ensure_list(v1), expected_text="List[Unknown]") + reveal_type(ensure_list(v2), expected_text="List[Any]") + reveal_type(ensure_list(v3), expected_text="List[None]") + reveal_type(ensure_list(v4), expected_text="List[Any]") + reveal_type(ensure_list(v5), expected_text="List[int]") + reveal_type(ensure_list(v6), expected_text="List[T1@func4]") + reveal_type(ensure_list(v7), expected_text="List[T1@func4]") + + return v6 diff --git a/packages/pyright-internal/src/tests/samples/genericTypes6.py b/packages/pyright-internal/src/tests/samples/genericTypes6.py index 7830d2c29d0c..69ac2da381cf 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes6.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes6.py @@ -1,7 +1,7 @@ # This sample tests the type checker's ability to do # TypeVar matching for both constrained TypeVars and unconstrained. -from typing import Generic, Literal, TypeVar +from typing import Generic, TypeVar S = TypeVar("S", str, bytes) @@ -36,14 +36,14 @@ def generic_func1(self, a: U, b: U = ..., **kwargs: U) -> U: foo = Foo[str]() r1 = foo.generic_func1("hi") -t1: Literal["str"] = reveal_type(r1) +reveal_type(r1, expected_text="str") r2 = foo.generic_func1("hi", test="hi") -t2: Literal["str"] = reveal_type(r2) +reveal_type(r2, expected_text="str") # This should generate an error. r3 = foo.generic_func1("hi", test=3) -t3: Literal["str"] = reveal_type(r3) +reveal_type(r3, expected_text="str") # This should generate an error. 
r4 = foo.generic_func1("hi", 3) -t4: Literal["str"] = reveal_type(r4) +reveal_type(r4, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes61.py b/packages/pyright-internal/src/tests/samples/genericTypes61.py index 14611449fd68..a709916130e6 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes61.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes61.py @@ -1,7 +1,7 @@ # This sample tests a case where Type[X] and X are used within the # same class declaration. -from typing import Dict, Generic, Literal, Type, TypeVar +from typing import Dict, Generic, Type, TypeVar from dataclasses import dataclass, field K = TypeVar("K") @@ -29,6 +29,6 @@ class Thing1(Base): pass -t1: Literal["BaseTypeRegistry[Thing1]"] = reveal_type(BaseTypeRegistry(Thing1)) +reveal_type(BaseTypeRegistry(Thing1), expected_text="BaseTypeRegistry[Thing1]") foo: BaseTypeRegistry[Thing1] = BaseTypeRegistry(Thing1) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes63.py b/packages/pyright-internal/src/tests/samples/genericTypes63.py index 5e883fb38532..19c3d8b0b232 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes63.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes63.py @@ -1,7 +1,7 @@ # This sample tests the handling of a constrained TypeVar used with # a Type[T] annotation. 
-from typing import Literal, Type, TypeVar, Any +from typing import Type, TypeVar, Any class A: @@ -20,14 +20,13 @@ def factory(desired_type: Type[T]) -> T: factory(str) -t1: Literal["str"] = reveal_type(factory(str)) +reveal_type(factory(str), expected_text="str") factory(int) -t2: Literal["int"] = reveal_type(factory(int)) +reveal_type(factory(int), expected_text="int") factory(A).f() -t3: Literal["A"] = reveal_type(factory(A)) +reveal_type(factory(A), expected_text="A") # This should generate an error factory(float) - diff --git a/packages/pyright-internal/src/tests/samples/genericTypes67.py b/packages/pyright-internal/src/tests/samples/genericTypes67.py new file mode 100644 index 000000000000..85f63dcc35a1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes67.py @@ -0,0 +1,57 @@ +# This sample tests for proper handling of constrained or bound TypeVars. + +from typing import Dict, Generic, Literal, Optional, TypeVar, Union + + +class IntSubclass1(int): + pass + + +_T1 = TypeVar("_T1", int, IntSubclass1) + + +def add1(value: _T1) -> _T1: + reveal_type(value + 1, expected_text="int*") + + # This should generate an error + return value + 5 + + +class IntSubclass2(int): + def __add__(self, value: object) -> "IntSubclass2": + ... + + +_T2 = TypeVar("_T2", int, IntSubclass2) + + +def add2(value: _T2) -> _T2: + reveal_type(value + 1, expected_text="int* | IntSubclass2*") + return value + 5 + + +class A: + ... + + +class B: + ... + + +_T3 = TypeVar("_T3", bound=Union[A, B]) + + +class Registry(Generic[_T3]): + def __init__(self) -> None: + self.registry = {} + + @property + def registry(self) -> Dict[str, _T3]: + ... + + @registry.setter + def registry(self, registry: Dict[str, _T3]) -> None: + ... 
+ + def get(self, _id: str) -> Optional[_T3]: + return self.registry.get(_id) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes68.py b/packages/pyright-internal/src/tests/samples/genericTypes68.py new file mode 100644 index 000000000000..a6526d41acb8 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes68.py @@ -0,0 +1,14 @@ +# This sample tests the case where a constrained TypeVar is assigned +# to another constrained TypeVar or a union that contains a constrained +# TypeVar. + +from os import PathLike +from typing import AnyStr + + +def func(path: AnyStr | PathLike[AnyStr]) -> AnyStr: + ... + + +def thing(value: AnyStr) -> AnyStr: + return func(value) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes69.py b/packages/pyright-internal/src/tests/samples/genericTypes69.py new file mode 100644 index 000000000000..92869b6ff049 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes69.py @@ -0,0 +1,45 @@ +# This sample tests the case where a method within a generic class +# constructs an instance of the same type using a type variable +# within that class. + +from typing import Generic, Protocol, Tuple, TypeVar + + +T = TypeVar("T") + + +class A(Protocol[T]): + def a(self) -> "A[Tuple[T]]": + ... + + def b(self) -> "A[Tuple[T]]": + ... + + def c(self) -> "T": + ... 
+ + +class B(Generic[T]): + def __init__(self, t: T): + self._t = t + + def a(self) -> A[Tuple[T]]: + t = (self._t,) + y = B(t) + v = f(y.b()) + reveal_type(v, expected_text="tuple[T@B]") + return y + + def b(self) -> A[Tuple[T]]: + x = (self._t,) + reveal_type(x, expected_text="tuple[T@B]") + y = B(x) + reveal_type(y, expected_text="B[tuple[T@B]]") + return y + + def c(self) -> T: + return self._t + + +def f(a: A[Tuple[T]]) -> T: + return a.c()[0] diff --git a/packages/pyright-internal/src/tests/samples/genericTypes70.py b/packages/pyright-internal/src/tests/samples/genericTypes70.py new file mode 100644 index 000000000000..768d757734f9 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes70.py @@ -0,0 +1,31 @@ +# This sample tests the case where a generic class has a constructor that +# supplies the type arguments via a callable which is itself generic. + + +from typing import Callable, Generic, Sequence, TypeVar + +T = TypeVar("T") +V = TypeVar("V", bound=object) +V_co = TypeVar("V_co", covariant=True) +U = TypeVar("U", bound=object) + + +class Result(Generic[V]): + pass + + +ParseFn = Callable[[Sequence[T], int, int], Result[V]] + + +class Parser(Generic[T, V_co]): + def fmap1(self, fn: Callable[[V_co], U]) -> "Parser[T, U]": + def fmap2(stream: Sequence[T], pos: int, bt: int) -> Result[U]: + raise NotImplementedError() + + reveal_type(FnParser(fmap2), expected_text="FnParser[T@Parser, U@fmap1]") + return FnParser(fmap2) + + +class FnParser(Parser[T, V_co]): + def __init__(self, fn: ParseFn[T, V_co]): + self._fn = fn diff --git a/packages/pyright-internal/src/tests/samples/genericTypes71.py b/packages/pyright-internal/src/tests/samples/genericTypes71.py new file mode 100644 index 000000000000..13a51f2e65f6 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes71.py @@ -0,0 +1,72 @@ +# This sample tests the case where a generic class uses a default argument +# for a generic type parameter in its constructor. 
+ +from typing import Generic, List, TypeVar + + +T = TypeVar("T") +U = TypeVar("U") + + +class Box(Generic[T]): + def __init__(self, value: T = 123): + self.value = value + + +x1 = Box[str | int]() +x2 = Box[float]() +x3 = Box[str]("hi") + +# This should generate an error because "hi" isn't compatible +# with float. +x4 = Box[float]("hi") + + +# This should generate an error because the default value of 123 +# isn't compatible with str. +y = Box[str]() + + +class Container(Generic[T]): + def __init__(self, value: T = None): + self.value = value + + @classmethod + def create(cls) -> "Container[T]": + # This should generate an error but it doesn't + # currently because Container[T] being constructed + # is different from the current Container[T]. + return Container[T]() + + def on_next(self, value: T): + pass + + +class IntContainer(Container[int]): + def increment(self): + # This should generate an error if strictParameterNoneValue is true. + self.value += 1 + + +class ContainerList(Generic[U]): + def __init__(self) -> None: + self.containers: List[Container[U]] = [] + + def method1(self, a: U): + Container[U](a) + Container() + Container(123) + + # This should generate an error if strictParameterNoneValue is false. + Container[U]() + + # This should generate an error if strictParameterNoneValue is false. + Container[U](None) + + def method2(self): + Container[U].create() + + +def default_if_empty(obv: Container[T], default_value: T = None) -> None: + # This should generate an error. 
+ obv.on_next(default_value) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes72.py b/packages/pyright-internal/src/tests/samples/genericTypes72.py new file mode 100644 index 000000000000..41ca9c7f820e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes72.py @@ -0,0 +1,24 @@ +# This sample tests a special case of bidirectional type inference when +# the expected type is a union and the destination type is a union that +# contains Any and a TypeVar. + + +from typing import Any, Literal, TypeVar + +_T = TypeVar("_T") + + +def func1(__o: object, name: str, __default: _T) -> Any | _T: + ... + + +x: Literal[1, 2, 3] = func1(object(), "", 1) + + +def func2(a: _T) -> bool | _T: + ... + + +y = func2(None) +if y is not True: + y or func2(False) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes73.py b/packages/pyright-internal/src/tests/samples/genericTypes73.py new file mode 100644 index 000000000000..26e3e0daa672 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes73.py @@ -0,0 +1,14 @@ +# This sample tests the case where the constraint solver can choose one +# of several types that satisfy the constraints. + +from typing import TypeVar, Union + +T = TypeVar("T") + + +def to_list(t: Union[list[T], T]) -> list[T]: + ... + + +x = to_list([1, 2, 3]) +reveal_type(x, expected_text="list[int]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes74.py b/packages/pyright-internal/src/tests/samples/genericTypes74.py new file mode 100644 index 000000000000..d34c20db2cdd --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes74.py @@ -0,0 +1,23 @@ +# This sample tests the handling of a generic type whose implementation +# includes the instantiation of another instance of itself using its +# own type parameters as type arguments. 
+ +from typing import Generic, TypeVar + +A = TypeVar("A") +B = TypeVar("B") + + +class X(Generic[A, B]): + _dict: dict[A, B] + _pair: "X[B, A]" + + def method(self, a: A, b: B) -> None: + self._pair._dict[b] + + +x = X[int, str]() +x._pair._dict["foo"] + +reveal_type(x._pair, expected_text="X[str, int]") +reveal_type(x._pair._pair, expected_text="X[int, str]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes75.py b/packages/pyright-internal/src/tests/samples/genericTypes75.py new file mode 100644 index 000000000000..b44c5641d5ac --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes75.py @@ -0,0 +1,45 @@ +# This sample tests the handling of a generic class whose implementation +# allocates an instance of itself by invoking a constructor and passing +# an argument that is a generic type. + +# pyright: strict + +from typing import Generic, TypeVar + +T = TypeVar("T") + + +class A(Generic[T]): + def __init__(self, x: T): + self.x = x + + def method1(self) -> "A[T]": + x = self.x + reveal_type(x, expected_text="T@A") + t = (x,) + reveal_type(t, expected_text="tuple[T@A]") + a = A(t[0]) + reveal_type(a, expected_text="A[T@A]") + return a + + +class B(Generic[T]): + def __init__(self, thing: T): + pass + + @staticmethod + def method1(val: T) -> "B[T]": + # This should generate an error. + return B(0) + + +class C(Generic[T]): + def method1(self) -> "C[T]": + return C[T]() + + +c1 = C[int]() +reveal_type(c1, expected_text="C[int]") + +c2 = c1.method1() +reveal_type(c2, expected_text="C[int]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes76.py b/packages/pyright-internal/src/tests/samples/genericTypes76.py new file mode 100644 index 000000000000..0c101bf13994 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes76.py @@ -0,0 +1,107 @@ +# This sample tests the "constrained TypeVar narrowing for return types" +# feature. 
When a declared return type of a function contains a constrained +# TypeVar and the return statement is found on a path that tests a variable +# that is typed as that TypeVar, we know that the code path is taken only +# in the case where constraint is satisfied. + +from typing import AnyStr, Generic, List, Optional, ParamSpec, TypeVar, Union +from typing_extensions import TypeVarTuple, Unpack + +_T1 = TypeVar("_T1", str, int) +_T2 = TypeVar("_T2") + + +class A: + ... + + +class B: + ... + + +class C: + ... + + +_T3 = TypeVar("_T3", A, B, C) + +_P = ParamSpec("_P") +_Ts = TypeVarTuple("_Ts") + + +def func1(val1: _T1) -> _T1: + if isinstance(val1, str): + return "" + return 0 + + +def func2(val1: _T1) -> list[_T1]: + if isinstance(val1, str): + return [""] + return [0] + + +class Class1(Generic[_T1, _T2, _T3, _P, Unpack[_Ts]]): + def meth1( + self, val1: _T1, val2: _T2, val3: _T3, cond: bool + ) -> Union[List[_T1], List[_T2], List[_T3]]: + if cond: + # This should generate an error. + return [0] + + if cond: + if isinstance(val1, str): + # This should generate an error. + return [0] + else: + return [0] + + if cond: + if isinstance(val3, B): + return [B()] + else: + # This should generate an error. + return [C()] + + if cond: + if not isinstance(val3, B) and not isinstance(val3, C): + return [A()] + + return [val1] + + def meth2(self, val1: _T1) -> _T1: + val2 = val1 + + while True: + if isinstance(val2, str): + return "hi" + + val2 = val2 = val1 + + if isinstance(val2, int): + return 0 + + def meth3(self, val1: _T1, val2: _T3) -> _T1: + if isinstance(val2, A): + # This should generate an error. + return 1 + + if isinstance(val2, B): + if isinstance(val1, str): + return "" + + if isinstance(val1, int): + if isinstance(val2, B): + # This should generate an error. 
+ return "" + + raise BaseException() + + +def func3(s: AnyStr, y: Optional[AnyStr] = None) -> AnyStr: + if isinstance(s, str): + if y is None: + pass + return "" + else: + raise NotImplementedError diff --git a/packages/pyright-internal/src/tests/samples/genericTypes77.py b/packages/pyright-internal/src/tests/samples/genericTypes77.py new file mode 100644 index 000000000000..da1f2227cc57 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes77.py @@ -0,0 +1,26 @@ +# This sample tests the case where a generic function is passed as +# a parameter to another generic function. + +from typing import Any, Callable, Generic, Iterable, TypeVar + +T = TypeVar("T") +U = TypeVar("U") + + +def identity(x: U) -> U: + return x + + +def not_identity(x: Any) -> int: + return 3 + + +class Test(Generic[T]): + def fun(self, x: Iterable[T], f: Callable[[T], T]): + ... + + def caller(self, x: Iterable[T]): + self.fun(x, identity) + + # This should generate an error. + self.fun(x, not_identity) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes78.py b/packages/pyright-internal/src/tests/samples/genericTypes78.py new file mode 100644 index 000000000000..6676e64b40c9 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes78.py @@ -0,0 +1,23 @@ +# This sample tests the case where a generic function +# returns a generic Callable. + +from typing import Callable, TypeVar + + +_T = TypeVar("_T") + + +def func1(val1: _T) -> Callable[[_T], None]: + def f(a: str): + ... + + # This should generate an error because str isn't + # compatible with _T. + return f + + +def func2(val1: _T) -> Callable[[_T], None]: + def f(a: _T): + ... 
+ + return f diff --git a/packages/pyright-internal/src/tests/samples/genericTypes79.py b/packages/pyright-internal/src/tests/samples/genericTypes79.py new file mode 100644 index 000000000000..8b8680c673b9 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes79.py @@ -0,0 +1,37 @@ +# This sample tests the case that exercises some of the heuristics that +# determine whether TypeVar matching should retain a literal type. + +from typing import Callable, Dict, Generic, Literal, Tuple, TypeVar + + +FileChanges = Dict[str, Literal["created", "edited", "removed"]] + +changes: FileChanges = {} +changes.update({filename: "removed" for filename in ["foo.py", "bar.py"]}) + +_T = TypeVar("_T") + + +class IAsyncContext(Generic[_T]): + pass + + +Async = Callable[[IAsyncContext[_T]], None] + + +def func1(value: _T) -> Async[_T]: + def ret(ctx: IAsyncContext[_T]) -> None: + pass + + return ret + + +def func2() -> Async[bool]: + return func1(True) + + +def func3(value: _T) -> Callable[[_T], None]: + ... + + +x: Callable[[Tuple[bool]], None] = func3((True,)) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes80.py b/packages/pyright-internal/src/tests/samples/genericTypes80.py new file mode 100644 index 000000000000..45e7dd7b6885 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes80.py @@ -0,0 +1,10 @@ +# This sample tests TypeVar matching when there are multiple sources +# and some of them are Unknown. The TypeVar constraint solver contains +# special heuristics to deal with this case. 
+ + +def func1(u): + b: bool = True + + x = dict(b=b, u=u, x=[]) + reveal_type(x, expected_text="dict[str, bool | list[Any]]") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes81.py b/packages/pyright-internal/src/tests/samples/genericTypes81.py new file mode 100644 index 000000000000..a8ca979998b1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes81.py @@ -0,0 +1,32 @@ +# This sample verifies that a generic class parameterized with a +# constrained TypeVar properly translates an explicit type argument +# into the correct constrained type. + +from typing import TypeVar, Generic + + +class A: + ... + + +class B: + ... + + +class A2(A): + ... + + +T = TypeVar("T", A, B) + + +class F(Generic[T]): + def __init__(self, thing: T) -> None: + self.thing = thing + + +f2 = F[A2](A2()) + +reveal_type(F[A2], expected_text="Type[F[A]]") +reveal_type(f2, expected_text="F[A]") +reveal_type(f2.thing, expected_text="A") diff --git a/packages/pyright-internal/src/tests/samples/genericTypes82.py b/packages/pyright-internal/src/tests/samples/genericTypes82.py new file mode 100644 index 000000000000..bb48c1f35187 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/genericTypes82.py @@ -0,0 +1,19 @@ +# This sample tests the special-case handling of "type" when used +# as an assignment for Type[T]. + +from typing import Any, TypeVar + +T = TypeVar("T") + + +def f(x: type[T]) -> T: + ... + + +def g() -> type | Any: + ... 
+ + +y = g() + +f(y) diff --git a/packages/pyright-internal/src/tests/samples/genericTypes9.py b/packages/pyright-internal/src/tests/samples/genericTypes9.py index fff50ffa1c55..890a397070de 100644 --- a/packages/pyright-internal/src/tests/samples/genericTypes9.py +++ b/packages/pyright-internal/src/tests/samples/genericTypes9.py @@ -2,7 +2,7 @@ import pathlib import shutil -from typing import AnyStr, Literal, Sequence, Type, TypeVar, Union +from typing import AnyStr, Type, TypeVar, Union class Foo: @@ -99,11 +99,11 @@ def func13(value: Union[A, D]): def func14(cls: Type[T4]) -> T4: instance1 = cls() - t1: Literal["T4@func14"] = reveal_type(instance1) # Unknown + reveal_type(instance1, expected_text="T4@func14") # Unknown return instance1 def func15(cls: Union[Type[Union[A, B]], Type[Union[C, D]]]) -> Union[A, B, C, D]: instance2 = cls() - t1: Literal["A | B | C | D"] = reveal_type(instance2) + reveal_type(instance2, expected_text="A | B | C | D") return instance2 diff --git a/packages/pyright-internal/src/tests/samples/import1.py b/packages/pyright-internal/src/tests/samples/import1.py index c81094714061..fbf695fc8e59 100644 --- a/packages/pyright-internal/src/tests/samples/import1.py +++ b/packages/pyright-internal/src/tests/samples/import1.py @@ -1,9 +1,7 @@ # This sample tests the type analyzer's handling of the built-in # __import__ function. -from typing import Literal - -v_path: Literal["Iterable[str]"] = reveal_type(__path__) +reveal_type(__path__, expected_text="Iterable[str]") # This should not generate a type error. 
__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/packages/pyright-internal/src/tests/samples/import14.py b/packages/pyright-internal/src/tests/samples/import14.py index 2d4bb2d5a8c7..9d05c9c66a9b 100644 --- a/packages/pyright-internal/src/tests/samples/import14.py +++ b/packages/pyright-internal/src/tests/samples/import14.py @@ -3,9 +3,8 @@ # pyright: strict -from typing import Literal from .import13 import foo1 from . import import13 -t1: Literal["int"] = reveal_type(foo1) -t2: Literal["int"] = reveal_type(import13.foo2) +reveal_type(foo1, expected_text="int") +reveal_type(import13.foo2, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/import6.py b/packages/pyright-internal/src/tests/samples/import6.py index 24ba0a085b74..dd2142caf3b7 100644 --- a/packages/pyright-internal/src/tests/samples/import6.py +++ b/packages/pyright-internal/src/tests/samples/import6.py @@ -10,4 +10,8 @@ b = __foo c = bar + +# This should generate an error because there is no __all__ assignment +# and names starting with a single underscore should not be imported +# in a wildcard. d = _bar diff --git a/packages/pyright-internal/src/tests/samples/inconsistentConstructor1.py b/packages/pyright-internal/src/tests/samples/inconsistentConstructor1.py new file mode 100644 index 000000000000..1612b0024c37 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/inconsistentConstructor1.py @@ -0,0 +1,23 @@ +# This sample tests the reportInconsistentConstructor diagnostic check. + + +class Parent1: + def __init__(self, a: int) -> None: + ... + + +class Child1(Parent1): + # This should generate an error if reportInconsistentConstructor is enabled. + def __new__(cls, a: int | str): + ... + + +class Parent2: + def __init__(self, b: int) -> None: + ... + + +class Child2(Parent2): + # This should generate an error if reportInconsistentConstructor is enabled. + def __new__(cls, b: str): + ... 
diff --git a/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab.py b/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab1.py similarity index 100% rename from packages/pyright-internal/src/tests/samples/inconsistentSpaceTab.py rename to packages/pyright-internal/src/tests/samples/inconsistentSpaceTab1.py diff --git a/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab2.py b/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab2.py new file mode 100644 index 000000000000..3adc8e5c34ad --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/inconsistentSpaceTab2.py @@ -0,0 +1,10 @@ +# This sample tests the reporting of inconsistent space/tab usage +# for dedent tokens. + + +if True: + if True: + print("False") + print("True") # Should generate an error here. + + diff --git a/packages/pyright-internal/src/tests/samples/index1.py b/packages/pyright-internal/src/tests/samples/index1.py index 84f6b70db508..4682d95aecd8 100644 --- a/packages/pyright-internal/src/tests/samples/index1.py +++ b/packages/pyright-internal/src/tests/samples/index1.py @@ -2,7 +2,7 @@ # when used with the __getitem__ and __setitem__ method. 
-from typing import Generic, Literal, Type, TypeVar, Any +from typing import Generic, Type, TypeVar, Any class MyInt: @@ -47,7 +47,7 @@ class ClassA(metaclass=MyMetaclass): a1 = ClassA[1] -t_a1: Literal["ClassA"] = reveal_type(a1) +reveal_type(a1, expected_text="ClassA") # This should generate an error ClassA["1"] @@ -81,3 +81,17 @@ def __setitem__(self, index: int, value: TD): def func2(container: ClassD[TD], value: TD): container[1] = value + + +class ClassE: + def __getattr__(self, s: str) -> Any: + raise NotImplementedError() + + +e = ClassE() + +# This should generate an error +v_e = e["test"] + +# This should generate an error +e["test"] = 3 diff --git a/packages/pyright-internal/src/tests/samples/initVar1.py b/packages/pyright-internal/src/tests/samples/initVar1.py new file mode 100644 index 000000000000..fc72ae998786 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/initVar1.py @@ -0,0 +1,23 @@ +# This sample tests the use of the InitVar annotation. + +from dataclasses import InitVar as InitVarAlias + +from dataclasses import * + + +@dataclass +class Container: + init_var1: InitVarAlias[int] + init_var2: InitVar[int] + + not_init_var1: int + + +c = Container(1, 2, 3) +reveal_type(c.not_init_var1, expected_text="int") + +# This should generate an error +c.init_var1 + +# This should generate an error +c.init_var2 diff --git a/packages/pyright-internal/src/tests/samples/isinstance1.py b/packages/pyright-internal/src/tests/samples/isinstance1.py index 7cb1b25b8412..b734d6ed3bfc 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance1.py +++ b/packages/pyright-internal/src/tests/samples/isinstance1.py @@ -1,28 +1,28 @@ # This sample tests basic type narrowing behavior for # the isinstance call. 
-from typing import Any, List, Literal, Optional, Type, TypedDict, Union +from typing import Any, List, Optional, Type, TypedDict, Union def func1(x: Union[List[str], int]): if isinstance(x, list): - t1: Literal["List[str]"] = reveal_type(x) + reveal_type(x, expected_text="List[str]") else: - t2: Literal["int"] = reveal_type(x) + reveal_type(x, expected_text="int") def func2(x: Any): if isinstance(x, list): - t1: Literal["list[Unknown]"] = reveal_type(x) + reveal_type(x, expected_text="list[Unknown]") else: - t2: Literal["Any"] = reveal_type(x) + reveal_type(x, expected_text="Any") def func3(x): if isinstance(x, list): - t1: Literal["list[Unknown]"] = reveal_type(x) + reveal_type(x, expected_text="list[Unknown]") else: - t2: Literal["Unknown"] = reveal_type(x) + reveal_type(x, expected_text="Unknown") class SomeTypedDict(TypedDict): @@ -31,27 +31,27 @@ class SomeTypedDict(TypedDict): def func4(x: Union[int, SomeTypedDict]): if isinstance(x, dict): - t1: Literal["SomeTypedDict"] = reveal_type(x) + reveal_type(x, expected_text="SomeTypedDict") else: - t2: Literal["int"] = reveal_type(x) + reveal_type(x, expected_text="int") def func5(x: int | str | complex): - if isinstance(x, int | str): - t1: Literal["int | str"] = reveal_type(x) + if isinstance(x, (int, str)): + reveal_type(x, expected_text="int | str") else: - t2: Literal["complex"] = reveal_type(x) + reveal_type(x, expected_text="complex") def func6(x: Type[int] | Type[str] | Type[complex]): - if issubclass(x, int | str): - t1: Literal["Type[int] | Type[str]"] = reveal_type(x) + if issubclass(x, (int, str)): + reveal_type(x, expected_text="Type[int] | Type[str]") else: - t2: Literal["Type[complex]"] = reveal_type(x) + reveal_type(x, expected_text="Type[complex]") def func7(x: Optional[Union[int, SomeTypedDict]]): if isinstance(x, (dict, type(None))): - t1: Literal["SomeTypedDict | None"] = reveal_type(x) + reveal_type(x, expected_text="SomeTypedDict | None") else: - t2: Literal["int"] = reveal_type(x) + 
reveal_type(x, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/isinstance10.py b/packages/pyright-internal/src/tests/samples/isinstance10.py index 69fe64740999..6e574fafa42c 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance10.py +++ b/packages/pyright-internal/src/tests/samples/isinstance10.py @@ -3,13 +3,13 @@ # pyright: reportUnnecessaryIsInstance=true -from typing import Any, Iterable, Literal, Sized +from typing import Any, Iterable, Sized def f(v: Any) -> bool: if isinstance(v, Iterable): - t_v1: Literal["Iterable[Unknown]"] = reveal_type(v) + reveal_type(v, expected_text="Iterable[Unknown]") if isinstance(v, Sized): - t_v2: Literal[""] = reveal_type(v) + reveal_type(v, expected_text="") return True return False diff --git a/packages/pyright-internal/src/tests/samples/isinstance3.py b/packages/pyright-internal/src/tests/samples/isinstance3.py index 3c4c40b25cfd..e0ad26c84300 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance3.py +++ b/packages/pyright-internal/src/tests/samples/isinstance3.py @@ -24,9 +24,15 @@ class A(Generic[_T]): if isinstance(a, A[str]): pass -# This should generate an error because unions are not -# allowed. -if issubclass(a, Union[A, int]): +# This should generate an error in Python 3.9 and older +# because unions are not allowed. +if issubclass(A, Union[A, int]): + pass + +# This should generate an error in Python 3.9 and older +# because unions are not allowed. A second error will be +# generated because the | operator isn't allowed. 
+if issubclass(A, A | int): pass diff --git a/packages/pyright-internal/src/tests/samples/isinstance4.py b/packages/pyright-internal/src/tests/samples/isinstance4.py index 3a709c345244..67caff88c95b 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance4.py +++ b/packages/pyright-internal/src/tests/samples/isinstance4.py @@ -1,7 +1,8 @@ # This sample checks that isinstance and issubclass don't # allow the second argument to be a Protocol class. -from typing import Any, Callable, Literal, Protocol, Type, TypeVar, Union +from inspect import isfunction +from typing import Any, Callable, Protocol, Type, TypeVar, Union from types import FunctionType, LambdaType @@ -37,11 +38,20 @@ def get_type_of_object(object: Union[Callable[..., Any], CustomClass]): return "nothing" -_T = TypeVar("_T", bound=CustomClass) +_T1 = TypeVar("_T1", bound=CustomClass) -def func(cls: Type[_T], val: _T): +def func1(cls: Type[_T1], val: _T1): if issubclass(cls, CustomClass): - t1: Literal["Type[CustomClass]*"] = reveal_type(cls) + reveal_type(cls, expected_text="Type[CustomClass]*") else: - t2: Literal["Never"] = reveal_type(cls) + reveal_type(cls, expected_text="Never") + + +_T2 = TypeVar("_T2") + + +def func2(x: _T2) -> Union[_T2, int]: + if callable(x) and isfunction(x): + return 1 + return x diff --git a/packages/pyright-internal/src/tests/samples/isinstance6.py b/packages/pyright-internal/src/tests/samples/isinstance6.py index 6049cd51cfc8..b226926810ea 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance6.py +++ b/packages/pyright-internal/src/tests/samples/isinstance6.py @@ -1,21 +1,19 @@ # This sample tests isinstance and issubclass type narrowing # based on cls and self parameters. 
-from typing import Literal - class Foo: @classmethod def bar(cls, other: type): if issubclass(other, cls): - t1: Literal["Type[Foo]"] = reveal_type(other) + reveal_type(other, expected_text="Type[Self@Foo]") if issubclass(other, (int, cls)): - t2: Literal["Type[Foo] | Type[int]"] = reveal_type(other) + reveal_type(other, expected_text="Type[Self@Foo] | Type[int]") def baz(self, other: object): if isinstance(other, self.__class__): - t1: Literal["Foo"] = reveal_type(other) + reveal_type(other, expected_text="Self@Foo") if isinstance(other, (int, self.__class__)): - t2: Literal["Foo | int"] = reveal_type(other) + reveal_type(other, expected_text="Self@Foo | int") diff --git a/packages/pyright-internal/src/tests/samples/isinstance7.py b/packages/pyright-internal/src/tests/samples/isinstance7.py index 9114106ea347..faaeac8171b5 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance7.py +++ b/packages/pyright-internal/src/tests/samples/isinstance7.py @@ -1,7 +1,7 @@ # This sample tests that the negative filtering for the 'isinstance' # narrowing logic properly preserves a TypeVar. 
-from typing import Literal, TypeVar, Generic +from typing import TypeVar, Generic class Operator: @@ -17,9 +17,9 @@ def __init__( step: OpType, ) -> None: if isinstance(step, BasePipeline): - t1: Literal["BasePipeline[Unknown]*"] = reveal_type(step) + reveal_type(step, expected_text="BasePipeline[Unknown]*") else: - t2: Literal["Operator*"] = reveal_type(step) + reveal_type(step, expected_text="Operator*") T1 = TypeVar("T1", int, str) @@ -36,7 +36,7 @@ def do_nothing1(x: T1) -> T1: def func2(arg: T2) -> T2: if isinstance(arg, str): - t1: Literal["str*"] = reveal_type(arg) + reveal_type(arg, expected_text="str*") - t2: Literal["str* | object*"] = reveal_type(arg) + reveal_type(arg, expected_text="str* | object*") return arg diff --git a/packages/pyright-internal/src/tests/samples/isinstance9.py b/packages/pyright-internal/src/tests/samples/isinstance9.py index 3f8d5e72ce60..a0fc82d14eb0 100644 --- a/packages/pyright-internal/src/tests/samples/isinstance9.py +++ b/packages/pyright-internal/src/tests/samples/isinstance9.py @@ -1,26 +1,26 @@ # This sample tests the isinstance narrowing when the list # of classes includes a type defined by a type variable. 
-from typing import Any, Literal, Type, TypeVar, Union +from typing import Any, Type, TypeVar, Union T = TypeVar("T") def func1(cls: Type[T], obj: Any) -> T: assert isinstance(obj, cls) - t_obj: Literal["T@func1"] = reveal_type(obj) + reveal_type(obj, expected_text="T@func1") return obj v1 = func1(int, 3) -t_v1: Literal["int"] = reveal_type(v1) +reveal_type(v1, expected_text="int") def func2(klass: Type[T], obj: Union[T, int]) -> T: assert isinstance(obj, klass) - t_obj: Literal["T@func2"] = reveal_type(obj) + reveal_type(obj, expected_text="T@func2") return obj v2 = func2(str, 3) -t_v2: Literal["str"] = reveal_type(v2) +reveal_type(v2, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/kwargsUnpack1.py b/packages/pyright-internal/src/tests/samples/kwargsUnpack1.py new file mode 100644 index 000000000000..4d49dc818862 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/kwargsUnpack1.py @@ -0,0 +1,114 @@ +# This sample tests the handling of Unpack[TypedDict] when used with +# a **kwargs parameter in a function signature. + +from typing import Protocol, TypedDict +from typing_extensions import NotRequired, Required, Unpack + + +class TD1(TypedDict): + v1: Required[int] + v2: NotRequired[str] + + +class TD2(TD1): + v3: Required[str] + + +def func1(**kwargs: Unpack[TD2]) -> None: + v1 = kwargs["v1"] + reveal_type(v1, expected_text="int") + + # This should generate an error because v2 might not be present. + kwargs["v2"] + + if "v2" in kwargs: + v2 = kwargs["v2"] + reveal_type(v2, expected_text="str") + + v3 = kwargs["v3"] + reveal_type(v3, expected_text="str") + + +reveal_type(func1, expected_text="(**kwargs: **TD2) -> None") + + +def func2(v1: int, **kwargs: Unpack[TD1]) -> None: + pass + + +def func3(): + # This should generate an error because it is + # missing required keyword arguments. 
+ func1() + + func1(v1=1, v2="", v3="5") + + td2 = TD2(v1=2, v3="4") + func1(**td2) + + # This should generate an error because v4 is not in TD2. + func1(v1=1, v2="", v3="5", v4=5) + + # This should generate an error because args are passed by position. + func1(1, "", "5") + + my_dict: dict[str, str] = {} + # This should generate an error because it's an untyped dict. + func1(**my_dict) + + func1(**{"v1": 2, "v3": "4", "v4": 4}) + + # This should generate an error because v1 is already specified. + func1(v1=2, **td2) + + # This should generate an error because v1 is already specified. + func2(1, **td2) + + # This should generate an error because v1 is matched to a + # named parameter and is not available for kwargs. + func2(v1=1, **td2) + + +class TDProtocol1(Protocol): + def __call__(self, *, v1: int, v3: str) -> None: + ... + + +class TDProtocol2(Protocol): + def __call__(self, *, v1: int, v3: str, v2: str = "") -> None: + ... + + +class TDProtocol3(Protocol): + def __call__(self, *, v1: int, v2: int, v3: str) -> None: + ... + + +class TDProtocol4(Protocol): + def __call__(self, *, v1: int) -> None: + ... + + +class TDProtocol5(Protocol): + def __call__(self, v1: int, v3: str) -> None: + ... + + +class TDProtocol6(Protocol): + def __call__(self, **kwargs: Unpack[TD2]) -> None: + ... + + +v1: TDProtocol1 = func1 +v2: TDProtocol2 = func1 + +# This should generate an error because v2 is the wrong type. +v3: TDProtocol3 = func1 + +# This should generate an error because v3 is missing. +v4: TDProtocol4 = func1 + +# This should generate an error because parameters are positional. +v5: TDProtocol5 = func1 + +v6: TDProtocol6 = func1 diff --git a/packages/pyright-internal/src/tests/samples/lambda3.py b/packages/pyright-internal/src/tests/samples/lambda3.py index 7fd023791c3a..df415ad36c58 100644 --- a/packages/pyright-internal/src/tests/samples/lambda3.py +++ b/packages/pyright-internal/src/tests/samples/lambda3.py @@ -2,6 +2,9 @@ # expressions in a lambda. 
+from typing import Callable, Protocol + + def test1(): var = 1 @@ -16,3 +19,16 @@ def test2(): def test3(): var = 0 lambda var=var: ... + + +class MyCallback(Protocol): + def __call__(self, y: int, a: int = 0) -> bool: + ... + + +lambda1: Callable[[int, int], bool] = lambda y, a=0: a == y +lambda2: MyCallback = lambda y, a=0: a == y + +lambda1(20) +lambda2(20) +lambda2(20, 30) diff --git a/packages/pyright-internal/src/tests/samples/lambda5.py b/packages/pyright-internal/src/tests/samples/lambda5.py new file mode 100644 index 000000000000..478fa761e094 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/lambda5.py @@ -0,0 +1,23 @@ +# This sample tests the case where a lambda's type is determined using +# bidirectional type inference and one or more of the parameters +# corresponds to a generic type. + +from typing import Callable, TypeVar, Generic, Any + +T = TypeVar("T") +MsgT = TypeVar("MsgT", bound="Msg[Any]") + + +class Msg(Generic[T]): + body: T + + +class Request: + ... + + +def check(func: "Callable[[MsgT, int], object]") -> MsgT: + ... + + +notification: Msg[Request] = check(lambda msg, foo: (msg.body, foo)) diff --git a/packages/pyright-internal/src/tests/samples/lambda6.py b/packages/pyright-internal/src/tests/samples/lambda6.py new file mode 100644 index 000000000000..a7d856969856 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/lambda6.py @@ -0,0 +1,15 @@ +# This sample validates that lambdas declared within a class +# body do not reference class-scoped variables within the +# lambda return expression. + + +var1 = [1, 2] + + +class A: + x1 = lambda: var1 + + var2 = [1, 2] + + # This should generate an error. 
+ x2 = lambda: var2 diff --git a/packages/pyright-internal/src/tests/samples/list1.py b/packages/pyright-internal/src/tests/samples/list1.py index a4815d4911d5..96276f4c6f78 100644 --- a/packages/pyright-internal/src/tests/samples/list1.py +++ b/packages/pyright-internal/src/tests/samples/list1.py @@ -8,7 +8,6 @@ Dict, Generic, List, - Literal, Optional, Sequence, TypeVar, @@ -16,13 +15,13 @@ v1 = [1, 2, 3] -t_v1: Literal["list[int]"] = reveal_type(v1) +reveal_type(v1, expected_text="list[int]") v2 = [1, 3.4, "hi"] -t_v2: Literal["list[int | float | str]"] = reveal_type(v2) +reveal_type(v2, expected_text="list[int | float | str]") v3 = [] -t_v3: Literal["list[Unknown]"] = reveal_type(v3) +reveal_type(v3, expected_text="list[Unknown]") v4: List[object] = [] @@ -53,18 +52,18 @@ class Bar: v10 = Bar() -t_v10_1: Literal["list[Foo]"] = reveal_type(v10.baz) +reveal_type(v10.baz, expected_text="list[Foo]") v10.baz = [Foo()] -t_v10_2: Literal["list[Foo]"] = reveal_type(v10.baz) +reveal_type(v10.baz, expected_text="list[Foo]") v11: List[Any] = [["hi", ["hi"], [[{}]]]] -t_v11: Literal["list[Any]"] = reveal_type(v11) +reveal_type(v11, expected_text="list[Any]") v12: List[Optional[int]] = [None] * 3 -t_v12: Literal["list[int | None]"] = reveal_type(v12) +reveal_type(v12, expected_text="list[int | None]") v13: List[Optional[str]] = ["3", None] * 2 -t_v13: Literal["list[str | None]"] = reveal_type(v13) +reveal_type(v13, expected_text="list[str | None]") x1 = 3 v14: List[Optional[str]] = [None] * x1 diff --git a/packages/pyright-internal/src/tests/samples/listComprehension7.py b/packages/pyright-internal/src/tests/samples/listComprehension7.py new file mode 100644 index 000000000000..fe0543ac52e3 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/listComprehension7.py @@ -0,0 +1,15 @@ +# This sample validates that list comprehensions within a class +# body do not reference class-scoped variables within the +# comprehension unless they are within the initial iterator 
expression. + +outer_var = [1, 2] + + +class A: + var1 = [1, 2] + var2 = {x for x in var1} + + # This should generate an error. + var3 = {var1[0] for x in var1} + + var4 = {outer_var[0] for x in outer_var} diff --git a/packages/pyright-internal/src/tests/samples/listComprehension8.py b/packages/pyright-internal/src/tests/samples/listComprehension8.py new file mode 100644 index 000000000000..2bf4d8b6c197 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/listComprehension8.py @@ -0,0 +1,15 @@ +# This sample tests the evaluation of a list comprehension where +# there are interdependencies between some of the variables. + +# pyright: strict + + +class Foo: + input: str + output: str + + +def minify1(foo: Foo): + foo.output = "".join( + stripped for line in foo.input.splitlines() if (stripped := line.strip()) + ) diff --git a/packages/pyright-internal/src/tests/samples/literalString1.py b/packages/pyright-internal/src/tests/samples/literalString1.py new file mode 100644 index 000000000000..d57bead896b1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/literalString1.py @@ -0,0 +1,41 @@ +# This sample tests the evaluation of LiteralString as described +# in PEP 675. + +from typing_extensions import Literal, LiteralString + + +def func1(a: str, b: bytes, c: Literal["a"], d: Literal["a", "b"], e: Literal["a", 1]): + # This should generate an error. + v1: LiteralString = a + + # This should generate an error. + v2: LiteralString = b + + # This should generate an error. + v3: LiteralString = b"" + + v4: LiteralString = "Hello!" + + v5: LiteralString = "Hello " + "Bob" + + # This should generate an error. + v6: LiteralString = f"{a}" + + # This should generate an error. + v7: LiteralString[int] + + v8: LiteralString = c + + v9: LiteralString = d + + # This should generate an error. + v10: LiteralString = e + + +def func2(a: str): + ... 
+ + +def func3(a: LiteralString): + func2(a) + a.lower() diff --git a/packages/pyright-internal/src/tests/samples/literals6.py b/packages/pyright-internal/src/tests/samples/literals6.py new file mode 100644 index 000000000000..f10fca5d8cc7 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/literals6.py @@ -0,0 +1,90 @@ +# This sample tests various illegal forms of Literal. + +from typing import Any, Literal, TypeVar +from pathlib import Path + +# This should generate an error. +Wrong1 = Literal[3 + 4] + +# This should generate an error. +Wrong2 = Literal["foo".replace("o", "b")] + +# This should generate an error. +Wrong3 = Literal[4 + 3j] + +# This should generate an error. +Wrong4 = Literal[-4 + 2j] + +# This should generate an error. +Wrong5 = Literal[(1, "foo", "bar")] + +# This should generate an error. +Wrong6 = Literal[{"a": "b", "c": "d"}] + +# This should generate an error. +Wrong7 = Literal[Path("abcd")] +T = TypeVar("T") + +# This should generate an error. +Wrong8 = Literal[T] + +# This should generate an error. +Wrong9 = Literal[3.14] + +# This should generate an error. +Wrong10 = Literal[Any] + +# This should generate an error. +Wrong11 = Literal[...] + + +def func(): + ... + + +# This should generate an error. +Wrong12 = Literal[func] +some_variable = "foo" + +# This should generate an error. +Wrong13 = Literal[some_variable] + + +# This should generate an error. +var1: Literal[3 + 4] + +# This should generate an error. +var2: Literal["foo".replace("o", "b")] + +# This should generate an error. +var3: Literal[4 + 3j] + +# This should generate an error. +var4: Literal[-4 + 2j] + +# This should generate an error. +var5: Literal[(1, "foo", "bar")] + +# This should generate an error. +var6: Literal[{"a": "b", "c": "d"}] + +# This should generate an error. +var7: Literal[Path("abcd")] + +# This should generate an error. +var8: Literal[T] + +# This should generate an error. +var9: Literal[3.14] + +# This should generate an error. 
+var10: Literal[Any] + +# This should generate an error. +var11: Literal[...] + +# This should generate an error. +var12: Literal[func] + +# This should generate an error. +var13: Literal[some_variable] diff --git a/packages/pyright-internal/src/tests/samples/literals7.py b/packages/pyright-internal/src/tests/samples/literals7.py new file mode 100644 index 000000000000..b0b0d519387b --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/literals7.py @@ -0,0 +1,38 @@ +# This sample tests the handling of very large integer values used in +# literals. + +from typing import Literal + + +# This should generate an error. +y1: Literal[ + 900001231231231456487987456452132130000000000000000000000000000001 +] = 900001231231231456487987456452132130000000000000000000000000000000 + +y2: Literal[ + 900001231231231456487987456452132130000000000000000000000000000001 +] = 900001231231231456487987456452132130000000000000000000000000000001 + +reveal_type( + y2, + expected_text="Literal[900001231231231456487987456452132130000000000000000000000000000001]", +) + +y3 = y2 + 1 +reveal_type( + y3, + expected_text="Literal[900001231231231456487987456452132130000000000000000000000000000002]", +) + +y4 = 0xFFFFFFFFFFF123456789456123456789456123456789456123 +reveal_type( + y4, + expected_text="Literal[1606938044258905427252460960878516708721138816242982137979171]", +) + +y5 = 0b101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010101010 +reveal_type(y5, expected_text="Literal[886151997189943915269204706853563050]") + + +y6 = 
0xFFFFFFFFFFFFFFFFADF85458A2BB4A9AAFDC5620273D3CF1D8B9C583CE2D3695A9E13641146433FBCC939DCE249B3EF97D2FE363630C75D8F681B202AEC4617AD3DF1ED5D5FD65612433F51F5F066ED0856365553DED1AF3B557135E7F57C935984F0C70E0E68B77E2A689DAF3EFE8721DF158A136ADE73530ACCA4F483A797ABC0AB182B324FB61D108A94BB2C8E3FBB96ADAB760D7F4681D4F42A3DE394DF4AE56EDE76372BB190B07A7C8EE0A6D709E02FCE1CDF7E2ECC03404CD28342F619172FE9CE98583FF8E4F1232EEF28183C3FE3B1B4C6FAD733BB5FCBC2EC22005C58EF1837D1683B2C6F34A26C1B2EFFA886B4238611FCFDCDE355B3B6519035BBC34F4DEF99C023861B46FC9D6E6C9077AD91D2691F7F7EE598CB0FAC186D91CAEFE130985139270B4130C93BC437944F4FD4452E2D74DD364F2E21E71F54BFF5CAE82AB9C9DF69EE86D2BC522363A0DABC521979B0DEADA1DBF9A42D5C4484E0ABCD06BFA53DDEF3C1B20EE3FD59D7C25E41D2B669E1EF16E6F52C3164DF4FB7930E9E4E58857B6AC7D5F42D69F6D187763CF1D5503400487F55BA57E31CC7A7135C886EFB4318AED6A1E012D9E6832A907600A918130C46DC778F971AD0038092999A333CB8B7A1A1DB93D7140003C2A4ECEA9F98D0ACC0A8291CDCEC97DCF8EC9B55A7F88A46B4DB5A851F44182E1C68A007E5E0DD9020BFD64B645036C7A4E677D2C38532A3A23BA4442CAF53EA63BB454329B7624C8917BDD64B1C0FD4CB38E8C334C701C3ACDAD0657FCCFEC719B1F5C3E4E46041F388147FB4CFDB477A52471F7A9A96910B855322EDB6340D8A00EF092350511E30ABEC1FFF9E3A26E7FB29F8C183023C3587E38DA0077D9B4763E4E4B94B2BBC194C6651E77CAF992EEAAC0232A281BF6B3A739C1226116820AE8DB5847A67CBEF9C9091B462D538CD72B03746AE77F5E62292C311562A846505DC82DB854338AE49F5235C95B91178CCF2DD5CACEF403EC9D1810C6272B045B3B71F9DC6B80D63FDD4A8E9ADB1E6962A69526D43161C1A41D570D7938DAD4A40E329CCFF46AAA36AD004CF600C8381E425A31D951AE64FDB23FCEC9509D43687FEB69EDD1CC5E0B8CC3BDF64B10EF86B63142A3AB8829555B2F747C932665CB2C0F1CC01BD70229388839D2AF05E454504AC78B7582822846C0BA35C35F5C59160CC046FD8251541FC68C9C86B022BB7099876A460E7451A8A93109703FEE1C217E6C3826E52C51AA691E0E423CFC99E9E31650C1217B624816CDAD9A95F9D5B8019488D9C0A0A1FE3075A577E23183F81D4A3F2FA4571EFC8CE0BA8A4FE8B6855DFE72B0A66EDED2FBABFBE58A30FAFABE1C5D71A87E2F741EF8C1FE86FEA6BBFDE530677F0D97D11D49F7A8443D0822E506A9F461
4E011E2A94838FF88CD68C8BB7C5C6424CFFFFFFFFFFFFFFFF +y7 = y6 * y6 diff --git a/packages/pyright-internal/src/tests/samples/loops10.py b/packages/pyright-internal/src/tests/samples/loops10.py index 302121d07fe8..78c9cb6e33cc 100644 --- a/packages/pyright-internal/src/tests/samples/loops10.py +++ b/packages/pyright-internal/src/tests/samples/loops10.py @@ -1,13 +1,11 @@ # This sample tests the case where dependent types within # a loop are assigned using tuples. -from typing import Literal - def fibonacci(): a, b = 1, 1 while True: yield a a, b = b, a + b - t1: Literal["int"] = reveal_type(a) - t2: Literal["int"] = reveal_type(b) + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/loops11.py b/packages/pyright-internal/src/tests/samples/loops11.py new file mode 100644 index 000000000000..4fa979b103ba --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/loops11.py @@ -0,0 +1,22 @@ +# This sample tests for the detection of unbound or partially-unbound +# variables within loops. + +import random + +for a in [1, 2, 3]: + # This should generate an error because b is unbound. + if b == 1: + b = 2 + + +for a in [1, 2, 3]: + if random.random() > 0.5: + c = 2 + + # This should generate an error because c is potentially unbound. + print(c) + +while True: + # This should generate an error because d is unbound. + if d == 1: + d = 2 diff --git a/packages/pyright-internal/src/tests/samples/loops12.py b/packages/pyright-internal/src/tests/samples/loops12.py new file mode 100644 index 000000000000..bd7918d17aec --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/loops12.py @@ -0,0 +1,18 @@ +# This sample tests the evaluation of a variable whose type is narrowed +# within a loop body. + + +class Foo: + def non_property(self) -> int: + ... 
+ + def do_stuff(self, x: int | None): + while True: + if x is not None: + a = x + else: + a = self.non_property + + # This should generate an error because the type of "a" + # is not compatible with a "-" operator. + _ = a - 0 diff --git a/packages/pyright-internal/src/tests/samples/loops13.py b/packages/pyright-internal/src/tests/samples/loops13.py new file mode 100644 index 000000000000..724f36403475 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/loops13.py @@ -0,0 +1,12 @@ +# This sample tests the case where a loop uses tuple assignments. It verifies +# that no "unknown" values are evaluated for variables assigned in the loop. + +# pyright: strict + +nums: list[int] = [1, 2, 3] +max_product, min_product = nums[0], nums[0] + +for x in nums[1:]: + candidates = max_product * x, min_product * x + min_product = min(candidates) + max_product = max(candidates) diff --git a/packages/pyright-internal/src/tests/samples/loops14.py b/packages/pyright-internal/src/tests/samples/loops14.py new file mode 100644 index 000000000000..414e13c1d12f --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/loops14.py @@ -0,0 +1,22 @@ +# This sample tests a loop that modifies a variable through type narrowing. 
+ +from typing import Union + + +class State: + def confirm_dialog(self) -> Union["State", bool]: + return False + + +state = State() +reveal_type(state, expected_text="State") + +for _ in range(1): + result = state.confirm_dialog() + if isinstance(result, State): + reveal_type(state, expected_text="State") + reveal_type(result, expected_text="State") + state = result + else: + reveal_type(state, expected_text="State") + reveal_type(result, expected_text="bool") diff --git a/packages/pyright-internal/src/tests/samples/loops15.py b/packages/pyright-internal/src/tests/samples/loops15.py new file mode 100644 index 000000000000..6cf4ed212e07 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/loops15.py @@ -0,0 +1,22 @@ +# This sample tests loops that contain nested comprehensions and +# variables that depend on each other. + +# pyright: strict + + +def func1(boards: list[list[list[int]]]): + for _ in [0]: + remain = [[set(line) for line in zip(*b)] for b in boards] + boards = [b for b, u in zip(boards, remain) if all(u)] + + +def func2(boards: list[list[list[int]]]): + for _ in [1]: + remain = [[set(line) for line in b] for b in boards] + boards = [b for b, u in zip(boards, remain) if all(u)] + + +def func3(boards: list[list[list[int]]]): + for _ in [1]: + remain = [[set(line) for line in (*b, *zip(*b))] for b in boards] + boards = [b for b, u in zip(boards, remain) if all(u)] diff --git a/packages/pyright-internal/src/tests/samples/loops5.py b/packages/pyright-internal/src/tests/samples/loops5.py index b0cf2a6c1355..31f793539f09 100644 --- a/packages/pyright-internal/src/tests/samples/loops5.py +++ b/packages/pyright-internal/src/tests/samples/loops5.py @@ -4,13 +4,11 @@ # pyright: strict -from typing import Literal - test = {"key": "value"} while True: a = test - t1: Literal["dict[str, str]"] = reveal_type(a) + reveal_type(a, expected_text="dict[str, str]") test = a.copy() - t2: Literal["dict[str, str]"] = reveal_type(test) + reveal_type(test, 
expected_text="dict[str, str]") diff --git a/packages/pyright-internal/src/tests/samples/loops8.py b/packages/pyright-internal/src/tests/samples/loops8.py index 4258d07924e3..a22bca83aadf 100644 --- a/packages/pyright-internal/src/tests/samples/loops8.py +++ b/packages/pyright-internal/src/tests/samples/loops8.py @@ -7,9 +7,11 @@ def test(parts: Iterable[str]): + x: list[str] = [] ns = "" for part in parts: if ns: ns += "a" else: ns += part + x.append(ns) diff --git a/packages/pyright-internal/src/tests/samples/match1.py b/packages/pyright-internal/src/tests/samples/match1.py index c53bfe9309dd..691e06cd4492 100644 --- a/packages/pyright-internal/src/tests/samples/match1.py +++ b/packages/pyright-internal/src/tests/samples/match1.py @@ -5,6 +5,9 @@ value_obj: Any = 4 +class Foo: + x: int + match (1, ): case a1, b1 if True: @@ -114,12 +117,12 @@ case int(): pass - case int(1, a40, value_obj.b as b40, c40=3|-2 + 5j|"hi" as d40, y=[e40, f40] as g40,): + case Foo(1, a40, value_obj.b as b40, c40=3|-2 + 5j|"hi" as d40, y=[e40, f40] as g40,): pass # This should generate an error because positional arguments # cannot appear after keyword arguments. - case str(1, a41, x=3, value_obj.b as b41, c41=3, y=[d41, e41] as f41): + case Foo(1, a41, x=3, value_obj.b as b41, c41=3, y=[d41, e41] as f41): pass # This should generate three errors because irrefutable patterns @@ -149,9 +152,6 @@ pass -class Foo: - x: int - def func1(): match = Foo() @@ -173,7 +173,7 @@ def func2(): def func3(): def match(a: int): ... - # This should be treated as a call statement + # This should be treated as a call statement. match(0) match (0): @@ -184,3 +184,7 @@ def func4(): match 1, 2, "3": case _: pass + +def func5(match: Any): + # This should be treated as a list, not a match statement. 
+ match[2:8, 2:8] = 0 diff --git a/packages/pyright-internal/src/tests/samples/match10.py b/packages/pyright-internal/src/tests/samples/match10.py new file mode 100644 index 000000000000..140725748801 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/match10.py @@ -0,0 +1,61 @@ +# This sample tests the reportMatchNotExhaustive diagnostic check. + +from typing import Literal +from enum import Enum + +def func1(subj: Literal["a", "b"], cond: bool): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case "a": + pass + + case "b" if cond: + pass + + +def func2(subj: object): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case int(): + pass + +def func3(subj: object): + match subj: + case object(): + pass + +def func4(subj: tuple[str] | tuple[int]): + match subj[0]: + case str(): + pass + + case int(): + pass + +def func5(subj: Literal[1, 2, 3]): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case 1 | 2: + pass + +class Color(Enum): + red = 0 + green= 1 + blue = 2 + + +def func6(subj: Color): + # This should generate an error if reportMatchNotExhaustive is enabled. + match subj: + case Color.red: + pass + + case Color.green: + pass + + +class ClassA: + def method1(self) -> str: + match self: + case ClassA(): + return "" diff --git a/packages/pyright-internal/src/tests/samples/match2.py b/packages/pyright-internal/src/tests/samples/match2.py index ae7e5acbf088..60533db1d7a4 100644 --- a/packages/pyright-internal/src/tests/samples/match2.py +++ b/packages/pyright-internal/src/tests/samples/match2.py @@ -1,189 +1,189 @@ # This sample tests type checking for match statements (as # described in PEP 634) that contain sequence patterns. 
-from typing import Any, List, Literal, Protocol, Tuple, TypeVar, Union +from typing import Any, Generic, List, Protocol, Tuple, TypeVar, Union def test_unknown(value_to_match): match value_to_match: case a1, a2: - t_a1: Literal["Unknown"] = reveal_type(a1) - t_a2: Literal["Unknown"] = reveal_type(a2) + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="Unknown") case *b1, b2: - t_b1: Literal["tuple[Unknown, ...]"] = reveal_type(b1) - t_b2: Literal["Unknown"] = reveal_type(b2) + reveal_type(b1, expected_text="list[Unknown]") + reveal_type(b2, expected_text="Unknown") case c1, *c2: - t_c1: Literal["Unknown"] = reveal_type(c1) - t_c2: Literal["tuple[Unknown, ...]"] = reveal_type(c2) + reveal_type(c1, expected_text="Unknown") + reveal_type(c2, expected_text="list[Unknown]") case d1, *d2, d3: - t_d1: Literal["Unknown"] = reveal_type(d1) - t_d2: Literal["tuple[Unknown, ...]"] = reveal_type(d2) - t_d3: Literal["Unknown"] = reveal_type(d3) + reveal_type(d1, expected_text="Unknown") + reveal_type(d2, expected_text="list[Unknown]") + reveal_type(d3, expected_text="Unknown") case 3, *e1: - t_e1: Literal["tuple[Unknown, ...]"] = reveal_type(e1) + reveal_type(e1, expected_text="list[Unknown]") case "hi", *f1: - t_f1: Literal["tuple[Unknown, ...]"] = reveal_type(f1) + reveal_type(f1, expected_text="list[Unknown]") case *g1, "hi": - t_g1: Literal["tuple[Unknown, ...]"] = reveal_type(g1) + reveal_type(g1, expected_text="list[Unknown]") def test_list(value_to_match: List[str]): match value_to_match: case a1, a2: - t_a1: Literal["str"] = reveal_type(a1) - t_a2: Literal["str"] = reveal_type(a2) - t_v1: Literal["List[str]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="str") + reveal_type(a2, expected_text="str") + reveal_type(value_to_match, expected_text="List[str]") case *b1, b2: - t_b1: Literal["tuple[str, ...]"] = reveal_type(b1) - t_b2: Literal["str"] = reveal_type(b2) - t_v2: Literal["List[str]"] = reveal_type(value_to_match) + 
reveal_type(b1, expected_text="list[str]") + reveal_type(b2, expected_text="str") + reveal_type(value_to_match, expected_text="List[str]") case c1, *c2: - t_c1: Literal["str"] = reveal_type(c1) - t_c2: Literal["tuple[str, ...]"] = reveal_type(c2) - t_v3: Literal["List[str]"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="str") + reveal_type(c2, expected_text="list[str]") + reveal_type(value_to_match, expected_text="List[str]") case d1, *d2, d3: - t_d1: Literal["str"] = reveal_type(d1) - t_d2: Literal["tuple[str, ...]"] = reveal_type(d2) - t_d3: Literal["str"] = reveal_type(d3) - t_v4: Literal["List[str]"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="str") + reveal_type(d2, expected_text="list[str]") + reveal_type(d3, expected_text="str") + reveal_type(value_to_match, expected_text="List[str]") case 3, *e1: - t_e1: Literal["Never"] = reveal_type(e1) - t_v5: Literal["Never"] = reveal_type(value_to_match) + reveal_type(e1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case "hi", *f1: - t_f1: Literal["tuple[str, ...]"] = reveal_type(f1) - t_v6: Literal["List[str]"] = reveal_type(value_to_match) + reveal_type(f1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="List[str]") case *g1, "hi": - t_g1: Literal["tuple[str, ...]"] = reveal_type(g1) - t_v7: Literal["List[str]"] = reveal_type(value_to_match) + reveal_type(g1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="List[str]") def test_open_ended_tuple(value_to_match: Tuple[str, ...]): match value_to_match: case a1, a2: - t_a1: Literal["str"] = reveal_type(a1) - t_a2: Literal["str"] = reveal_type(a2) - t_v1: Literal["tuple[str, str]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="str") + reveal_type(a2, expected_text="str") + reveal_type(value_to_match, expected_text="tuple[str, str]") case *b1, b2: - t_b1: Literal["tuple[str, ...]"] = reveal_type(b1) - t_b2: Literal["str"] = reveal_type(b2) - 
t_v2: Literal["Tuple[str, ...]"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="list[str]") + reveal_type(b2, expected_text="str") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") case c1, *c2: - t_c1: Literal["str"] = reveal_type(c1) - t_c2: Literal["tuple[str, ...]"] = reveal_type(c2) - t_v3: Literal["Tuple[str, ...]"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="str") + reveal_type(c2, expected_text="list[str]") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") case d1, *d2, d3: - t_d1: Literal["str"] = reveal_type(d1) - t_d2: Literal["tuple[str, ...]"] = reveal_type(d2) - t_d3: Literal["str"] = reveal_type(d3) - t_v4: Literal["Tuple[str, ...]"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="str") + reveal_type(d2, expected_text="list[str]") + reveal_type(d3, expected_text="str") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") case 3, *e1: - t_e1: Literal["Never"] = reveal_type(e1) - t_v5: Literal["Never"] = reveal_type(value_to_match) + reveal_type(e1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case "hi", *f1: - t_f1: Literal["tuple[str, ...]"] = reveal_type(f1) - t_v6: Literal["Tuple[str, ...]"] = reveal_type(value_to_match) + reveal_type(f1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") case *g1, "hi": - t_g1: Literal["tuple[str, ...]"] = reveal_type(g1) - t_v7: Literal["Tuple[str, ...]"] = reveal_type(value_to_match) + reveal_type(g1, expected_text="list[str]") + reveal_type(value_to_match, expected_text="Tuple[str, ...]") def test_definite_tuple(value_to_match: Tuple[int, str, float, complex]): match value_to_match: case a1, a2, a3, a4: - t_a1: Literal["int"] = reveal_type(a1) - t_a2: Literal["str"] = reveal_type(a2) - t_a3: Literal["float"] = reveal_type(a3) - t_a4: Literal["complex"] = reveal_type(a4) - t_v1: Literal["tuple[int, str, float, complex]"] = reveal_type(value_to_match) + 
reveal_type(a1, expected_text="int") + reveal_type(a2, expected_text="str") + reveal_type(a3, expected_text="float") + reveal_type(a4, expected_text="complex") + reveal_type(value_to_match, expected_text="tuple[int, str, float, complex]") case *b1, b2: - t_b1: Literal["tuple[int, str, float]"] = reveal_type(b1) - t_b2: Literal["complex"] = reveal_type(b2) - t_v2: Literal["tuple[int, str, float, complex]"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="list[int | str | float]") + reveal_type(b2, expected_text="complex") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") case c1, *c2: - t_c1: Literal["int"] = reveal_type(c1) - t_c2: Literal["tuple[str, float, complex]"] = reveal_type(c2) - t_v3: Literal["tuple[int, str, float, complex]"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="int") + reveal_type(c2, expected_text="list[str | float | complex]") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") case d1, *d2, d3: - t_d1: Literal["int"] = reveal_type(d1) - t_d2: Literal["tuple[str, float]"] = reveal_type(d2) - t_d3: Literal["complex"] = reveal_type(d3) - t_v4: Literal["tuple[int, str, float, complex]"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="int") + reveal_type(d2, expected_text="list[str | float]") + reveal_type(d3, expected_text="complex") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") case 3, *e1: - t_e1: Literal["tuple[str, float, complex]"] = reveal_type(e1) - t_v5: Literal["tuple[Literal[3], str, float, complex]"] = reveal_type(value_to_match) + reveal_type(e1, expected_text="list[str | float | complex]") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") case "hi", *f1: - t_f1: Literal["Never"] = reveal_type(f1) - t_v6: Literal["Never"] = reveal_type(value_to_match) + reveal_type(f1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case *g1, 3j: - t_g1: 
Literal["tuple[int, str, float]"] = reveal_type(g1) - t_v7: Literal["tuple[int, str, float, complex]"] = reveal_type(value_to_match) + reveal_type(g1, expected_text="list[int | str | float]") + reveal_type(value_to_match, expected_text="Tuple[int, str, float, complex]") case *h1, "hi": - t_h1: Literal["Never"] = reveal_type(h1) - t_v8: Literal["Never"] = reveal_type(value_to_match) + reveal_type(h1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") def test_union(value_to_match: Union[Tuple[complex, complex], Tuple[int, str, float, complex], List[str], Tuple[float, ...], Any]): match value_to_match: case a1, a2, a3, a4: - t_a1: Literal["int | str | float | Any"] = reveal_type(a1) - t_a2: Literal["str | float | Any"] = reveal_type(a2) - t_a3: Literal["float | str | Any"] = reveal_type(a3) - t_a4: Literal["complex | str | float | Any"] = reveal_type(a4) - t_v1: Literal["tuple[int, str, float, complex] | List[str] | tuple[float, float, float, float] | Any"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="int | str | float | Any") + reveal_type(a2, expected_text="str | float | Any") + reveal_type(a3, expected_text="float | str | Any") + reveal_type(a4, expected_text="complex | str | float | Any") + reveal_type(value_to_match, expected_text="tuple[int, str, float, complex] | List[str] | tuple[float, float, float, float] | Any") case *b1, b2: - t_b1: Literal["tuple[complex] | tuple[int, str, float] | tuple[str, ...] | tuple[float, ...] | tuple[Any, ...]"] = reveal_type(b1) - t_b2: Literal["complex | str | float | Any"] = reveal_type(b2) - t_v2: Literal["tuple[complex, complex] | tuple[int, str, float, complex] | List[str] | Tuple[float, ...] 
| Any"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="list[complex] | list[int | str | float] | list[str] | list[float] | list[Any]") + reveal_type(b2, expected_text="complex | str | float | Any") + reveal_type(value_to_match, expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | List[str] | Tuple[float, ...] | Any") case c1, *c2: - t_c1: Literal["complex | int | str | float | Any"] = reveal_type(c1) - t_c2: Literal["tuple[complex] | tuple[str, float, complex] | tuple[str, ...] | tuple[float, ...] | tuple[Any, ...]"] = reveal_type(c2) - t_v3: Literal["tuple[complex, complex] | tuple[int, str, float, complex] | List[str] | Tuple[float, ...] | Any"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="complex | int | str | float | Any") + reveal_type(c2, expected_text="list[complex] | list[str | float | complex] | list[str] | list[float] | list[Any]") + reveal_type(value_to_match, expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | List[str] | Tuple[float, ...] | Any") case d1, *d2, d3: - t_d1: Literal["complex | int | str | float | Any"] = reveal_type(d1) - t_d2: Literal["tuple[()] | tuple[str, float] | tuple[str, ...] | tuple[float, ...] | tuple[Any, ...]"] = reveal_type(d2) - t_d3: Literal["complex | str | float | Any"] = reveal_type(d3) - t_v4: Literal["tuple[complex, complex] | tuple[int, str, float, complex] | List[str] | Tuple[float, ...] | Any"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="complex | int | str | float | Any") + reveal_type(d2, expected_text="list[str | float] | list[str] | list[float] | list[Any]") + reveal_type(d3, expected_text="complex | str | float | Any") + reveal_type(value_to_match, expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | List[str] | Tuple[float, ...] 
| Any") case 3, e1: - t_e1: Literal["complex | float | Any"] = reveal_type(e1) - t_v5: Literal["tuple[Literal[3], complex] | tuple[Literal[3], float] | Any"] = reveal_type(value_to_match) + reveal_type(e1, expected_text="complex | float | Any") + reveal_type(value_to_match, expected_text="tuple[Literal[3], complex] | tuple[Literal[3], float] | Any") case "hi", *f1: - t_f1: Literal["tuple[str, ...] | tuple[Any, ...]"] = reveal_type(f1) - t_v6: Literal["List[str] | Any"] = reveal_type(value_to_match) + reveal_type(f1, expected_text="list[str] | list[Any]") + reveal_type(value_to_match, expected_text="List[str] | Any") case *g1, 3j: - t_g1: Literal["tuple[complex] | tuple[int, str, float] | tuple[Any, ...]"] = reveal_type(g1) - t_v7: Literal["tuple[complex, complex] | tuple[int, str, float, complex] | Any"] = reveal_type(value_to_match) + reveal_type(g1, expected_text="list[complex] | list[int | str | float] | list[Any]") + reveal_type(value_to_match, expected_text="Tuple[complex, complex] | Tuple[int, str, float, complex] | Any") case *h1, "hi": - t_h1: Literal["tuple[str, ...] | tuple[Any, ...]"] = reveal_type(h1) - t_v8: Literal["List[str] | Any"] = reveal_type(value_to_match) + reveal_type(h1, expected_text="list[str] | list[Any]") + reveal_type(value_to_match, expected_text="List[str] | Any") class SupportsLessThan(Protocol): @@ -196,29 +196,116 @@ def __le__(self, __other: Any) -> bool: ... 
def sort(seq: List[SupportsLessThanT]) -> List[SupportsLessThanT]: match seq: case [] | [_]: - t_v1: Literal["List[SupportsLessThanT@sort]"] = reveal_type(seq) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") return seq case [x, y] if x <= y: - t_v2: Literal["List[SupportsLessThanT@sort]"] = reveal_type(seq) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") return seq case [x, y]: - t_v3: Literal["List[SupportsLessThanT@sort]"] = reveal_type(seq) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") return [y, x] case [x, y, z] if x <= y <= z: - t_v4: Literal["List[SupportsLessThanT@sort]"] = reveal_type(seq) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") return seq case [x, y, z] if x > y > z: - t_v5: Literal["List[SupportsLessThanT@sort]"] = reveal_type(seq) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") return [z, y, x] case [p, *rest]: a = sort([x for x in rest if x <= p]) b = sort([x for x in rest if p < x]) - t_v6: Literal["List[SupportsLessThanT@sort]"] = reveal_type(seq) + reveal_type(seq, expected_text="List[SupportsLessThanT@sort]") return a + [p] + b return seq + +def test_exceptions(seq: Union[str, bytes, bytearray]): + match seq: + case [x, y]: + reveal_type(x, expected_text="Never") + reveal_type(y, expected_text="Never") + return seq + +def test_object(seq: object): + match seq: + case (a1, a2) as a3: + reveal_type(a1, expected_text="object") + reveal_type(a2, expected_text="object") + reveal_type(a3, expected_text="Sequence[object]") + reveal_type(seq, expected_text="Sequence[object]") + + case (*b1, b2) as b3: + reveal_type(b1, expected_text="list[object]") + reveal_type(b2, expected_text="object") + reveal_type(b3, expected_text="Sequence[object]") + reveal_type(seq, expected_text="Sequence[object]") + + case (c1, *c2) as c3: + reveal_type(c1, expected_text="object") + reveal_type(c2, expected_text="list[object]") + reveal_type(c3, expected_text="Sequence[object]") + 
reveal_type(seq, expected_text="Sequence[object]") + + case (d1, *d2, d3) as d4: + reveal_type(d1, expected_text="object") + reveal_type(d2, expected_text="list[object]") + reveal_type(d3, expected_text="object") + reveal_type(d4, expected_text="Sequence[object]") + reveal_type(seq, expected_text="Sequence[object]") + + case (3, *e1) as e2: + reveal_type(e1, expected_text="list[object]") + reveal_type(e2, expected_text="Sequence[object | int]") + reveal_type(seq, expected_text="Sequence[object | int]") + + case ("hi", *f1) as f2: + reveal_type(f1, expected_text="list[object]") + reveal_type(f2, expected_text="Sequence[object | str]") + reveal_type(seq, expected_text="Sequence[object | str]") + + case (*g1, "hi") as g2: + reveal_type(g1, expected_text="list[object]") + reveal_type(g2, expected_text="Sequence[object | str]") + reveal_type(seq, expected_text="Sequence[object | str]") + + case [1, "hi", True] as h1: + reveal_type(h1, expected_text="Sequence[int | str | bool]") + reveal_type(seq, expected_text="Sequence[int | str | bool]") + + case [1, i1] as i2: + reveal_type(i1, expected_text="object") + reveal_type(i2, expected_text="Sequence[object | int]") + reveal_type(seq, expected_text="Sequence[object | int]") + +_T = TypeVar('_T') + +class A(Generic[_T]): + a: _T + +class B: ... +class C: ... + +AAlias = A + +AInt = A[int] + +BOrC = B | C + +def test_illegal_type_alias(m: object): + match m: + case AAlias(a=i): + pass + + # This should generate an error because it raises an + # exception at runtime. + case AInt(a=i): + pass + + # This should generate an error because it raises an + # exception at runtime. 
+ case BOrC(a=i): + pass diff --git a/packages/pyright-internal/src/tests/samples/match3.py b/packages/pyright-internal/src/tests/samples/match3.py index 963ff717774a..9d045a55e870 100644 --- a/packages/pyright-internal/src/tests/samples/match3.py +++ b/packages/pyright-internal/src/tests/samples/match3.py @@ -1,97 +1,124 @@ # This sample tests type checking for match statements (as # described in PEP 634) that contain class patterns. -from typing import Generic, Literal, NamedTuple, TypeVar, Union +from typing import Any, Generic, Literal, NamedTuple, Optional, TypeVar, Union from dataclasses import dataclass, field foo = 3 +T = TypeVar("T") + class ClassA: __match_args__ = ("attr_a", "attr_b") attr_a: int attr_b: str +class ClassB(Generic[T]): + __match_args__ = ("attr_a", "attr_b") + attr_a: T + attr_b: str + +class ClassC: + ... + +class ClassD(ClassC): + ... + + def test_unknown(value_to_match): match value_to_match: case ClassA(attr_a=a2) as a1: - t_a1: Literal["Unknown"] = reveal_type(a1) - t_a2: Literal["Unknown"] = reveal_type(a2) - t_v1: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="ClassA") + reveal_type(a2, expected_text="int") + reveal_type(value_to_match, expected_text="ClassA") # This should generate an error because foo isn't instantiable. 
case foo() as a3: pass -def test_custom_type(value_to_match: ClassA): +def test_any(value_to_match: Any): + match value_to_match: + case list() as a1: + reveal_type(a1, expected_text="list[Unknown]") + reveal_type(value_to_match, expected_text="list[Unknown]") + + +def test_custom_type(value_to_match: ClassA | ClassB[int] | ClassB[str] | ClassC): match value_to_match: case int() as a1: - t_a1: Literal["Never"] = reveal_type(a1) - t_v1: Literal["Never"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case ClassA(attr_a=a4, attr_b=a5) as a3: - t_a3: Literal["ClassA"] = reveal_type(a3) - t_a4: Literal["int"] = reveal_type(a4) - t_a5: Literal["str"] = reveal_type(a5) - t_v3: Literal["ClassA"] = reveal_type(value_to_match) - t_v2: Literal["ClassA"] = reveal_type(value_to_match) + reveal_type(a3, expected_text="ClassA") + reveal_type(a4, expected_text="int") + reveal_type(a5, expected_text="str") + reveal_type(value_to_match, expected_text="ClassA") + reveal_type(value_to_match, expected_text="ClassA") + + case ClassB(a6, a7): + reveal_type(a6, expected_text="int | str") + reveal_type(a7, expected_text="str") + reveal_type(value_to_match, expected_text="ClassB[int] | ClassB[str]") - case ClassA(a6, a7): - t_a6: Literal["int"] = reveal_type(a6) - t_a7: Literal["str"] = reveal_type(a7) - t_v3: Literal["ClassA"] = reveal_type(value_to_match) + case ClassD() as a2: + reveal_type(a2, expected_text="ClassD") + reveal_type(value_to_match, expected_text="ClassD") - case ClassA() as a2: - t_a2: Literal["ClassA"] = reveal_type(a2) - t_v4: Literal["ClassA"] = reveal_type(value_to_match) + case ClassC() as a8: + reveal_type(a8, expected_text="ClassC") + reveal_type(value_to_match, expected_text="ClassC") def test_literal(value_to_match: Literal[3]): match value_to_match: case int() as a1: - t_a1: Literal["Literal[3]"] = reveal_type(a1) - t_v1: Literal["Literal[3]"] = reveal_type(value_to_match) + 
reveal_type(a1, expected_text="Literal[3]") + reveal_type(value_to_match, expected_text="Literal[3]") case float() as a2: - t_a2: Literal["Never"] = reveal_type(a2) - t_v2: Literal["Never"] = reveal_type(value_to_match) + reveal_type(a2, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case str() as a3: - t_a3: Literal["Never"] = reveal_type(a3) - t_v3: Literal["Never"] = reveal_type(value_to_match) + reveal_type(a3, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") -TInt = TypeVar("TInt", bound=int) +TFloat = TypeVar("TFloat", bound=float) -def test_bound_typevar(value_to_match: TInt) -> TInt: +def test_bound_typevar(value_to_match: TFloat) -> TFloat: match value_to_match: case int() as a1: - t_a1: Literal["TInt@test_bound_typevar"] = reveal_type(a1) - t_v1: Literal["TInt@test_bound_typevar"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="int*") + reveal_type(value_to_match, expected_text="int*") case float() as a2: - t_a2: Literal["Never"] = reveal_type(a2) - t_v2: Literal["Never"] = reveal_type(value_to_match) + reveal_type(a2, expected_text="float*") + reveal_type(value_to_match, expected_text="float*") case str() as a3: - t_a3: Literal["Never"] = reveal_type(a3) - t_v3: Literal["Never"] = reveal_type(value_to_match) + reveal_type(a3, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") return value_to_match +TInt = TypeVar("TInt", bound=int) + def test_union(value_to_match: Union[TInt, Literal[3], float, str]) -> Union[TInt, Literal[3], float, str]: match value_to_match: case int() as a1: - t_a1: Literal["TInt@test_union | int"] = reveal_type(a1) - t_v1: Literal["TInt@test_union | int"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="int* | int") + reveal_type(value_to_match, expected_text="int* | int") case float() as a2: - t_a2: Literal["float"] = reveal_type(a2) - t_v2: Literal["float"] = reveal_type(value_to_match) + reveal_type(a2, 
expected_text="float") + reveal_type(value_to_match, expected_text="float") case str() as a3: - t_a3: Literal["str"] = reveal_type(a3) - t_v3: Literal["str"] = reveal_type(value_to_match) + reveal_type(a3, expected_text="str") + reveal_type(value_to_match, expected_text="str") return value_to_match @@ -107,59 +134,59 @@ class Point(Generic[T]): def func1(points: list[Point[float] | Point[complex]]): match points: case [] as a1: - t_a1: Literal["list[Point[float] | Point[complex]]"] = reveal_type(a1) - t_v1: Literal["list[Point[float] | Point[complex]]"] = reveal_type(points) + reveal_type(a1, expected_text="list[Point[float] | Point[complex]]") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") case [Point(0, 0) as b1]: - t_b1: Literal["Point[float] | Point[complex]"] = reveal_type(b1) - t_v2: Literal["list[Point[float] | Point[complex]]"] = reveal_type(points) + reveal_type(b1, expected_text="Point[float] | Point[complex]") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") case [Point(c1, c2)]: - t_c1: Literal["float | complex"] = reveal_type(c1) - t_c2: Literal["float | complex"] = reveal_type(c2) - t_v3: Literal["list[Point[float] | Point[complex]]"] = reveal_type(points) + reveal_type(c1, expected_text="float | complex") + reveal_type(c2, expected_text="float | complex") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") case [Point(0, d1), Point(0, d2)]: - t_d1: Literal["float | complex"] = reveal_type(d1) - t_d2: Literal["float | complex"] = reveal_type(d2) - t_v4: Literal["list[Point[float] | Point[complex]]"] = reveal_type(points) + reveal_type(d1, expected_text="float | complex") + reveal_type(d2, expected_text="float | complex") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") case _ as e1: - t_e1: Literal["list[Point[float] | Point[complex]]"] = reveal_type(e1) - t_v5: Literal["list[Point[float] | Point[complex]]"] = reveal_type(points) + reveal_type(e1, 
expected_text="list[Point[float] | Point[complex]]") + reveal_type(points, expected_text="list[Point[float] | Point[complex]]") def func2(subj: object): match subj: case list() as a1: - t_a1: Literal["list[Unknown]"] = reveal_type(a1) - t_v1: Literal["list[Unknown]"] = reveal_type(subj) + reveal_type(a1, expected_text="list[Unknown]") + reveal_type(subj, expected_text="list[Unknown]") def func3(subj: Union[int, str, dict[str, str]]): match subj: case int(x): - t_x1: Literal["int"] = reveal_type(x) - t_v1: Literal["int"] = reveal_type(subj) + reveal_type(x, expected_text="int") + reveal_type(subj, expected_text="int") case str(x): - t_x2: Literal["str"] = reveal_type(x) - t_v2: Literal["str"] = reveal_type(subj) + reveal_type(x, expected_text="str") + reveal_type(subj, expected_text="str") case dict(x): - t_x3: Literal["dict[str, str]"] = reveal_type(x) - t_v3: Literal["dict[str, str]"] = reveal_type(subj) + reveal_type(x, expected_text="dict[str, str]") + reveal_type(subj, expected_text="dict[str, str]") def func4(subj: object): match subj: case int(x): - t_x1: Literal["int"] = reveal_type(x) - t_v1: Literal["int"] = reveal_type(subj) + reveal_type(x, expected_text="int") + reveal_type(subj, expected_text="int") case str(x): - t_x2: Literal["str"] = reveal_type(x) - t_v2: Literal["str"] = reveal_type(subj) + reveal_type(x, expected_text="str") + reveal_type(subj, expected_text="str") # Test the auto-generation of __match_args__ for dataclass. 
@@ -179,15 +206,15 @@ class Dataclass2: def func5(subj: object): match subj: case Dataclass1(a, b): - t_a1: Literal["int"] = reveal_type(a) - t_b1: Literal["complex"] = reveal_type(b) - t_v1: Literal["Dataclass1"] = reveal_type(subj) + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="complex") + reveal_type(subj, expected_text="Dataclass1") case Dataclass2(a, b, c): - t_a2: Literal["int"] = reveal_type(a) - t_b2: Literal["str"] = reveal_type(b) - t_c2: Literal["float"] = reveal_type(c) - t_v2: Literal["Dataclass2"] = reveal_type(subj) + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + reveal_type(c, expected_text="float") + reveal_type(subj, expected_text="Dataclass2") # Test the auto-generation of __match_args__ for named tuples. @@ -197,19 +224,107 @@ def func5(subj: object): def func6(subj: object): match subj: case NT1(a, b): - t_a1: Literal["int"] = reveal_type(a) - t_b1: Literal["complex"] = reveal_type(b) - t_v1: Literal["NT1"] = reveal_type(subj) + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="complex") + reveal_type(subj, expected_text="NT1") case NT2(a, b, c): - t_a2: Literal["int"] = reveal_type(a) - t_b2: Literal["str"] = reveal_type(b) - t_c2: Literal["float"] = reveal_type(c) - t_v2: Literal["NT2"] = reveal_type(subj) + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + reveal_type(c, expected_text="float") + reveal_type(subj, expected_text="NT2") def func7(subj: object): match subj: case complex(real=a, imag=b): - t_a1: Literal["float"] = reveal_type(a) - t_b1: Literal["float"] = reveal_type(b) + reveal_type(a, expected_text="float") + reveal_type(b, expected_text="float") + +T2 = TypeVar("T2") + + +class Parent(Generic[T]): + ... + + +class Child1(Parent[T]): + ... + + +class Child2(Parent[T], Generic[T, T2]): + ... 
+ + +def func8(subj: Parent[int]): + match subj: + case Child1() as a1: + reveal_type(a1, expected_text='Child1[int]') + reveal_type(subj, expected_text='Child1[int]') + + case Child2() as b1: + reveal_type(b1, expected_text='Child2[int, Unknown]') + reveal_type(subj, expected_text='Child2[int, Unknown]') + +T3 = TypeVar("T3") + +def func9(v: T3) -> Optional[T3]: + match v: + case str(): + reveal_type(v, expected_text='str*') + return v + + case _: + return None + + +T4 = TypeVar("T4", int, str) + +def func10(v: T4) -> Optional[T4]: + match v: + case str(): + reveal_type(v, expected_text='str*') + return v + + case int(): + reveal_type(v, expected_text='int*') + return v + + case list(): + reveal_type(v, expected_text='Never') + return v + + case _: + return None + +def func11(subj: Any): + match subj: + case Child1() as a1: + reveal_type(a1, expected_text='Child1[Unknown]') + reveal_type(subj, expected_text='Child1[Unknown]') + + case Child2() as b1: + reveal_type(b1, expected_text='Child2[Unknown, Unknown]') + reveal_type(subj, expected_text='Child2[Unknown, Unknown]') + + +def func12(subj: int, flt_cls: type[float], union_val: float | int): + match subj: + # This should generate an error because int doesn't accept two arguments. + case int(1, 2): + pass + + match subj: + # This should generate an error because float doesn't accept keyword arguments. + case float(x=1): + pass + + match subj: + # This should generate an error because flt_cls is not a class. + case flt_cls(): + pass + + # This should generate an error because it is a union. 
+ case union_val(): + pass + diff --git a/packages/pyright-internal/src/tests/samples/match4.py b/packages/pyright-internal/src/tests/samples/match4.py index 9bda1717e391..ba32474a7bf2 100644 --- a/packages/pyright-internal/src/tests/samples/match4.py +++ b/packages/pyright-internal/src/tests/samples/match4.py @@ -1,18 +1,18 @@ # This sample tests type checking for match statements (as # described in PEP 634) that contain value patterns. -from enum import Enum -from typing import Literal, Tuple, TypeVar, Union +from enum import Enum, auto +from typing import Tuple, TypeVar, Union from http import HTTPStatus def handle_reply(reply: Tuple[HTTPStatus, str] | Tuple[HTTPStatus]): match reply: case (HTTPStatus.OK as a1, a2): - t_a1: Literal["Literal[HTTPStatus.OK]"] = reveal_type(a1) - t_a2: Literal["str"] = reveal_type(a2) + reveal_type(a1, expected_text="Literal[HTTPStatus.OK]") + reveal_type(a2, expected_text="str") case (HTTPStatus.NOT_FOUND as d1, ): - t_d1: Literal["Literal[HTTPStatus.NOT_FOUND]"] = reveal_type(d1) + reveal_type(d1, expected_text="Literal[HTTPStatus.NOT_FOUND]") class MyEnum(Enum): @@ -27,22 +27,22 @@ def __eq__(self, object: "MyClass") -> bool: ... 
def test_unknown(value_to_match): match value_to_match: case MyEnum.V1 as a1: - t_a1: Literal["Unknown"] = reveal_type(a1) - t_v1: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Unknown") + reveal_type(value_to_match, expected_text="Unknown") def test_enum(value_to_match: MyEnum): match value_to_match: case MyEnum.V1 as a1: - t_a1: Literal["Literal[MyEnum.V1]"] = reveal_type(a1) - t_v1: Literal["Literal[MyEnum.V1]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Literal[MyEnum.V1]") + reveal_type(value_to_match, expected_text="Literal[MyEnum.V1]") def test_class_var(value_to_match: str): match value_to_match: case MyClass.class_var_1 as a1: - t_a1: Literal["Never"] = reveal_type(a1) - t_v1: Literal["Never"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") TInt = TypeVar("TInt", bound=MyEnum) @@ -50,8 +50,51 @@ def test_class_var(value_to_match: str): def test_union(value_to_match: Union[TInt, MyEnum]) -> Union[TInt, MyEnum]: match value_to_match: case MyEnum.V1 as a1: - t_a1: Literal["Literal[MyEnum.V1]"] = reveal_type(a1) - t_v1: Literal["Literal[MyEnum.V1]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Literal[MyEnum.V1]") + reveal_type(value_to_match, expected_text="Literal[MyEnum.V1]") return value_to_match + +class Medal(Enum): + gold = 1 + silver = 2 + bronze = 3 + +class Color(Enum): + red = 1 + blue = 2 + green = 3 + + +def test_enum_narrowing(m: Union[Medal, Color, int]): + match m: + case Medal.gold as a1: + reveal_type(a1, expected_text='Literal[Medal.gold]') + reveal_type(m, expected_text='Literal[Medal.gold]') + + case Medal.silver as b1: + reveal_type(b1, expected_text='Literal[Medal.silver]') + reveal_type(m, expected_text='Literal[Medal.silver]') + + case Color() as c1: + reveal_type(c1, expected_text='Color') + reveal_type(m, expected_text='Color') + + case d1: + reveal_type(d1, expected_text='int | 
Literal[Medal.bronze]') + reveal_type(m, expected_text='int | Literal[Medal.bronze]') + + +class Foo(Enum): + bar = auto() + + def __str__(self) -> str: + match self: + case Foo.bar: + return "bar" + + case x: + reveal_type(x, expected_text="Never") + + diff --git a/packages/pyright-internal/src/tests/samples/match5.py b/packages/pyright-internal/src/tests/samples/match5.py index f47c2812b28f..daee8caf3ffc 100644 --- a/packages/pyright-internal/src/tests/samples/match5.py +++ b/packages/pyright-internal/src/tests/samples/match5.py @@ -6,26 +6,26 @@ def test_unknown(value_to_match): match value_to_match: case {"hello": a1, **a2}: - t_a1: Literal["Unknown"] = reveal_type(a1) - t_a2: Literal["dict[Unknown, Unknown]"] = reveal_type(a2) - t_v1: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Unknown") + reveal_type(a2, expected_text="dict[Unknown, Unknown]") + reveal_type(value_to_match, expected_text="Unknown") def test_dict(value_to_match: Dict[str | int, str | int]): match value_to_match: case {1: a1}: - t_a1: Literal["str | int"] = reveal_type(a1) - t_v1: Literal["Dict[str | int, str | int]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="str | int") + reveal_type(value_to_match, expected_text="Dict[str | int, str | int]") case {"hi": b1, "hi2": b2, **b3}: - t_b1: Literal["str | int"] = reveal_type(b1) - t_b2: Literal["str | int"] = reveal_type(b2) - t_b3: Literal["dict[str | int, str | int]"] = reveal_type(b3) - t_v2: Literal["Dict[str | int, str | int]"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="str | int") + reveal_type(b2, expected_text="str | int") + reveal_type(b3, expected_text="dict[str | int, str | int]") + reveal_type(value_to_match, expected_text="Dict[str | int, str | int]") case {3j: c1}: - t_c1: Literal["Never"] = reveal_type(c1) - t_v3: Literal["Never"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") class 
Movie(TypedDict): @@ -39,32 +39,32 @@ class MovieInfo: def test_typed_dict(value_to_match: Movie): match value_to_match: case {"title": a1, "release_year": a2, **a3}: - t_a1: Literal["str"] = reveal_type(a1) - t_a2: Literal["int"] = reveal_type(a2) - t_a3: Literal["dict[str, Unknown]"] = reveal_type(a3) - t_v1: Literal["Movie"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="str") + reveal_type(a2, expected_text="int") + reveal_type(a3, expected_text="dict[str, Unknown]") + reveal_type(value_to_match, expected_text="Movie") case {3: b1, "title": b2}: - t_b1: Literal["Never"] = reveal_type(b1) - t_b2: Literal["Never"] = reveal_type(b2) - t_v2: Literal["Never"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="Never") + reveal_type(b2, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case {"director": c1}: - t_c1: Literal["Never"] = reveal_type(c1) - t_v2: Literal["Never"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="Never") + reveal_type(value_to_match, expected_text="Never") case {MovieInfo.field_of_interest: d1}: - t_d1: Literal["int | float"] = reveal_type(d1) - t_v1: Literal["Movie"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="int | float") + reveal_type(value_to_match, expected_text="Movie") def test_union(value_to_match: Dict[str | int, str | int] | Movie | str): match value_to_match: case {3: a1}: - t_a1: Literal["str | int"] = reveal_type(a1) - t_v1: Literal["Dict[str | int, str | int]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="str | int") + reveal_type(value_to_match, expected_text="Dict[str | int, str | int]") case {"gross_earnings": b1}: - t_b1: Literal["str | int | float"] = reveal_type(b1) - t_v2: Literal["Dict[str | int, str | int] | Movie"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="str | int | float") + reveal_type(value_to_match, expected_text="Dict[str | int, str | int] | Movie") diff --git 
a/packages/pyright-internal/src/tests/samples/match6.py b/packages/pyright-internal/src/tests/samples/match6.py index 2a8ee3a04ac9..c6f69aee5967 100644 --- a/packages/pyright-internal/src/tests/samples/match6.py +++ b/packages/pyright-internal/src/tests/samples/match6.py @@ -1,57 +1,64 @@ # This sample tests type checking for match statements (as # described in PEP 634) that contain literal patterns. -from typing import Literal def test_unknown(value_to_match): match value_to_match: case 3 as a1, -3 as a2: - t_a1: Literal["Literal[3]"] = reveal_type(a1) - t_a2: Literal["Literal[-3]"] = reveal_type(a2) - t_v1: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="Literal[3]") + reveal_type(a2, expected_text="Literal[-3]") + reveal_type(value_to_match, expected_text="Unknown") case 3j as b1, -3 + 5j as b2: - t_b1: Literal["complex"] = reveal_type(b1) - t_b2: Literal["complex"] = reveal_type(b2) - t_v2: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="complex") + reveal_type(b2, expected_text="complex") + reveal_type(value_to_match, expected_text="Unknown") case "hi" as c1, None as c2: - t_c1: Literal["Literal['hi']"] = reveal_type(c1) - t_c2: Literal["None"] = reveal_type(c2) - t_v3: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="Literal['hi']") + reveal_type(c2, expected_text="None") + reveal_type(value_to_match, expected_text="Unknown") case True as d1, False as d2: - t_d1: Literal["Literal[True]"] = reveal_type(d1) - t_d2: Literal["Literal[False]"] = reveal_type(d2) - t_v4: Literal["Unknown"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="Literal[True]") + reveal_type(d2, expected_text="Literal[False]") + reveal_type(value_to_match, expected_text="Unknown") def test_tuple(value_to_match: tuple[int | float | str | complex, ...]): match value_to_match: case (3, -3) as a1: - t_a1: Literal["tuple[Literal[3], Literal[-3]]"] = reveal_type(a1) - t_v1: 
Literal["tuple[Literal[3], Literal[-3]]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="tuple[Literal[3], Literal[-3]]") + reveal_type(value_to_match, expected_text="tuple[Literal[3], Literal[-3]]") case (3j , -3 + 5j) as b1: - t_b1: Literal["tuple[complex, complex]"] = reveal_type(b1) - t_v2: Literal["tuple[complex, complex]"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="tuple[complex, complex]") + reveal_type(value_to_match, expected_text="tuple[complex, complex]") def test_union(value_to_match: int | float | str | complex | bool | None): match value_to_match: case (3 | -3j) as a1: - t_a1: Literal["complex | Literal[3]"] = reveal_type(a1) - t_v1: Literal["complex | Literal[3]"] = reveal_type(value_to_match) + reveal_type(a1, expected_text="complex | Literal[3]") + reveal_type(value_to_match, expected_text="complex | Literal[3]") case (True | False | 3.4 | -3 + 3j | None) as b1: - t_b1: Literal["float | complex | bool | None"] = reveal_type(b1) - t_v2: Literal["float | complex | bool | None"] = reveal_type(value_to_match) + reveal_type(b1, expected_text="float | complex | bool | None") + reveal_type(value_to_match, expected_text="float | complex | bool | None") case ("hi" | 3.4) as c1: - t_c1: Literal["float | Literal['hi']"] = reveal_type(c1) - t_v3: Literal["float | Literal['hi']"] = reveal_type(value_to_match) + reveal_type(c1, expected_text="float | Literal['hi']") + reveal_type(value_to_match, expected_text="float | Literal['hi']") case ((True | "True") as d1) | ((False | "False") as d1): - t_d1: Literal["bool | Literal['True', 'False']"] = reveal_type(d1) - t_v4: Literal["bool | Literal['True', 'False']"] = reveal_type(value_to_match) + reveal_type(d1, expected_text="bool | Literal['True', 'False']") + reveal_type(value_to_match, expected_text="bool | Literal['True', 'False']") +def test_none(value_to_match: int | None): + match value_to_match: + case None as a1: + reveal_type(a1, expected_text="None") + + case a2: + 
reveal_type(a2, expected_text="int") + diff --git a/packages/pyright-internal/src/tests/samples/match7.py b/packages/pyright-internal/src/tests/samples/match7.py index d499ba46af9a..7326f36b47b6 100644 --- a/packages/pyright-internal/src/tests/samples/match7.py +++ b/packages/pyright-internal/src/tests/samples/match7.py @@ -1,32 +1,30 @@ # This sample tests type narrowing of subject expressions for # match statements. -from typing import Literal - def func1(subj: int | dict[str, str] | tuple[int] | str, cond: bool): match subj: case (3 | "hi"): - t_v1: Literal["Literal[3, 'hi']"] = reveal_type(subj) + reveal_type(subj, expected_text="Literal[3, 'hi']") return case int(y) if cond: - t_v2: Literal["int"] = reveal_type(subj) + reveal_type(subj, expected_text="int") return case int(y): - t_v3: Literal["int"] = reveal_type(subj) + reveal_type(subj, expected_text="int") return case int(): - t_v4: Literal["Never"] = reveal_type(subj) + reveal_type(subj, expected_text="Never") return case str(z): - t_v5: Literal["str"] = reveal_type(subj) + reveal_type(subj, expected_text="str") return - t_v6: Literal["dict[str, str] | tuple[int]"] = reveal_type(subj) + reveal_type(subj, expected_text="dict[str, str] | tuple[int]") return subj @@ -37,7 +35,7 @@ def func2(subj: int | str) -> str: case int(): return "int" - t_v1: Literal['str'] = reveal_type(subj) + reveal_type(subj, expected_text='str') # This should generate an error because there is the potential @@ -50,7 +48,7 @@ def func3(subj: int | str) -> str: case int() if subj < 0: return "int" - t_v1: Literal['int | str'] = reveal_type(subj) + reveal_type(subj, expected_text='int | str') def func4(subj: int | str) -> str: diff --git a/packages/pyright-internal/src/tests/samples/match8.py b/packages/pyright-internal/src/tests/samples/match8.py new file mode 100644 index 000000000000..3a2bc641796d --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/match8.py @@ -0,0 +1,19 @@ +# This sample tests keyword-only class 
pattern matching for +# dataclasses. + +from dataclasses import dataclass, field + +@dataclass +class Point: + optional: int | None = field(default=None, kw_only=True) + x: int + y: int + + +obj = Point(1, 2) +match obj: + case Point(x, y, optional=opt): + reveal_type(x, expected_text="int") + reveal_type(y, expected_text="int") + reveal_type(opt, expected_text="int | None") + distance = (x ** 2 + y ** 2) ** 0.5 \ No newline at end of file diff --git a/packages/pyright-internal/src/tests/samples/match9.py b/packages/pyright-internal/src/tests/samples/match9.py new file mode 100644 index 000000000000..ed3d2df9ddcc --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/match9.py @@ -0,0 +1,53 @@ +# This sample tests class-based pattern matching when the class is +# marked final and can be discriminated based on the argument patterns. + +from typing import final + + +class A: + title: str + +class B: + name: str + +class C: + name: str + +def func1(r: A | B | C): + match r: + case object(title=_): + reveal_type(r, expected_text='A | B | C') + + case object(name=_): + reveal_type(r, expected_text='A | B | C') + + case _: + reveal_type(r, expected_text='A | B | C') + +@final +class AFinal: + title: str + +@final +class BFinal: + name: str + +@final +class CFinal: + name: str + +@final +class DFinal: + nothing: str + + +def func2(r: AFinal | BFinal | CFinal | DFinal): + match r: + case object(title=_): + reveal_type(r, expected_text='AFinal') + + case object(name=_): + reveal_type(r, expected_text='BFinal | CFinal') + + case _: + reveal_type(r, expected_text='DFinal') diff --git a/packages/pyright-internal/src/tests/samples/maxParseDepth1.py b/packages/pyright-internal/src/tests/samples/maxParseDepth1.py new file mode 100644 index 000000000000..14e320586653 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/maxParseDepth1.py @@ -0,0 +1,499 @@ +# This sample tests the parser's ability to detect parse trees that are too deep +# and report an error 
rather than risking a crash in the binder or type evaluator. + +x = 3 + +for _ in range(2): + x = ( + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + 
+ x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + -x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + + x + ) diff --git a/packages/pyright-internal/src/tests/samples/maxParseDepth2.py b/packages/pyright-internal/src/tests/samples/maxParseDepth2.py new file mode 100644 index 000000000000..202ca7d28525 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/maxParseDepth2.py @@ -0,0 +1,16 @@ + +from typing import Any + + +def func(x: dict[int, Any], y: Any): + 
x[0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0][0] + + + y.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x.x + + + 
y()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()()() + + + y.x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]().x[0]() + diff --git a/packages/pyright-internal/src/tests/samples/memberAccess1.py b/packages/pyright-internal/src/tests/samples/memberAccess1.py index 7fc80aa7f364..4c0e13c6076c 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess1.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess1.py @@ -1,7 +1,7 @@ # This sample validates that member access magic functions # like __get__ and __set__ are handled correctly. 
-from typing import Any, Generic, Literal, Type, TypeVar, overload +from typing import Any, Generic, Type, TypeVar, overload from functools import cached_property _T = TypeVar("_T") @@ -25,8 +25,8 @@ def func1(cls): a: Column[str] = cls.bar -t1: Literal["Column[str]"] = reveal_type(Foo.bar) -t2: Literal["str"] = reveal_type(Foo().bar) +reveal_type(Foo.bar, expected_text="Column[str]") +reveal_type(Foo().bar, expected_text="str") class Foo2: @@ -48,4 +48,4 @@ class SomeClass: instance: Factory -t3: Literal["SomeClass"] = reveal_type(SomeClass.instance) +reveal_type(SomeClass.instance, expected_text="SomeClass") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess10.py b/packages/pyright-internal/src/tests/samples/memberAccess10.py index 780c6556b5b3..85e1a4457c9f 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess10.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess10.py @@ -2,7 +2,7 @@ # protocol (i.e. a `__get__` method), and a member is accessed through # the class. -from typing import Any, Literal +from typing import Any class _IntDescriptorMeta(type): @@ -21,8 +21,8 @@ class X: number_cls = IntDescriptorClass -t1: Literal["int"] = reveal_type(X.number_cls) -t2: Literal["int"] = reveal_type(X().number_cls) +reveal_type(X.number_cls, expected_text="int") +reveal_type(X().number_cls, expected_text="int") # This should generate an error X.number_cls = "hi" diff --git a/packages/pyright-internal/src/tests/samples/memberAccess12.py b/packages/pyright-internal/src/tests/samples/memberAccess12.py index eaddceec022f..c203be69719f 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess12.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess12.py @@ -1,7 +1,7 @@ # This sample tests a member access when the metaclass implements a descriptor # protocol. 
-from typing import Any, Literal, Optional, Union, Type, TypeVar, overload +from typing import Any, Optional, Union, Type, TypeVar, overload T = TypeVar("T") @@ -32,5 +32,5 @@ class B: a = A -t1: Literal["Type[A]"] = reveal_type(B.a) -t2: Literal["A"] = reveal_type(B().a) +reveal_type(B.a, expected_text="Type[A]") +reveal_type(B().a, expected_text="A") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess13.py b/packages/pyright-internal/src/tests/samples/memberAccess13.py index 6a9eadc86261..098f3ff0a73b 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess13.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess13.py @@ -1,7 +1,7 @@ # This sample tests a member access when the member is a class # that inherits from Any. -from typing import Literal, Type +from typing import Type from unittest.mock import Mock @@ -9,9 +9,9 @@ class MockProducer: produce: Type[Mock] = Mock -t1: Literal["Type[Mock]"] = reveal_type(MockProducer.produce) -t2: Literal["Type[Mock]"] = reveal_type(MockProducer().produce) +reveal_type(MockProducer.produce, expected_text="Type[Mock]") +reveal_type(MockProducer().produce, expected_text="Type[Mock]") -t3: Literal["Mock"] = reveal_type(MockProducer.produce()) -t3: Literal["Mock"] = reveal_type(MockProducer().produce()) +reveal_type(MockProducer.produce(), expected_text="Mock") +reveal_type(MockProducer().produce(), expected_text="Mock") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess14.py b/packages/pyright-internal/src/tests/samples/memberAccess14.py new file mode 100644 index 000000000000..1b8205e85352 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/memberAccess14.py @@ -0,0 +1,63 @@ +# This sample tests the use of a generic descriptor class that +# is parameterized based on the type of the object that instantiates +# the descriptor. 
+ + +from typing import Any, Callable, Generic, TypeVar, Union, overload + +T = TypeVar("T") +T_contra = TypeVar("T_contra", contravariant=True) +V_co = TypeVar("V_co", covariant=True) +CachedSlotPropertyT = TypeVar( + "CachedSlotPropertyT", bound="CachedSlotProperty[Any, Any]" +) + + +class CachedSlotProperty(Generic[T_contra, V_co]): + def __init__(self, name: str, function: Callable[[T_contra], V_co]) -> None: + ... + + @overload + def __get__( + self: CachedSlotPropertyT, instance: None, owner: type[T_contra] + ) -> CachedSlotPropertyT: + ... + + @overload + def __get__(self, instance: T_contra, owner: Any) -> V_co: + ... + + def __get__( + self: CachedSlotPropertyT, instance: Union[T_contra, None], owner: Any + ) -> Union[CachedSlotPropertyT, V_co]: + ... + + +def cached_slot_property( + name: str, +) -> Callable[[Callable[[T_contra], V_co]], CachedSlotProperty[T_contra, V_co]]: + ... + + +class C(Generic[T]): + def __init__(self, data: T) -> None: + ... + + @cached_slot_property("_prop") + def prop(self) -> int: + ... + + +class D(C[float]): + ... + + +reveal_type(C.prop, expected_text="CachedSlotProperty[Self@C[T@C], int]") +reveal_type(D.prop, expected_text="CachedSlotProperty[D, int]") + + +c = C("") +reveal_type(c.prop, expected_text="int") + +d = D(1) +reveal_type(d.prop, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess15.py b/packages/pyright-internal/src/tests/samples/memberAccess15.py new file mode 100644 index 000000000000..a69cc4b34eed --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/memberAccess15.py @@ -0,0 +1,20 @@ +# This sample tests the case where an accessed member is a +# method that has a "self" or "cls" parameter with no explicit +# type annotation and an inferred type that is based on this value. 
+ + +class A: + async def get(self): + return self + + +class B(A): + pass + + +async def run(): + val1 = await A().get() + reveal_type(val1, expected_text="A") + + val2 = await B().get() + reveal_type(val2, expected_text="B") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess16.py b/packages/pyright-internal/src/tests/samples/memberAccess16.py new file mode 100644 index 000000000000..2330867ea5d8 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/memberAccess16.py @@ -0,0 +1,24 @@ +# This sample tests the case where a member is accessed from a "type" +# instance or a Type[T]. + +# pyright: strict + +from typing import Type, TypeVar + +T = TypeVar("T") + + +def func1(t: Type[T]) -> Type[T]: + def __repr__(self: T) -> str: + ... + + t.__repr__ = __repr__ + return t + + +def func2(t: type) -> type: + def __repr__(self: object) -> str: + ... + + t.__repr__ = __repr__ + return t diff --git a/packages/pyright-internal/src/tests/samples/memberAccess17.py b/packages/pyright-internal/src/tests/samples/memberAccess17.py new file mode 100644 index 000000000000..6767e5bb507d --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/memberAccess17.py @@ -0,0 +1,31 @@ +# This sample tests the case where a __getattr__ method override +# differentiates based on the name of the accessed member. + +from typing import Any, overload, Literal + + +class Obj: + @overload + def __getattr__(self, name: Literal["foo"]) -> int: + ... + + @overload + def __getattr__(self, name: Literal["bar"]) -> str: + ... 
+ + def __getattr__(self, name: str) -> Any: + if name == "foo": + return 1 + return "1" + + +obj = Obj() +b1 = obj.foo +reveal_type(b1, expected_text="int") +b2 = getattr(obj, "foo") +reveal_type(b2, expected_text="Any") + +c1 = obj.bar +reveal_type(c1, expected_text="str") +c2 = getattr(obj, "bar") +reveal_type(c2, expected_text="Any") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess18.py b/packages/pyright-internal/src/tests/samples/memberAccess18.py new file mode 100644 index 000000000000..c9d5849d59cc --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/memberAccess18.py @@ -0,0 +1,31 @@ +# This sample tests that a descriptor returned by a __getattr__ method +# is not applied as part of a member access expression evaluation. + +from typing import Any, Generic, TypeVar + +_T = TypeVar("_T") + + +class A: + pass + + +class Descriptor: + def __get__(self, instance: object, owner: Any) -> A: + return A() + + +class CollectionThing(Generic[_T]): + thing: _T + + def __getitem__(self, key: str) -> _T: + return self.thing + + def __getattr__(self, key: str) -> _T: + return self.thing + + +c1: CollectionThing[Descriptor] = CollectionThing() + +reveal_type(c1["key"], expected_text="Descriptor") +reveal_type(c1.key, expected_text="Descriptor") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess6.py b/packages/pyright-internal/src/tests/samples/memberAccess6.py index 580b9610e281..6715e2010401 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess6.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess6.py @@ -25,7 +25,9 @@ def __get__(self: "Column[_T]", instance: None, type: Any) -> "Column[_T]": def __get__(self: "Column[_T]", instance: FooBase, type: Any) -> _T: ... - def __get__(self, instance: Optional[FooBase], type: Any) -> Optional[_T]: + def __get__( + self, instance: Optional[FooBase], type: Any + ) -> Optional[_T] | "Column[_T]": ... 
diff --git a/packages/pyright-internal/src/tests/samples/memberAccess7.py b/packages/pyright-internal/src/tests/samples/memberAccess7.py index d09c1b73896b..7fedfe7011da 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess7.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess7.py @@ -1,7 +1,7 @@ # This sample tests the handling of a __getattr__ method that returns # a callable. Such a method should not be bound. -from typing import Callable, Literal, TypeVar +from typing import Callable, TypeVar class ClassA: @@ -30,7 +30,7 @@ class ClassB(metaclass=MetaClass): v1 = ClassB.some_function(3) -t_v1: Literal["int"] = reveal_type(v1) +reveal_type(v1, expected_text="int") v2 = ClassB.some_function("hi") -t_v2: Literal["str"] = reveal_type(v2) +reveal_type(v2, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/memberAccess8.py b/packages/pyright-internal/src/tests/samples/memberAccess8.py index 65c932585ee0..fb18905e752d 100644 --- a/packages/pyright-internal/src/tests/samples/memberAccess8.py +++ b/packages/pyright-internal/src/tests/samples/memberAccess8.py @@ -1,6 +1,6 @@ -# This sample tests the use of a generic property class. +# This sample tests the use of a generic descriptor class. 
-from typing import Any, Callable, Generic, Literal, Optional, Type, TypeVar, overload +from typing import Any, Callable, Generic, Optional, Type, TypeVar, overload _T = TypeVar("_T") @@ -26,14 +26,14 @@ class Foo: foo = Foo() v1 = foo.bar -t1: Literal["str"] = reveal_type(v1) +reveal_type(v1, expected_text="str") foo.bar = "" del foo.bar v2 = foo.baz -t2: Literal["list[int]"] = reveal_type(v2) +reveal_type(v2, expected_text="list[int]") foo.baz = [1] del foo.baz @@ -71,5 +71,5 @@ def foo(self) -> str: b = B() -t_b1: Literal["str"] = reveal_type(b.foo) -t_b2: Literal["Minimal[B, str]"] = reveal_type(B.foo) +reveal_type(b.foo, expected_text="str") +reveal_type(B.foo, expected_text="Minimal[B, str]") diff --git a/packages/pyright-internal/src/tests/samples/metaclass4.py b/packages/pyright-internal/src/tests/samples/metaclass4.py index ca9e1ee2d2d4..e3694dfe298e 100644 --- a/packages/pyright-internal/src/tests/samples/metaclass4.py +++ b/packages/pyright-internal/src/tests/samples/metaclass4.py @@ -8,7 +8,9 @@ def do_something(self, p1: str, p2: int): pass -MyCustomClass = MyMeta("MyCustomClass") +MyCustomClass = MyMeta("MyCustomClass", (object,), {}) + +reveal_type(MyCustomClass, expected_text="Type[MyCustomClass]") class DerivedCustomClass(MyCustomClass): diff --git a/packages/pyright-internal/src/tests/samples/metaclass5.py b/packages/pyright-internal/src/tests/samples/metaclass5.py index 64711dce0ad0..995c28a058b6 100644 --- a/packages/pyright-internal/src/tests/samples/metaclass5.py +++ b/packages/pyright-internal/src/tests/samples/metaclass5.py @@ -1,7 +1,7 @@ # This sample tests the handling of metaclass magic methods for # binary operators. 
-from typing import Literal, Type +from typing import Type class MetaFoo(type): @@ -17,17 +17,17 @@ class Foo(metaclass=MetaFoo): def func1(a: Foo): - t1: Literal["Type[Foo]"] = reveal_type(type(a)) - t2: Literal["Type[str]"] = reveal_type(type("string1")) + reveal_type(type(a), expected_text="Type[Foo]") + reveal_type(type("string1"), expected_text="Type[str]") - t3: Literal["bool"] = reveal_type(type(a) == type("hi")) - t4: Literal["bool"] = reveal_type(type("hi") == type("hi")) - t5: Literal["bool"] = reveal_type(str != str) - t6: Literal["str"] = reveal_type(Foo == type(a)) - t7: Literal["bool"] = reveal_type(Foo != type(a)) - t8: Literal["str"] = reveal_type(type(a) == Foo) + reveal_type(type(a) == type("hi"), expected_text="bool") + reveal_type(type("hi") == type("hi"), expected_text="bool") + reveal_type(str != str, expected_text="bool") + reveal_type(Foo == type(a), expected_text="str") + reveal_type(Foo != type(a), expected_text="bool") + reveal_type(type(a) == Foo, expected_text="str") # This should generate an error str + str - t9: Literal["int"] = reveal_type(Foo + Foo) + reveal_type(Foo + Foo, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/metaclass6.py b/packages/pyright-internal/src/tests/samples/metaclass6.py index 902cc0906682..05fb447096e9 100644 --- a/packages/pyright-internal/src/tests/samples/metaclass6.py +++ b/packages/pyright-internal/src/tests/samples/metaclass6.py @@ -2,7 +2,6 @@ # protocols if a class type is passed. 
from enum import Enum -from typing import Literal class Foo(str, Enum): @@ -10,7 +9,7 @@ class Foo(str, Enum): for member in Foo: - t1: Literal["Foo"] = reveal_type(member) + reveal_type(member, expected_text="Foo") foo_members = list(Foo) -t2: Literal["list[Foo]"] = reveal_type(foo_members) +reveal_type(foo_members, expected_text="list[Foo]") diff --git a/packages/pyright-internal/src/tests/samples/metaclass7.py b/packages/pyright-internal/src/tests/samples/metaclass7.py index a05545029f38..e3f7489ff397 100644 --- a/packages/pyright-internal/src/tests/samples/metaclass7.py +++ b/packages/pyright-internal/src/tests/samples/metaclass7.py @@ -2,8 +2,6 @@ # __call__ method, thus overriding the __new__ method on classes # that are created from it. -from typing import Literal - class FactoryMetaClass1(type): def __call__(cls, **kwargs): @@ -20,7 +18,7 @@ class Factory1(BaseFactory1, metaclass=FactoryMetaClass1): v1 = Factory1() -t_v1: Literal["Factory1"] = reveal_type(v1) +reveal_type(v1, expected_text="Factory1") class FactoryMetaClass2(type): @@ -37,4 +35,4 @@ class Factory2(BaseFactory2, metaclass=FactoryMetaClass2): v2 = Factory2() -t_v2: Literal["NoReturn"] = reveal_type(v2) +reveal_type(v2, expected_text="NoReturn") diff --git a/packages/pyright-internal/src/tests/samples/metaclass8.py b/packages/pyright-internal/src/tests/samples/metaclass8.py new file mode 100644 index 000000000000..8c6cf99ba37c --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/metaclass8.py @@ -0,0 +1,20 @@ +# This sample tests the case where a generic class is used +# for a metaclass. + +from typing import Any, Generic, TypeVar + + +T = TypeVar("T") + + +class A(type, Generic[T]): + ... + + +# This should generate an error because generic metaclasses are not allowed. +class B(Generic[T], metaclass=A[T]): + ... + + +class C(metaclass=A[Any]): + ... 
diff --git a/packages/pyright-internal/src/tests/samples/metaclass9.py b/packages/pyright-internal/src/tests/samples/metaclass9.py new file mode 100644 index 000000000000..7fdf2b7445f2 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/metaclass9.py @@ -0,0 +1,77 @@ +# This sample tests the handling of metaclass keyword arguments. + +from typing import Tuple, Dict, Any, Type +from typing_extensions import Self + + +class Meta1(type): + def __new__( + cls: Type[Self], + cls_name: str, + bases: Tuple[type, ...], + attrs: Dict[str, Any], + *, + param1: int, + param2: str, + param3: str = "", + ) -> Self: + ... + + +class Class1_1(metaclass=Meta1, param1=1, param2="", param3=""): + ... + + +class Class1_2(metaclass=Meta1, param2="", param1=1): + ... + + +# This should generate an error because param1 is the wrong type. +class Class1_3(metaclass=Meta1, param1="", param2=""): + ... + + +# This should generate an error because param1 and param2 are missing. +class Class1_4(metaclass=Meta1): + ... + + +# This should generate an error because param4 doesn't exist. +class Class1_5(metaclass=Meta1, param2="", param1=1, param4=3): + ... + + +class Meta2(type): + def __new__( + cls: Type[Self], + cls_name: str, + bases: Tuple[type, ...], + attrs: Dict[str, Any], + *, + param1: int, + **kwargs: str, + ) -> Self: + ... + + +class Class2_1(metaclass=Meta2, param1=1, param2="", param3=""): + ... + + +class Class2_2(metaclass=Meta2, param2="", param1=1, param20=""): + ... + + +# This should generate an error because param1 is the wrong type. +class Class2_3(metaclass=Meta2, param1="", param2=""): + ... + + +# This should generate an error because param1 is missing. +class Class2_4(metaclass=Meta2): + ... + + +# This should generate an error because param4 is the wrong type. +class Class2_5(metaclass=Meta2, param2="", param1=1, param4=3): + ... 
diff --git a/packages/pyright-internal/src/tests/samples/missingSuper1.py b/packages/pyright-internal/src/tests/samples/missingSuper1.py new file mode 100644 index 000000000000..cd45717d329c --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/missingSuper1.py @@ -0,0 +1,73 @@ +# This sample tests the reportMissingSuperCall diagnostic check. + +from typing import final + + +class ParentA: + pass + + +class ParentB: + # This should generate an error because it's missing a super().__init__ call. + def __init__(self): + pass + + +class ParentBPrime(ParentB): + pass + + +class ParentC: + pass + + +@final +class ParentD: + def __init__(self): + pass + + def __init_subclass__(cls) -> None: + pass + + +class ChildA(ParentA, ParentB): + # This should generate an error. + def __init__(self): + pass + + # This should generate an error. + def __init_subclass__(cls) -> None: + pass + + +class ChildB(ParentA, ParentB): + def __init__(self): + super().__init__() + + +class ChildC1(ParentA, ParentB): + def __init__(self): + ParentB.__init__(self) + + +class ChildC2(ParentA, ParentB): + def __init__(self): + ParentA.__init__(self) + ParentB.__init__(self) + + +class ChildCPrime(ParentA, ParentBPrime, ParentC): + def __init__(self): + super(ParentBPrime).__init__() + + +class ChildD(ParentC): + # This should generate an error. + def __init__(self): + pass + + +@final +class ChildE(ParentC): + def __init__(self): + pass diff --git a/packages/pyright-internal/src/tests/samples/mro4.py b/packages/pyright-internal/src/tests/samples/mro4.py new file mode 100644 index 000000000000..43144e733df2 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/mro4.py @@ -0,0 +1,30 @@ +# This sample tests the special-case handling of Generic in a class +# hierarchy. The Generic class implementation in CPython has a method +# called __mro_entries__ that elides the Generic base class in cases +# where one or more subsequent base classes are specialized generic classes. 
+ +from typing import Generic, TypeVar + +T1 = TypeVar("T1") +T2 = TypeVar("T2") + + +class Foo1(Generic[T1]): + ... + + +class Foo2(Generic[T1]): + ... + + +class Bar1(Generic[T1, T2], Foo1[T1], Foo2[T2]): + ... + + +class Bar2(Generic[T1, T2], Foo1, Foo2[T2]): + ... + + +# This should generate an error because a consistent MRO cannot be found. +class Bar3(Generic[T1, T2], Foo1, Foo2): + ... diff --git a/packages/pyright-internal/src/tests/samples/nameBindings4.py b/packages/pyright-internal/src/tests/samples/nameBindings4.py index 32ad26c42c9e..dce32e85daee 100644 --- a/packages/pyright-internal/src/tests/samples/nameBindings4.py +++ b/packages/pyright-internal/src/tests/samples/nameBindings4.py @@ -2,12 +2,9 @@ # inner scope but the target symbol has global binding. -from typing import Literal - - def func1(): global Enum from enum import Enum -t_enum: Literal["Type[Enum]"] = reveal_type(Enum) +reveal_type(Enum, expected_text="Type[Enum]") diff --git a/packages/pyright-internal/src/tests/samples/namedTuples1.py b/packages/pyright-internal/src/tests/samples/namedTuples1.py index ae3ecb33b404..b0ba616a8080 100644 --- a/packages/pyright-internal/src/tests/samples/namedTuples1.py +++ b/packages/pyright-internal/src/tests/samples/namedTuples1.py @@ -79,3 +79,25 @@ NamedTuple6 = namedtuple("NamedTuple6", "field1 field2 field3", defaults=[1, 2]) NamedTuple6() + + +def func1(dyn_str: str): + NamedTuple7 = namedtuple("NamedTuple7", dyn_str) + + n = NamedTuple7() + a, b = n + reveal_type(a, expected_text="Any") + reveal_type(b, expected_text="Any") + + +def func2(): + NamedTuple8 = namedtuple("NamedTuple8", ("a", "b", "c")) + n1 = NamedTuple8(a=1, b=2, c=3) + + a, b, c = n1 + reveal_type(a, expected_text="Unknown") + reveal_type(b, expected_text="Unknown") + reveal_type(c, expected_text="Unknown") + + # This should generate an error. 
+ n2 = NamedTuple8(a=1, b=2) diff --git a/packages/pyright-internal/src/tests/samples/namedTuples4.py b/packages/pyright-internal/src/tests/samples/namedTuples4.py index 4da67f8cb2f0..2f6c7a3dcff8 100644 --- a/packages/pyright-internal/src/tests/samples/namedTuples4.py +++ b/packages/pyright-internal/src/tests/samples/namedTuples4.py @@ -2,7 +2,7 @@ # The synthesized __new__ method should be able to handle this. from collections import namedtuple -from typing import Literal, NamedTuple +from typing import NamedTuple Class1 = namedtuple("Class1", "name") @@ -12,7 +12,7 @@ class Class2(Class1): some_class_member = 1 -t1: Literal["Class2"] = reveal_type(Class2(name="a")) +reveal_type(Class2(name="a"), expected_text="Class2") Class3 = NamedTuple("Class3", [("name", str)]) @@ -21,4 +21,4 @@ class Class4(Class3): some_class_member = 1 -t2: Literal["Class4"] = reveal_type(Class4(name="a")) +reveal_type(Class4(name="a"), expected_text="Class4") diff --git a/packages/pyright-internal/src/tests/samples/namedTuples5.py b/packages/pyright-internal/src/tests/samples/namedTuples5.py new file mode 100644 index 000000000000..a3d6ffb0ed1b --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/namedTuples5.py @@ -0,0 +1,15 @@ +# This sample tests the case where a NamedTuple object is referenced +# through a `self` parameter. 
+ +from typing import NamedTuple + + +class Fruit(NamedTuple): + name: str + cost: float + + def new_cost(self, new_cost: float): + my_name, my_cost = self + reveal_type(my_name, expected_text="str") + reveal_type(my_cost, expected_text="float") + return Fruit(my_name, new_cost) diff --git a/packages/pyright-internal/src/tests/samples/namedTuples6.py b/packages/pyright-internal/src/tests/samples/namedTuples6.py new file mode 100644 index 000000000000..e1bf2360af43 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/namedTuples6.py @@ -0,0 +1,31 @@ +# This sample tests that writes to named attributes within a named +# tuple class are flagged as errors. + +from collections import namedtuple +from typing import NamedTuple + + +class NT1(NamedTuple): + val1: str + val2: int + + +nt1 = NT1("x", 0) + +# This should generate an error. +nt1.val1 = "" + + +NT2 = NamedTuple("NT2", [("val1", str), ("val2", int)]) + +nt2 = NT2("x", 0) + +# This should generate an error. +nt2.val2 = 3 + +NT3 = namedtuple("NT3", ["val1", "val2"]) + +nt3 = NT3("x", 0) + +# This should generate an error. +nt3.val1 = "" diff --git a/packages/pyright-internal/src/tests/samples/never1.py b/packages/pyright-internal/src/tests/samples/never1.py index a371e5cf5cef..28dc91ff3d9e 100644 --- a/packages/pyright-internal/src/tests/samples/never1.py +++ b/packages/pyright-internal/src/tests/samples/never1.py @@ -1,14 +1,12 @@ # This sample verifies that "Never" doesn't appear in # an inferred function return type. 
-from typing import Literal - def func1(a: str = ""): if not isinstance(a, str): - t1: Literal["Never"] = reveal_type(a) + reveal_type(a, expected_text="Never") return [a] x1 = func1() -t1: Literal["list[Unknown] | None"] = reveal_type(x1) +reveal_type(x1, expected_text="list[Unknown] | None") diff --git a/packages/pyright-internal/src/tests/samples/never2.py b/packages/pyright-internal/src/tests/samples/never2.py new file mode 100644 index 000000000000..2d0d4037e51a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/never2.py @@ -0,0 +1,43 @@ +# This sample tests the handling of the "Never" type, +# ensuring that it's treated as the same as NoReturn. + +from typing import NoReturn +from typing_extensions import Never + + +def assert_never1(val: Never) -> NoReturn: + raise Exception("Should never get here") + + +def assert_never2(val: NoReturn) -> NoReturn: + raise Exception("Should never get here") + + +# This should generate an error because Never doesn't accept type arguments. +def assert_never3(val: Never[int]): + ... + + +# This should generate an error because NoReturn doesn't accept type arguments. +def assert_never4(val: NoReturn[int]): + ... + + +def func1(val: str | int) -> str: + if isinstance(val, (str, int)): + return "str or int" + else: + assert_never1(val) + + +def func2(val: str | int) -> str: + if isinstance(val, (str, int)): + return "str or int" + else: + assert_never2(val) + + +reveal_type(assert_never1, expected_text="(val: Never) -> NoReturn") + +# This should generate an error. +assert_never1(1) diff --git a/packages/pyright-internal/src/tests/samples/none1.py b/packages/pyright-internal/src/tests/samples/none1.py index c66cf40c8b57..1ad35209a122 100644 --- a/packages/pyright-internal/src/tests/samples/none1.py +++ b/packages/pyright-internal/src/tests/samples/none1.py @@ -15,3 +15,11 @@ def func1(a: Optional[int]): a.__class__ a.__doc__ + + +def func2(x: type[None]): + ... 
+ + +func2(None.__class__) +func2(type(None)) diff --git a/packages/pyright-internal/src/tests/samples/none2.py b/packages/pyright-internal/src/tests/samples/none2.py new file mode 100644 index 000000000000..09fcb756dc3a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/none2.py @@ -0,0 +1,22 @@ +# This sample checks that Type[None] is handled correctly. + + +from typing import Type + + +def func1(a: Type[None]) -> Type[str] | Type[None]: + reveal_type(a, expected_text="Type[None]") + + # This should generate an error because None is + # not compatible with Type[None]. + return None + + +val1 = func1(type(None)) + +if val1 is not None: + reveal_type(val1, expected_text="Type[str] | Type[None]") + +# This should generate an error because None isn't +# assignable to Type[None]. +val2 = func1(None) diff --git a/packages/pyright-internal/src/tests/samples/noreturn1.py b/packages/pyright-internal/src/tests/samples/noreturn1.py index 457ed1efd050..51a6f0b0b1c9 100644 --- a/packages/pyright-internal/src/tests/samples/noreturn1.py +++ b/packages/pyright-internal/src/tests/samples/noreturn1.py @@ -41,3 +41,38 @@ def func5(x: bool) -> NoReturn: x1: Callable[[bool], bool] = func2 + + +async def func6() -> NoReturn: + ... + + +async def func7() -> NoReturn: + await func6() + + +class A: + def __init__(self) -> NoReturn: + ... + + +def func8() -> NoReturn: + A() + + +class B: + def __new__(cls) -> NoReturn: + ... + + +def func9() -> NoReturn: + B() + + +class C: + def __call__(self) -> NoReturn: + ... + + +def func10() -> NoReturn: + C()() diff --git a/packages/pyright-internal/src/tests/samples/noreturn3.py b/packages/pyright-internal/src/tests/samples/noreturn3.py new file mode 100644 index 000000000000..b226930f573a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/noreturn3.py @@ -0,0 +1,16 @@ +# This sample tests the case where a local NoReturn call depends +# on the inferred type of a local variable. 
+ +from typing import NoReturn + + +class MyClass: + def no_return( + self, + ) -> NoReturn: + ... + + +def client_code() -> NoReturn: + instance = MyClass() + instance.no_return() diff --git a/packages/pyright-internal/src/tests/samples/noreturn4.py b/packages/pyright-internal/src/tests/samples/noreturn4.py new file mode 100644 index 000000000000..79b2c64d020c --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/noreturn4.py @@ -0,0 +1,29 @@ +# This sample verifies that a `NoReturn` type can be assigned +# to any other type. + +from typing import Callable, NoReturn, TypeVar + + +_T = TypeVar("_T", int, str) + + +def func1(x: Callable[[NoReturn], None]): + ... + + +def func2(x: int) -> NoReturn: + ... + + +def func3(x: _T) -> _T: + return x + + +def func4(x: NoReturn): + v1: object = x + v2: int = x + v3: str | int = x + v4: None = x + v5: Callable[[int, str], str] = x + func1(func2) + func3(x) diff --git a/packages/pyright-internal/src/tests/samples/operators6.py b/packages/pyright-internal/src/tests/samples/operators6.py index df98c023f008..331ede6b7fbc 100644 --- a/packages/pyright-internal/src/tests/samples/operators6.py +++ b/packages/pyright-internal/src/tests/samples/operators6.py @@ -2,10 +2,9 @@ # magic methods work correctly. 
import ctypes -from typing import Literal v1 = ctypes.POINTER(ctypes.c_bool) * 3 -t_v1: Literal["Type[Array[pointer[c_bool]]]"] = reveal_type(v1) +reveal_type(v1, expected_text="Type[Array[pointer[c_bool]]]") v2 = 3 * ctypes.POINTER(ctypes.c_bool) -t_v2: Literal["Type[Array[pointer[c_bool]]]"] = reveal_type(v2) +reveal_type(v2, expected_text="Type[Array[pointer[c_bool]]]") diff --git a/packages/pyright-internal/src/tests/samples/operators7.py b/packages/pyright-internal/src/tests/samples/operators7.py new file mode 100644 index 000000000000..f88239a4588d --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/operators7.py @@ -0,0 +1,24 @@ +# This sample tests the handling of binary operators when used with +# generic types. + +from typing import TypeVar + +_TInt = TypeVar("_TInt", bound=int) + + +def func1(n: _TInt) -> _TInt: + x = n + 1 + reveal_type(x, expected_text="int") + + # This should generate an error. + return x + + +_TIntOrStr = TypeVar("_TIntOrStr", int, str) + + +def func2(n: _TIntOrStr) -> _TIntOrStr: + x = n + n + reveal_type(x, expected_text="int* | str*") + + return x diff --git a/packages/pyright-internal/src/tests/samples/operators8.py b/packages/pyright-internal/src/tests/samples/operators8.py new file mode 100644 index 000000000000..eed6e47cf9bf --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/operators8.py @@ -0,0 +1,112 @@ +# This sample tests various "literal math" binary and unary operations that +# are applied when all operands are literal types with the same associated +# class. 
+ +from typing import Literal + + +def func1(a: Literal[1, 2], b: Literal[0, 4], c: Literal[3, 4]): + c1 = a * b + c + reveal_type(c1, expected_text="Literal[3, 4, 7, 8, 11, 12]") + + c2 = a // 0 + reveal_type(c2, expected_text="int") + + c3 = a % 0 + reveal_type(c3, expected_text="int") + + c4 = ((a * 1000) % 39) // c + reveal_type(c4, expected_text="Literal[8, 6, 3, 2]") + + c5 = a + True + reveal_type(c5, expected_text="int") + + c1 -= 5 + reveal_type(c1, expected_text="Literal[-2, -1, 2, 3, 6, 7]") + + c1 = -c1 + reveal_type(c1, expected_text="Literal[2, 1, -2, -3, -6, -7]") + + c1 = +c1 + reveal_type(c1, expected_text="Literal[2, 1, -2, -3, -6, -7]") + + c6 = 1 + for _ in range(100): + c6 += a + reveal_type(c6, expected_text="int") + + +def func2(cond: bool): + c1 = "Hi " + ("Steve" if cond else "Amy") + reveal_type(c1, expected_text="Literal['Hi Steve', 'Hi Amy']") + + +def func3(cond: bool): + c1 = b"Hi " + (b"Steve" if cond else b"Amy") + reveal_type(c1, expected_text="Literal[b'Hi Steve', b'Hi Amy']") + + +def func4(a: Literal[True], b: Literal[False]): + c1 = a and b + reveal_type(c1, expected_text="Literal[False]") + + c2 = a and a + reveal_type(c2, expected_text="Literal[True]") + + c3 = a or b + reveal_type(c3, expected_text="Literal[True]") + + c4 = not a + reveal_type(c4, expected_text="Literal[False]") + + c5 = not b + reveal_type(c5, expected_text="Literal[True]") + + c6 = not b and not a + reveal_type(c6, expected_text="Literal[False]") + + c7 = not b or not a + reveal_type(c7, expected_text="Literal[True]") + + c8 = b + reveal_type(c8, expected_text="Literal[False]") + while True: + c8 = not c8 + reveal_type(c8, expected_text="bool") + + +mode = Literal[ + "a", + "b", + "c", + "d", + "e", + "f", + "g", + "h", + "i", + "j", + "k", + "l", + "m", + "n", + "o", + "p", + "q", + "r", + "s", + "t", + "u", + "v", + "w", + "z", + "y", + "z", +] + + +def func5( + a: mode, b: mode, c: mode, d: mode, e: mode, f: mode, g: mode, h: mode, i: mode +): + # Make 
sure this degenerate case falls back to "str". + reveal_type(a + b + c + d + e + f + g + h + i, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/overload1.py b/packages/pyright-internal/src/tests/samples/overload1.py index 175ede236d5e..5c2abf6e570a 100644 --- a/packages/pyright-internal/src/tests/samples/overload1.py +++ b/packages/pyright-internal/src/tests/samples/overload1.py @@ -1,6 +1,6 @@ # This sample tests the type checker's handling of the overload decorator. -from typing import Literal, overload, Optional +from typing import overload, Optional from datetime import datetime, timezone, timedelta @@ -47,4 +47,4 @@ def func1(x): return x -t_f1: Literal["float"] = reveal_type(func1(abs(0.0))) +reveal_type(func1(abs(0.0)), expected_text="float") diff --git a/packages/pyright-internal/src/tests/samples/overload10.py b/packages/pyright-internal/src/tests/samples/overload10.py new file mode 100644 index 000000000000..742db18e5be2 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/overload10.py @@ -0,0 +1,48 @@ +# This sample tests an overload that provides a signature for +# a *args parameter. + + +from typing import Iterable, Tuple, TypeVar, overload + + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + + +@overload +def func1(__iter1: Iterable[_T1]) -> Tuple[_T1]: + ... + + +@overload +def func1(__iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> Tuple[_T1, _T2]: + ... + + +# This should generate an error because this overload overlaps +# with the first one and returns a different type. +@overload +def func1(*iterables: Iterable[_T1]) -> Tuple[_T1, ...]: + ... + + +def func1(*iterables: Iterable[_T1]) -> Tuple[_T1, ...]: + ... 
+ + +def func2(x: Iterable[int]): + v1 = func1(x) + reveal_type(v1, expected_text="Tuple[int]") + + v2 = func1(x, x) + reveal_type(v2, expected_text="Tuple[int, int]") + + y = [x, x, x, x] + + v3 = func1(*y) + reveal_type(v3, expected_text="Tuple[int, ...]") + + z = (x, x) + + v4 = func1(*z) + reveal_type(v4, expected_text="Tuple[int, int]") diff --git a/packages/pyright-internal/src/tests/samples/overload5.py b/packages/pyright-internal/src/tests/samples/overload5.py index 747808628bbe..824284fdcbb8 100644 --- a/packages/pyright-internal/src/tests/samples/overload5.py +++ b/packages/pyright-internal/src/tests/samples/overload5.py @@ -4,6 +4,7 @@ from typing import ( Any, Generic, + List, Literal, Optional, Tuple, @@ -46,13 +47,13 @@ def func1(*args: Any, **kwargs: Any) -> Any: @overload def func2(a: int, b: Any) -> int: - """ Overload """ + """Overload""" # This should generate an error because the overload is obscured. @overload def func2(a: int, b: int) -> int: - """ Overload """ + """Overload""" def func2(*args: Any, **kwargs: Any) -> Any: @@ -233,3 +234,59 @@ def func15(**kwargs: Any) -> str: def func15(*args: Any, **kwargs: Any) -> Any: pass + + +@overload +def func16(var: None) -> List[Any]: + ... + + +@overload +def func16(var: _T1) -> List[_T1]: + ... + + +def func16(var: Union[_T1, None]) -> List[_T1] | List[Any]: + ... + + +@overload +def func17(a: int, b: List[int]) -> int: + ... + + +@overload +def func17(a: int, b: List[_T1]) -> _T1: + ... + + +def func17(*args: Any, **kwargs: Any) -> Any: + pass + + +class ClassA(Generic[_T1]): + @overload + def __call__(self, f: _T1) -> _T1: + ... + + @overload + def __call__(self, f: _T1 | None) -> _T1: + ... + + def __call__(self, f: _T1 | None) -> _T1: + ... + + +class ClassB: + # This should generate an error because the overload is overlapping. + @overload + def __call__(self, f: _T1) -> _T1: + ... + + # This should generate an error because the overload is overlapped. 
+ @overload + def __call__(self, f: _T1 | None) -> _T1: + ... + + def __call__(self, f: _T1 | None) -> _T1: + ... diff --git a/packages/pyright-internal/src/tests/samples/overload6.py b/packages/pyright-internal/src/tests/samples/overload6.py index 54075408b063..4c543b59defa 100644 --- a/packages/pyright-internal/src/tests/samples/overload6.py +++ b/packages/pyright-internal/src/tests/samples/overload6.py @@ -23,6 +23,7 @@ def __round__(self) -> int: def __round__(self, ndigits: int) -> _T_co: ... + # This should generate an error because the return type isn't compatible. def __round__(self, ndigits: int = 0) -> _T_co: ... @@ -51,7 +52,7 @@ def __round__(self, ndigits: int) -> "Fraction": def __round__(self, ndigits: None = ...) -> int: ... - def __round__(self, ndigits: Optional[int]) -> Union["Fraction", int]: + def __round__(self, ndigits: Optional[int] = None) -> Union["Fraction", int]: ... diff --git a/packages/pyright-internal/src/tests/samples/overload7.py b/packages/pyright-internal/src/tests/samples/overload7.py index 3d9c54f28e94..6ed56485eb59 100644 --- a/packages/pyright-internal/src/tests/samples/overload7.py +++ b/packages/pyright-internal/src/tests/samples/overload7.py @@ -3,10 +3,14 @@ from typing import ( Any, + Awaitable, + Callable, Dict, Generic, + Iterable, List, Literal, + NoReturn, Optional, Tuple, Type, @@ -238,3 +242,66 @@ def func11(var: int) -> int: def func11(var: Union[_T6, int]) -> Union[_T6, int]: ... + + +_T7 = TypeVar("_T7") +_T8 = TypeVar("_T8") +_T9 = TypeVar("_T9") + + +@overload +def func12(func: Callable[[_T7], _T8], iterable: Iterable[_T7], /) -> Iterable[_T8]: + ... + + +@overload +def func12( + func: Callable[[_T7], _T8], iterable: Iterable[_T7], /, default_value: _T9 +) -> Iterable[_T8 | _T9]: + ... + + +def func12( + func: Callable[[_T7], _T8], + iterable: Iterable[_T7], + /, + default_value: _T9 = None, +) -> Iterable[_T8 | _T9]: + ... + + +@overload +def func13(x: int) -> NoReturn: + ... 
+ + +@overload +def func13(x: str) -> str | NoReturn: + ... + + +def func13(x: int | str) -> str: + ... + + +_T14 = TypeVar("_T14") + + +class Wrapper1(Generic[_T14]): + ... + + +@overload +def func14(target: Callable[..., Awaitable[_T14]]) -> Wrapper1[_T14]: + ... + + +@overload +def func14(target: Callable[..., _T14]) -> Wrapper1[_T14]: + ... + + +def func14( + target: Callable[..., Awaitable[_T14]] | Callable[..., _T14] +) -> Wrapper1[_T14]: + ... diff --git a/packages/pyright-internal/src/tests/samples/overload8.py b/packages/pyright-internal/src/tests/samples/overload8.py index 59669dfb93f2..81627add52b4 100644 --- a/packages/pyright-internal/src/tests/samples/overload8.py +++ b/packages/pyright-internal/src/tests/samples/overload8.py @@ -35,7 +35,7 @@ def overloaded1(x: Union[A, B]) -> Union[str, B]: def func1(a: Union[A, B], b: Union[A, B, C]): v1 = overloaded1(a) - t1: Literal["str | B"] = reveal_type(v1) + reveal_type(v1, expected_text="str | B") # This should generate an error because C is not allowed # for the first argument. @@ -78,12 +78,12 @@ def overloaded2( def func2(a: LargeUnion, b: Literal[2, 3, 4], c: Literal[2, 3, 4, 9, 10]): v1 = overloaded2("a", 2) - t1: Literal["str"] = reveal_type(v1) + reveal_type(v1, expected_text="str") v2 = overloaded2(a, b) - t2: Literal["str | float"] = reveal_type(v2) + reveal_type(v2, expected_text="str | float") # This should generate an error because the expansion of union types # will exceed the max number of expansions (64). 
v3 = overloaded2(a, c) - t2: Literal["str | float"] = reveal_type(v2) + reveal_type(v2, expected_text="str | float") diff --git a/packages/pyright-internal/src/tests/samples/paramInference1.py b/packages/pyright-internal/src/tests/samples/paramInference1.py new file mode 100644 index 000000000000..7378d4aa72c8 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramInference1.py @@ -0,0 +1,27 @@ +# This sample tests the logic that infers parameter types based on +# default argument values or annotated base class methods. + + +class Parent: + def func1(self, a: int, b: str) -> float: + ... + + +class Child(Parent): + def func1(self, a, b): + reveal_type(self, expected_text="Self@Child") + reveal_type(a, expected_text="int") + reveal_type(b, expected_text="str") + return a + + +def func2(a, b=0, c=None): + reveal_type(a, expected_text="Unknown") + reveal_type(b, expected_text="int") + reveal_type(c, expected_text="Unknown | None") + + +def func3(a=(1, 2), b=[1,2], c={1: 2}): + reveal_type(a, expected_text="Unknown") + reveal_type(b, expected_text="Unknown") + reveal_type(c, expected_text="Unknown") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec1.py b/packages/pyright-internal/src/tests/samples/paramSpec1.py index 1b9775acedb0..5212601b2a42 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec1.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec1.py @@ -1,6 +1,6 @@ # This sample tests error conditions for ParamSpec (PEP 612). -from typing import Any, Callable, List, ParamSpec, Tuple, cast +from typing import Any, Callable, List, ParamSpec, Protocol, Tuple, cast TParams = ParamSpec("TParams") @@ -33,3 +33,32 @@ def func1(x: Callable[TParams, Any]): # This should generate an error. g: Tuple[TParams] + + +P = ParamSpec("P") + + +class SomeWrapper(Protocol[P]): + def __call__(self, *args: P.args, **kwargs: P.kwargs): + ... + + +# This should generate an error because P cannot be used with other +# type arguments. 
+def func2(x: SomeWrapper[P, int]): + pass + + +# This should generate an error because P cannot be used with other +# type arguments. +def func3(x: SomeWrapper[[P, int]]): + pass + + +def func4(x: SomeWrapper[P]): + pass + + +# This form is considered an error. +def func5(x: SomeWrapper[[P]]): + pass diff --git a/packages/pyright-internal/src/tests/samples/paramSpec10.py b/packages/pyright-internal/src/tests/samples/paramSpec10.py index c122df553b8d..90f611e07a36 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec10.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec10.py @@ -1,7 +1,7 @@ # This sample tests the use of ParamSpec along with Concatenate in # a return type. -from typing import Callable, Literal, Protocol, TypeVar, Concatenate, ParamSpec +from typing import Callable, Protocol, TypeVar, Concatenate, ParamSpec from threading import RLock import functools @@ -45,13 +45,13 @@ def test_3(cls: MyClass, param1: int) -> str: testClass = MyClass() res1 = testClass.test_1(42) -t_res1: Literal["str"] = reveal_type(res1) +reveal_type(res1, expected_text="str") res2 = testClass.test_2() -t_res2: Literal["str"] = reveal_type(res2) +reveal_type(res2, expected_text="str") res3 = test_3(testClass, 42) -t_res3: Literal["str"] = reveal_type(res3) +reveal_type(res3, expected_text="str") res4: Callable[[MyClass, int], str] = with_lock(test_3) -t_res4: Literal["(__p0: MyClass, param1: int) -> str"] = reveal_type(res4) +reveal_type(res4, expected_text="(MyClass, param1: int) -> str") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec12.py b/packages/pyright-internal/src/tests/samples/paramSpec12.py index b3ce05f39780..92bebdb5f800 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec12.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec12.py @@ -1,6 +1,6 @@ # This sample tests various error conditions for ParamSpec usage. 
-from typing import Callable, TypeVar, ParamSpec +from typing import Annotated, Callable, TypeVar, ParamSpec P = ParamSpec("P") R = TypeVar("R") @@ -19,6 +19,12 @@ def inner3(*args: P.args, **kwargs: "P.kwargs") -> None: def inner4(*args: "P.args", **kwargs: "P.kwargs") -> None: pass + def inner5(*args: Annotated[P.args, ""], **kwargs: "P.kwargs") -> None: + pass + + def inner6(*args: P.args, **kwargs: Annotated["P.kwargs", ""]) -> None: + pass + # This should generate two errors because P.kwargs cannot be # used with *args and P.args cannot be used with **kwargs. def mixed_up(*args: P.kwargs, **kwargs: P.args) -> None: diff --git a/packages/pyright-internal/src/tests/samples/paramSpec13.py b/packages/pyright-internal/src/tests/samples/paramSpec13.py index 82e8656fe420..60137aab6b21 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec13.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec13.py @@ -1,7 +1,7 @@ # This sample tests cases where a ParamSpec is used as a type parameter # for a generic type alias, a generic function, and a generic class. -from typing import Callable, Concatenate, Generic, List, Literal, ParamSpec, TypeVar +from typing import Callable, Concatenate, Generic, List, ParamSpec, TypeVar _P = ParamSpec("_P") @@ -21,7 +21,7 @@ def func2(a: str, b: List[int]) -> str: v1 = func1(func2) -t_v1: Literal["(__p0: int, a: str, b: List[int]) -> str"] = reveal_type(v1) +reveal_type(v1, expected_text="(int, a: str, b: List[int]) -> str") # This should generate an error because 'int' isn't assignable to # ParamSpec _P. 
@@ -44,13 +44,13 @@ def remote(self, *args: _P.args, **kwargs: _P.kwargs) -> RemoteResponse[_R]: r1 = RemoteFunction(func2) -t_r1: Literal["RemoteFunction[(a: str, b: List[int]), str]"] = reveal_type(r1) +reveal_type(r1, expected_text="RemoteFunction[(a: str, b: List[int]), str]") v2 = r1("hi", []) -r_v2: Literal["str"] = reveal_type(v2) +reveal_type(v2, expected_text="str") v3 = r1.remote("hi", []) -r_v3: Literal["RemoteResponse[str]"] = reveal_type(v3) +reveal_type(v3, expected_text="RemoteResponse[str]") # This should generate an error r1(1, []) @@ -71,4 +71,4 @@ def remote(func: Callable[_P, _R]) -> RemoteFunction[_P, _R]: v4 = remote(func2) -t_v4: Literal["RemoteFunction[(a: str, b: List[int]), str]"] = reveal_type(v4) +reveal_type(v4, expected_text="RemoteFunction[(a: str, b: List[int]), str]") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec14.py b/packages/pyright-internal/src/tests/samples/paramSpec14.py index 9ca429d8b579..fc1ff55e0727 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec14.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec14.py @@ -1,7 +1,7 @@ # This sample tests the handling of ParamSpec when used with # static methods and class methods. 
-from typing import Literal, Callable +from typing import Callable from typing_extensions import ParamSpec P = ParamSpec("P") @@ -26,5 +26,5 @@ def identity_static(val: float) -> float: return val -t1: Literal["int"] = reveal_type(Foo.identity_cls(1.2)) -t2: Literal["int"] = reveal_type(Foo.identity_static(1.2)) +reveal_type(Foo.identity_cls(1.2), expected_text="int") +reveal_type(Foo.identity_static(1.2), expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec15.py b/packages/pyright-internal/src/tests/samples/paramSpec15.py index 66fd807191ae..05ceccfd705a 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec15.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec15.py @@ -1,7 +1,7 @@ # This sample tests the handling of generic classes parameterized # with a ParamSpec. -from typing import Callable, Generic, Literal, TypeVar +from typing import Callable, Generic, TypeVar from typing_extensions import ParamSpec P = ParamSpec("P") @@ -22,7 +22,7 @@ def func(a: int) -> str: a = Foo(func) -t_a: Literal["Foo[(a: int), str]"] = reveal_type(a) +reveal_type(a, expected_text="Foo[(a: int), str]") c = foo(a, 2) -t_c: Literal["str"] = reveal_type(c) +reveal_type(c, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec16.py b/packages/pyright-internal/src/tests/samples/paramSpec16.py index 6432ba7e9251..e0784eb6cd0d 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec16.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec16.py @@ -1,7 +1,7 @@ # This sample tests the matching of nested callables that each use # ParamSpec. -from typing import Callable, Generic, Literal, TypeVar +from typing import Callable, Generic, TypeVar from typing_extensions import Concatenate, ParamSpec @@ -25,4 +25,4 @@ def bar(func: Callable[[int], float], a: str) -> bool: ... 
-t1: Literal["Foo[(_p0: int), float, (a: str), bool]"] = reveal_type(bar) +reveal_type(bar, expected_text="Foo[(int), float, (a: str), bool]") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec17.py b/packages/pyright-internal/src/tests/samples/paramSpec17.py index 6cf2de38c2e3..821b1840a731 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec17.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec17.py @@ -17,6 +17,12 @@ def repr_func_call(func: Callable[P, object], *args: P.args, **kwargs: P.kwargs) return func.__name__ + "(" + ", ".join(arg_reprs) + ")" +def print( + *values: object, + sep: str | None = ..., + end: str | None = ..., +) -> None: ... + repr_func_call(print) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec18.py b/packages/pyright-internal/src/tests/samples/paramSpec18.py index cfcc183598f5..6018d06ca1a7 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec18.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec18.py @@ -1,6 +1,6 @@ # This sample tests the handling of a ParamSpec within a callback protocol. -from typing import Callable, Concatenate, Literal, ParamSpec, Protocol +from typing import Callable, Concatenate, ParamSpec, Protocol P = ParamSpec("P") @@ -18,7 +18,7 @@ def func_with_callable(cb: FooCallableWithConcatenate[P]) -> Callable[P, bool]: x1 = func_with_callable(callback) -t1: Literal["(b: str, c: str) -> bool"] = reveal_type(x1) +reveal_type(x1, expected_text="(b: str, c: str) -> bool") class FooWithConcatenate(Protocol[P]): @@ -31,4 +31,13 @@ def func_with_protocol(cb: FooWithConcatenate[P]) -> Callable[P, bool]: x2 = func_with_protocol(callback) -t2: Literal["(b: str, c: str) -> bool"] = reveal_type(x2) +reveal_type(x2, expected_text="(b: str, c: str) -> bool") + + +class CallbackPos(Protocol[P]): + def __call__(self, /, *args: P.args, **kwargs: P.kwargs) -> None: + ... 
+ + +def invoke_pos(cb: CallbackPos[P], /, *args: P.args, **kwargs: P.kwargs) -> None: + cb(*args, **kwargs) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec19.py b/packages/pyright-internal/src/tests/samples/paramSpec19.py new file mode 100644 index 000000000000..652f8c500870 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec19.py @@ -0,0 +1,93 @@ +# This sample tests the case where a ParamSpec is used within a generic +# type alias with a Callable. + +from typing import Any, Callable, Generic, Protocol +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") + +# Example 1: Callable generic type alias + +CommandHandler1 = Callable[Concatenate[int, P], dict[str, Any]] + + +class Command1(Generic[P]): + def __init__(self, handler: CommandHandler1[P]) -> None: + ... + + +class Application1: + def func1(self, handler: CommandHandler1[P]) -> Command1[P]: + return Command1(handler) + + def func2( + self, + handler: CommandHandler1[P], + ) -> Callable[[CommandHandler1[P]], Command1[P]]: + def decorator(handler: CommandHandler1[P]) -> Command1[P]: + return self.func1(handler) + + return decorator + + +# Example 2: Callback Protocol + + +class CommandHandler2(Protocol[P]): + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> dict[str, Any]: + ... + + +class Command2(Generic[P]): + def __init__(self, handler: CommandHandler2[P]) -> None: + ... + + +class Application2: + def func1(self, handler: CommandHandler2[P]) -> Command2[P]: + return Command2(handler) + + def func2( + self, + handler: CommandHandler2[P], + ) -> Callable[[CommandHandler2[P]], Command2[P]]: + def decorator(handler: CommandHandler2[P]) -> Command2[P]: + return self.func1(handler) + + return decorator + + +def handler(arg1: int, arg2: str) -> dict[str, Any]: + ... 
+ + +v1: CommandHandler2 = handler + + +def func1_1(x: CommandHandler1[str]): + x(3, "hi") + + +def func1_2(x: CommandHandler1[[str, int]]): + x(3, "hi", 3) + + +def func2_1(x: CommandHandler2[str]): + x("hi") + + +def func2_2(x: CommandHandler2[[str, int]]): + x("hi", 3) + + +HandlerAlias = Callable[P, None] + +list_of_handlers: list[HandlerAlias[...]] = [] + + +class HandlerProtocol(Protocol[P]): + def __call__(self, /, *args: P.args, **kwargs: P.kwargs) -> None: + ... + + +list_of_handler_protocols: list[HandlerProtocol[...]] = [] diff --git a/packages/pyright-internal/src/tests/samples/paramSpec20.py b/packages/pyright-internal/src/tests/samples/paramSpec20.py new file mode 100644 index 000000000000..02b38ae32dcb --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec20.py @@ -0,0 +1,87 @@ +# This sample tests the handling of class specialization expressions +# that provide signatures for ParamSpecs. + +from typing import Any, Callable, Concatenate, Generic, ParamSpec, TypeVar + + +T = TypeVar("T") +P1 = ParamSpec("P1") +P2 = ParamSpec("P2") + + +class X(Generic[T, P1]): + f: Callable[P1, int] + x: T + + +def x1(x: X[int, P2]) -> str: + ... + + +def x2(x: X[int, Concatenate[int, P2]]) -> str: + ... + + +def X3(x: X[int, [int, bool]]) -> str: + ... + + +def x4(x: X[int, ...]) -> str: + ... + + +# This should generate an error because "int" can't be bound to a ParamSpec. +def x5(x: X[int, int]) -> str: + ... + + +# This should generate an error. +def x6(x: X[..., ...]) -> str: + ... + + +# This should generate an error. +def x7(x: X[[int], [int, int]]) -> str: + ... + + +class Y(Generic[P2]): + def __init__(self, cb: Callable[P2, Any]) -> None: + ... + + def m1(self) -> X[int, Concatenate[float, P2]]: + ... 
+ + +y1 = Y(x4) +reveal_type(y1, expected_text="Y[(x: X[int, (...)])]") + +y2 = y1.m1() +reveal_type(y2, expected_text="X[int, (float, x: X[int, (...)])]") + + +class Z(Generic[P1]): + f: Callable[P1, int] + + +def z1(x: Z[[int, str, bool]]) -> str: + ... + + +def z2(x: Z[int, str, bool]) -> str: + ... + + +# This should generate an error. +def z3(x: Z[[int, [str], bool]]) -> str: + ... + + +# This should generate an error. +def z4(x: Z[[[int, str, bool]]]) -> str: + ... + + +# This should generate an error. +def z5(x: Z[[...]]) -> str: + ... diff --git a/packages/pyright-internal/src/tests/samples/paramSpec21.py b/packages/pyright-internal/src/tests/samples/paramSpec21.py new file mode 100644 index 000000000000..3fba562f8472 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec21.py @@ -0,0 +1,58 @@ +# This sample tests the case where a ParamSpec is used within a source +# and destination callback protocol. + +from typing import Callable, Protocol +from typing_extensions import Concatenate, ParamSpec + +P1 = ParamSpec("P1") +P2 = ParamSpec("P2") +P3 = ParamSpec("P3") +P4 = ParamSpec("P4") + + +class Context: + ... + + +class Response: + ... + + +class ContextCallback(Protocol[P1]): + def __call__( + self, ctx: Context, /, *args: P1.args, **kwargs: P1.kwargs + ) -> Response: + ... + + +def call_context_callback( + callback: ContextCallback[P3], /, *args: P3.args, **kwargs: P3.kwargs +) -> Response: + ... + + +class IntContextCallback(Protocol[P2]): + def __call__( + self, ctx: Context, value: int, /, *args: P2.args, **kwargs: P2.kwargs + ) -> Response: + ... 
+ + +def call_int_context_callback( + callback: IntContextCallback[P4], value: int, /, *args: P4.args, **kwargs: P4.kwargs +) -> Response: + return call_context_callback(callback, value, *args, **kwargs) + + +P5 = ParamSpec("P5") +P6 = ParamSpec("P6") +P7 = ParamSpec("P7") + +ContextCallable = Callable[Concatenate[Context, P5], Response] +IntContextCallable = Callable[Concatenate[Context, int, P6], Response] + + +def call_int_context_callable( + callback: IntContextCallable[P7], value: int, /, *args: P7.args, **kwargs: P7.kwargs +) -> Response: + return call_context_callback(callback, value, *args, **kwargs) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec22.py b/packages/pyright-internal/src/tests/samples/paramSpec22.py new file mode 100644 index 000000000000..f803927eac59 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec22.py @@ -0,0 +1,26 @@ +# This sample tests the case where a specialized generic class that uses +# a ParamSpec and a callback protocol is assigned to a Callable that +# uses a ParamSpec. + +from typing import Callable, Generic, TypeVar +from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") +OUT = TypeVar("OUT") + + +class MyPartial(Generic[P, OUT]): + def __init__(self, first: int, func: Callable[Concatenate[int, P], OUT]) -> None: + self.first = first + self.func = func + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> OUT: + ... 
+ + +class MyPartialCreator(Generic[P, OUT]): + def __init__(self, func: Callable[Concatenate[int, P], OUT]): + self.func = func + + def create_partial(self, first: int) -> Callable[P, OUT]: + return MyPartial[P, OUT](first=first, func=self.func) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec23.py b/packages/pyright-internal/src/tests/samples/paramSpec23.py new file mode 100644 index 000000000000..d5f1078c36c6 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec23.py @@ -0,0 +1,21 @@ +# This sample tests the case where a Callable that includes a Concatenate +# is assigned to a ParamSpec that doesn't include a Concatenate. + + +from typing import Callable, TypeVar +from typing_extensions import Concatenate, ParamSpec + +Pi = ParamSpec("Pi") + + +def is_inty(f: Callable[Pi, object]) -> Callable[Pi, int]: + ... + + +Po = ParamSpec("Po") +T = TypeVar("T") + + +def outer(f: Callable[Concatenate[str, Po], object]): + x = is_inty(f) + reveal_type(x, expected_text="(str, **Po@outer) -> int") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec24.py b/packages/pyright-internal/src/tests/samples/paramSpec24.py new file mode 100644 index 000000000000..938b8fd50b97 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec24.py @@ -0,0 +1,67 @@ +# This sample tests the case where a Callable that includes a Concatenate +# is used as an input parameter to a function that returns a generic +# type parameterized by a ParamSpec and specialized with a Concatenate. 
+ +from __future__ import annotations +from typing_extensions import Self, Concatenate, ParamSpec +from typing import Any, Callable, TypeVar, Protocol, Generic, overload + +T = TypeVar("T") +O = TypeVar("O") +P = ParamSpec("P") + + +class _callable_cache(Protocol[P, T]): + foo: int = 0 + val: T + + def __init__(self, val: T) -> None: + self.val = val + + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> T: + return self.val + + +class _wrapped_cache(_callable_cache[P, T], Generic[O, P, T]): + @overload + def __get__( # type: ignore + self, instance: None, owner: type[O] + ) -> _callable_cache[Concatenate[O, P], T]: + ... + + @overload + def __get__(self, instance: O, owner: type[O]) -> Self: + ... + + +@overload +def cache(fn: Callable[Concatenate[A, P], T]) -> _wrapped_cache[A, P, T]: # type: ignore + ... + + +@overload +def cache(fn: Callable[P, T]) -> _wrapped_cache[Any, P, T]: + ... + + +@cache +def not_in_class(a: int, b: str) -> str: + ... + + +class A: + @cache + def in_class(self, a: int, b: str) -> str: + ... + + +reveal_type(not_in_class, expected_text="_wrapped_cache[Any, (a: int, b: str), str]") +not_in_class(1, "") + +a = A() + +reveal_type(a.in_class, expected_text="_wrapped_cache[A, (a: int, b: str), str]") +a.in_class(1, "") + +reveal_type(A.in_class, expected_text="_callable_cache[(A, a: int, b: str), str]") +A.in_class(a, 1, "") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec25.py b/packages/pyright-internal/src/tests/samples/paramSpec25.py new file mode 100644 index 000000000000..5eafcb08fe71 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec25.py @@ -0,0 +1,36 @@ +# This sample tests the case where a generic type uses a ParamSpec +# as a type parameter and it is specialized using an empty signature. + +from typing import Any, Callable, Concatenate, Generic, ParamSpec + +P = ParamSpec("P") + + +class Context: + ... 
+ + +CommandHandler = Callable[Concatenate[Context, P], Any] + + +class Command(Generic[P]): + def __init__( + self, + handler: CommandHandler[P], + ) -> None: + ... + + +def handler_no_args(ctx: Context) -> None: + ... + + +def handler_one_arg(ctx: Context, a: int) -> None: + ... + + +cmd_no_args = Command(handler_no_args) +reveal_type(cmd_no_args, expected_text="Command[()]") + +cmd_one_arg = Command(handler_one_arg) +reveal_type(cmd_one_arg, expected_text="Command[(a: int)]") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec26.py b/packages/pyright-internal/src/tests/samples/paramSpec26.py new file mode 100644 index 000000000000..ad9bf946812b --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec26.py @@ -0,0 +1,25 @@ +# This sample tests the case where a generic class parameterized by a +# ParamSpec is specialized using a Concatenate[] type argument. + +from typing import ParamSpec, Concatenate, Generic, Callable, Any + +P = ParamSpec("P") + + +class A(Generic[P]): + def __init__(self, func: Callable[P, Any]) -> None: + ... + + +def func1(baz: A[Concatenate[int, P]]) -> A[P]: + ... + + +def test(a: int, b: str) -> str: + ... + + +val1 = A(test) +reveal_type(val1, expected_text="A[(a: int, b: str)]") +val2 = func1(val1) +reveal_type(val2, expected_text="A[(b: str)]") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec27.py b/packages/pyright-internal/src/tests/samples/paramSpec27.py new file mode 100644 index 000000000000..947955c94f2d --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec27.py @@ -0,0 +1,57 @@ +# This sample tests the case where an ellipsis is used to specialize +# a generic class parameterized by a ParamSpec. + +from typing import Callable, Generic, Protocol +from typing_extensions import Concatenate, ParamSpec, TypeAlias + +P = ParamSpec("P") + + +def func1(a: int, b: str) -> None: + ... + + +def func2(a: str, b: str) -> None: + ... 
+ + +class Handler(Protocol[P]): + def __call__(self, /, *args: P.args, **kwargs: P.kwargs) -> None: + ... + + +class ConcatHandler(Protocol[P]): + def __call__(self, a: int, /, *args: P.args, **kwargs: P.kwargs) -> None: + ... + + +ConcatCallableHandler: TypeAlias = Callable[Concatenate[int, P], None] + + +handler_callable1: Callable[..., None] = func1 +concat_handler_callable1: ConcatCallableHandler[...] = func1 + +# This should generate an error because the first param of func2 is not int. +concat_handler_callable2: ConcatCallableHandler[...] = func2 + +handler1: Handler[...] = func1 +concat_handler1: ConcatHandler[...] = func1 + +# This should generate an error because the first param of func2 is not int. +concat_handler2: ConcatHandler[...] = func2 + + +class Command(Generic[P]): + def __init__(self, handler: Handler[P]) -> None: + self.handler: Handler[P] = handler + + +commands: list[Command[...]] = [] + + +def do_something(int_handler: Handler[int], var_args_handler: Handler[P], /) -> None: + int_command = Command(int_handler) + commands.append(int_command) + + var_args_command = Command(var_args_handler) + commands.append(var_args_command) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec28.py b/packages/pyright-internal/src/tests/samples/paramSpec28.py new file mode 100644 index 000000000000..df028737affb --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec28.py @@ -0,0 +1,39 @@ +# This sample tests a complicated combination of ParamSpec usage. + +# pyright: strict + +from typing import Any, Callable, Concatenate, ParamSpec, Protocol, TypeVar + + +_Fn = TypeVar("_Fn", bound=Callable[..., Any]) +_Ret = TypeVar("_Ret") +_Args = ParamSpec("_Args") +_Self = TypeVar("_Self", bound="_GenerativeType") + + +def decorator( + target: Callable[Concatenate[_Fn, _Args], _Ret] +) -> Callable[[_Fn], Callable[_Args, _Ret]]: + ... + + +class _GenerativeType(Protocol): + def _generate(self: "_Self") -> "_Self": + ... 
+ + +def generative( + fn: Callable[Concatenate[_Self, _Args], None] +) -> Callable[Concatenate[_Self, _Args], _Self]: + @decorator + def _generative( + fn: Callable[Concatenate[_Self, _Args], None], + self: _Self, + *args: _Args.args, + **kw: _Args.kwargs + ) -> _Self: + ... + + decorated = _generative(fn) + + return decorated diff --git a/packages/pyright-internal/src/tests/samples/paramSpec29.py b/packages/pyright-internal/src/tests/samples/paramSpec29.py new file mode 100644 index 000000000000..e9294acd527c --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec29.py @@ -0,0 +1,37 @@ +# This sample tests the case where an inner function uses concatenation +# and the return type of the outer function doesn't. + +from typing import Callable, Concatenate, ParamSpec + +P = ParamSpec("P") + + +def decorator1(f: Callable[P, None]) -> Callable[P, None]: + def inner(var: int, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + # This should generate an error because the concatenated parameters don't match. + return inner + + +def decorator2(f: Callable[P, None]) -> Callable[Concatenate[int, P], None]: + def inner(*args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + # This should generate an error because the concatenated parameters don't match. + return inner + + +def decorator3(f: Callable[P, None]) -> Callable[Concatenate[int, P], None]: + def inner(var: str, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + # This should generate an error because the concatenated parameters don't match. 
+ return inner + + +def decorator4(f: Callable[P, None]) -> Callable[Concatenate[str, P], None]: + def inner(var: str, *args: P.args, **kwargs: P.kwargs) -> None: + f(*args, **kwargs) + + return inner diff --git a/packages/pyright-internal/src/tests/samples/paramSpec3.py b/packages/pyright-internal/src/tests/samples/paramSpec3.py index 1fcbed1e91b0..f16059a01f95 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec3.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec3.py @@ -1,6 +1,15 @@ # This sample tests ParamSpec (PEP 612) behavior. -from typing import Awaitable, Callable, Optional, ParamSpec, TypeVar, Union, overload +from typing import ( + Awaitable, + Callable, + Generic, + Optional, + ParamSpec, + TypeVar, + Union, + overload, +) Ps = ParamSpec("Ps") R = TypeVar("R") @@ -51,3 +60,12 @@ def bar(x: Union[int, str]) -> Optional[str]: # This should generate an error because ParamSpec cannot # be used with an overloaded function. x = add_logging(bar) + + +class Foo(Generic[Ps, R]): + def __init__(self, func: Callable[Ps, R]): + self.func = func + + +def transform_foo(f: Callable[Ps, R]) -> Foo[Ps, R]: + return Foo(f) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec30.py b/packages/pyright-internal/src/tests/samples/paramSpec30.py new file mode 100644 index 000000000000..38041e561f8a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/paramSpec30.py @@ -0,0 +1,49 @@ +# This sample tests a complicated combination of ParamSpec usage. + +# pyright: strict + +from typing import Callable, TypeVar, overload +from typing_extensions import Concatenate, ParamSpec + +_T = TypeVar("_T") +_R = TypeVar("_R") +_P = ParamSpec("_P") + + +@overload +def error_decorator( + error_codes: None = None, +) -> Callable[[Callable[Concatenate[_T, _P], _R]], Callable[Concatenate[_T, _P], _R]]: + ... 
+ + +@overload +def error_decorator( + error_codes: list[str], +) -> Callable[ + [Callable[Concatenate[_T, _P], _R]], Callable[Concatenate[_T, _P], _R | None] +]: + ... + + +def error_decorator( + error_codes: list[str] | None = None, +) -> Callable[ + [Callable[Concatenate[_T, _P], _R]], Callable[Concatenate[_T, _P], _R | None] +]: + """Filter specific errors and raise custom exception for remaining once.""" + + def decorator( + func: Callable[Concatenate[_T, _P], _R] + ) -> Callable[Concatenate[_T, _P], _R | None]: + def wrapper(self: _T, *args: _P.args, **kwargs: _P.kwargs) -> _R | None: + try: + return func(self, *args, **kwargs) + except Exception as ex: + if error_codes is not None: + return None + raise Exception("Custom exception") from ex + + return wrapper + + return decorator diff --git a/packages/pyright-internal/src/tests/samples/paramSpec4.py b/packages/pyright-internal/src/tests/samples/paramSpec4.py index bde79593224b..9469a3bdfd31 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec4.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec4.py @@ -50,3 +50,60 @@ def decorator2(f: Callable[Concatenate[int, int], int]) -> Callable[P, int]: # its type arguments. def decorator3(f: Callable[Concatenate, int]) -> Callable[P, int]: ... + + +def decorator4(func: Callable[P, None]) -> Callable[Concatenate[int, P], None]: + def wrapper(x: int, /, *args: P.args, **kwargs: P.kwargs) -> None: + ... + + return wrapper + + +def func1(func: Callable[Concatenate[int, P], None]) -> Callable[P, None]: + ... + + +def func2(a: int, b: str, c: str) -> None: + ... + + +def func3(a: int, /, b: str, c: str) -> None: + ... + + +def func4(a: int, b: str, /, c: str) -> None: + ... 
+ + +v1 = func1(func2) +reveal_type(v1, expected_text="(b: str, c: str) -> None") + +v2 = func1(func3) +reveal_type(v2, expected_text="(b: str, c: str) -> None") + +v3 = func1(func4) +reveal_type(v3, expected_text="(b: str, /, c: str) -> None") + + +def func5(__fn: Callable[P, R], *args: P.args, **kwargs: P.kwargs) -> R: + ... + + +def func6(name: str, *args: str): + ... + + +v5 = func5(func6, "a", "b", "c") + +# This should generate an error because 1 isn't assignable to str. +v6 = func5(func6, "a", "b", "c", 1) + + +def func7(name: str, **kwargs: str): + ... + + +v7 = func5(func7, "a", b="b", c="c") + +# This should generate an error because 1 isn't assignable to str. +v8 = func5(func7, "a", b="b", c=1) diff --git a/packages/pyright-internal/src/tests/samples/paramSpec5.py b/packages/pyright-internal/src/tests/samples/paramSpec5.py index fe1de9ad280b..de7413460bd7 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec5.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec5.py @@ -2,7 +2,7 @@ # callable type has keyword-only or positional-only # parameter separators. 
-from typing import Callable, Literal, ParamSpec, TypeVar +from typing import Callable, ParamSpec, TypeVar P = ParamSpec("P") @@ -18,7 +18,7 @@ def foo(*, value: str) -> None: bar = decorator(foo) -t1: Literal["(*, value: str) -> None"] = reveal_type(bar) +reveal_type(bar, expected_text="(*, value: str) -> None") def baz(value: str, /) -> None: @@ -26,4 +26,4 @@ def baz(value: str, /) -> None: qux = decorator(baz) -t2: Literal["(value: str, /) -> None"] = reveal_type(qux) +reveal_type(qux, expected_text="(value: str, /) -> None") diff --git a/packages/pyright-internal/src/tests/samples/paramSpec7.py b/packages/pyright-internal/src/tests/samples/paramSpec7.py index de1c5b4b315b..1a8778b9133d 100644 --- a/packages/pyright-internal/src/tests/samples/paramSpec7.py +++ b/packages/pyright-internal/src/tests/samples/paramSpec7.py @@ -1,7 +1,7 @@ # This sample tests the handling of a specialized function # used as an argument to a ParamSpec. -from typing import Callable, Generic, Literal, ParamSpec, TypeVar +from typing import Callable, Generic, ParamSpec, TypeVar T = TypeVar("T") P = ParamSpec("P") @@ -18,5 +18,5 @@ def qux(self, v: T) -> None: baz: Baz[int] = Baz() -t1: Literal["(v: int) -> None"] = reveal_type(baz.qux) -t2: Literal["(v: int) -> None"] = reveal_type(foo(baz.qux)) +reveal_type(baz.qux, expected_text="(v: int) -> None") +reveal_type(foo(baz.qux), expected_text="(v: int) -> None") diff --git a/packages/pyright-internal/src/tests/samples/parameters1.py b/packages/pyright-internal/src/tests/samples/parameters1.py new file mode 100644 index 000000000000..fde1c8d27d26 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/parameters1.py @@ -0,0 +1,12 @@ +# This sample tests the reportMissingParameterType check. + + +class A: + # This should generate an error if reportMissingParameterType is enabled + # because 'y' is missing a type annotation. + def method1(self, x: int, _, y) -> int: + ... + + def method2(self, x, y): + # type: (int, int) -> int + ... 
diff --git a/packages/pyright-internal/src/tests/samples/partial1.py b/packages/pyright-internal/src/tests/samples/partial1.py new file mode 100644 index 000000000000..151e894f2018 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/partial1.py @@ -0,0 +1,163 @@ +# This sample tests the functools.partial support. + +from functools import partial +from typing import TypeVar + +_T1 = TypeVar("_T1") + + +def func1(): + """func1""" + return 0 + + +p1_0 = partial(func1) + +reveal_type(p1_0(), expected_text="Literal[0]") + +# This should generate an error. +p1_0("") + +# This should generate an error. +p1_1 = partial(func1, "", "") + + +def func2(name: str, number: int) -> None: + """func2""" + pass + + +p2_0 = partial(func2) + +reveal_type(p2_0("", 3), expected_text="None") + +# This should generate an error. +p2_0() + +# This should generate an error. +p2_0("") + +# This should generate an error. +p2_0("", 3, 3) + +# This should generate an error. +p2_0("", 3, 3) + +p2_0("", number=3) + +# This should generate an error. +p2_0("", 3, number=3) + +p2_1 = partial(func2, "") + +# This should generate an error. +p2_1() + +p2_1(3) +p2_1(number=3) + +# This should generate an error. +p2_1(3, number=3) + +p2_2 = partial(func2, number=3) +p2_2("") + +p2_3 = partial(func2, number=3, name="") +p2_3() + + +def func3(name: str, /, number: int): + return 0 + + +p3_0 = partial(func3) + +reveal_type(p3_0("", 3), expected_text="Literal[0]") + +# This should generate an error. +p3_0(name="", number=3) + +p3_1 = partial(func3, "") + +p3_1(3) +p3_1(number=3) + + +def func4(name: str, *, number: int): + return 0 + + +p4_0 = partial(func4) + +# This should generate an error. +p4_0("", 3) + +p4_0("", number=3) + + +def func5(name: _T1, number: _T1) -> _T1: + return name + + +p5_0 = partial(func5) +reveal_type(p5_0(3, 3), expected_text="int") +reveal_type(p5_0("3", "3"), expected_text="str") + + +p5_1 = partial(func5, 2) + +p5_1(3) + +# This should generate an error. 
+p5_1("3") + + +def func6(a: int, name: _T1, number: _T1) -> _T1: + return name + + +p6_0 = partial(func6, 3, 4) + +reveal_type(p6_0(3), expected_text="int") + + +def func7(a: int, name: float, *args: str): + return 0 + + +p7_0 = partial(func7, 3, 3, "", "", "") +p7_0("", "") + +# This should generate an error. +p7_0(3) + +p7_1 = partial(func7) +p7_1(3, 0) +p7_1(3, 0, "", "") + +# This should generate an error. +p7_1(3, 0, foo=3) + + +def func8(a: int, name: str, **kwargs: int): + return 0 + + +p8_0 = partial(func8, 3, "") +p8_0() +p8_0(foo=3) + +# This should generate an error. +p8_0(foo="") + +p8_1 = partial(func8) +p8_1(3, "") + +# This should generate an error. +p8_1(3) + + +# This should generate an error. +p8_1(3, "", 5) + +p8_1(3, "", foo=4, bar=5) diff --git a/packages/pyright-internal/src/tests/samples/partial2.py b/packages/pyright-internal/src/tests/samples/partial2.py new file mode 100644 index 000000000000..efab0f675d0a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/partial2.py @@ -0,0 +1,31 @@ +# This sample tests the functools.partial support for keyword parameters. + +from functools import partial +from typing import Protocol + + +def func1(a: int, b: int, x: str) -> str: + return x + + +class Proto1(Protocol): + def __call__(self, x: str) -> str: + ... + + +func2: Proto1 = partial(func1, 3, 4, x="a") +func2() +func2(x="b") + + +class Proto2(Protocol): + def __call__(self, b: int) -> str: + ... 
+ + +func3: Proto2 = partial(func1, 3, b=3, x="a") +func3() +func3(x="b") +func3(b=3) +func3(x="b", b=3) +func3(b=3, x="b") diff --git a/packages/pyright-internal/src/tests/samples/properties10.py b/packages/pyright-internal/src/tests/samples/properties10.py index 3ddcbb01f144..fa1a969fff75 100644 --- a/packages/pyright-internal/src/tests/samples/properties10.py +++ b/packages/pyright-internal/src/tests/samples/properties10.py @@ -1,8 +1,6 @@ # This sample tests the case where properties are unannotated, # and the type needs to be determined via inference. -from typing import Literal - class C: def __init__(self): @@ -18,4 +16,4 @@ def x(self, value): c = C() -t1: Literal["Unknown | None"] = reveal_type(c.x) +reveal_type(c.x, expected_text="Unknown | None") diff --git a/packages/pyright-internal/src/tests/samples/properties11.py b/packages/pyright-internal/src/tests/samples/properties11.py index 0a5242953792..3223ffb9c739 100644 --- a/packages/pyright-internal/src/tests/samples/properties11.py +++ b/packages/pyright-internal/src/tests/samples/properties11.py @@ -2,7 +2,7 @@ # are supported in Python 3.9 and newer. -from typing import Literal, Type, TypeVar +from typing import Type, TypeVar class Class1: @@ -17,9 +17,9 @@ def prop1(cls, value: str): pass -t1: Literal["str"] = reveal_type(Class1.prop1) +reveal_type(Class1.prop1, expected_text="str") -t2: Literal["str"] = reveal_type(Class1().prop1) +reveal_type(Class1().prop1, expected_text="str") Class1.prop1 = "hi" @@ -41,5 +41,5 @@ class Class3(Class2): ... 
-t3: Literal["Type[Class2]"] = reveal_type(Class2.prop1) -t4: Literal["Type[Class3]"] = reveal_type(Class3.prop1) +reveal_type(Class2.prop1, expected_text="Type[Class2]") +reveal_type(Class3.prop1, expected_text="Type[Class3]") diff --git a/packages/pyright-internal/src/tests/samples/properties13.py b/packages/pyright-internal/src/tests/samples/properties13.py new file mode 100644 index 000000000000..06fcc0bf6e22 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/properties13.py @@ -0,0 +1,15 @@ +# This sample tests the case where a property is defined on a metaclass. + + +class MyMeta(type): + @property + def something(cls) -> "Base": + return Base(1234) + + +class Base(metaclass=MyMeta): + def __new__(cls, arg) -> "Base": + ... + + +reveal_type(Base.something, expected_text="Base") diff --git a/packages/pyright-internal/src/tests/samples/properties3.py b/packages/pyright-internal/src/tests/samples/properties3.py index 2ffa90b2a542..708ddb951ae7 100644 --- a/packages/pyright-internal/src/tests/samples/properties3.py +++ b/packages/pyright-internal/src/tests/samples/properties3.py @@ -2,16 +2,19 @@ # custom subclasses of property. -class custom_property(property): +from typing import Any, Callable + + +class custom_property1(property): pass -class Custom(object): - @custom_property +class Custom1(object): + @custom_property1 def x(self) -> int: return 3 - @custom_property + @custom_property1 def y(self) -> float: return 3.5 @@ -24,28 +27,67 @@ def y(self): pass -m = Custom() +m1 = Custom1() -a: int = m.x +a1: int = m1.x # This should generate an error because m.x is # an int and cannot be assigned to str. -b: str = m.x +b1: str = m1.x -c: float = m.y +c1: float = m1.y # This should generate an error because m.y is # a float and cannot be assigned to int. -d: int = m.y +d1: int = m1.y # This should generate an error because there # is no setter for x. 
-m.x = 4 +m1.x = 4 -m.y = 4 +m1.y = 4 # This should generate an error because there is # no deleter for x. -del m.x +del m1.x + +del m1.y + + +class custom_property2(property): + _custom_func: Callable[..., Any] | None + + def custom_function(self, _custom_func: Callable[..., Any]): + self._custom_func = _custom_func + return self + + +class Custom2(object): + @custom_property2 + def x(self) -> int: + return 3 + + @custom_property2 + def y(self) -> float: + return 3.5 + + @y.setter + def y(self, val: float): + pass + + @y.deleter + def y(self): + pass + + @y.custom_function + def y(self): + pass + + +m2 = Custom2() + +a2 = m2.y +reveal_type(a2, expected_text="float") -del m.y +m2.y = 4 +del m2.y diff --git a/packages/pyright-internal/src/tests/samples/properties4.py b/packages/pyright-internal/src/tests/samples/properties4.py index c993c163938d..3d73861f9773 100644 --- a/packages/pyright-internal/src/tests/samples/properties4.py +++ b/packages/pyright-internal/src/tests/samples/properties4.py @@ -1,7 +1,7 @@ # This sample tests the handling of a property that's defined # with a generic type for the "self" parameter. 
-from typing import Literal, TypeVar +from typing import TypeVar _P = TypeVar("_P", bound=str) @@ -14,4 +14,4 @@ def parent(self: _P) -> _P: p = Foo().parent -t1: Literal["Foo"] = reveal_type(p) +reveal_type(p, expected_text="Foo") diff --git a/packages/pyright-internal/src/tests/samples/properties9.py b/packages/pyright-internal/src/tests/samples/properties9.py index 5384d2d1cbda..98626eb11f70 100644 --- a/packages/pyright-internal/src/tests/samples/properties9.py +++ b/packages/pyright-internal/src/tests/samples/properties9.py @@ -1,6 +1,6 @@ # This sample verifies the case where a property returns a callable -from typing import Callable, Literal +from typing import Callable class ClassA: @@ -20,5 +20,5 @@ def str_to_int(arg: str, base: int) -> int: val1: int = obj.converter("123", 10) val2: int = obj.converter_prop("123", 10) -t1: Literal["(_p0: str, _p1: int) -> int"] = reveal_type(obj.converter) -t2: Literal["(_p0: str, _p1: int) -> int"] = reveal_type(obj.converter_prop) +reveal_type(obj.converter, expected_text="(str, int) -> int") +reveal_type(obj.converter_prop, expected_text="(str, int) -> int") diff --git a/packages/pyright-internal/src/tests/samples/protocol1.py b/packages/pyright-internal/src/tests/samples/protocol1.py index bdda51098874..5c1b89f78cc5 100644 --- a/packages/pyright-internal/src/tests/samples/protocol1.py +++ b/packages/pyright-internal/src/tests/samples/protocol1.py @@ -1,6 +1,6 @@ # This sample tests the type checker's handling of generic protocol types. -from typing import List, TypeVar, Protocol +from typing import List, Optional, TypeVar, Protocol T = TypeVar("T") T_co = TypeVar("T_co", covariant=True) @@ -74,3 +74,20 @@ class NotProto2: # This should generate an error because "Protocol" cannot be used # as a type argument. var2: List[Protocol] = [] + + +class Abstract1(Protocol[T_contra]): + def do(self, x: Optional[T_contra]): + ... 
+ + +class Concrete1: + def do(self, x: Optional[int]): + pass + + +def use_protocol1(a: Abstract1[int]): + a.do(1) + + +use_protocol1(Concrete1()) diff --git a/packages/pyright-internal/src/tests/samples/protocol15.py b/packages/pyright-internal/src/tests/samples/protocol15.py index d08ee3f78a7d..9cea6e6f9c22 100644 --- a/packages/pyright-internal/src/tests/samples/protocol15.py +++ b/packages/pyright-internal/src/tests/samples/protocol15.py @@ -1,7 +1,7 @@ -# This sample tests the handling of protocols with properties that -# make use of generics. +# This sample tests the handling of protocols with properties and +# methods that make use of generics. -from typing import Protocol, TypeVar +from typing import Callable, Protocol, TypeVar T = TypeVar("T") @@ -11,11 +11,17 @@ class Proto(Protocol): def f(self: T) -> T: ... + def m(self, item: T, callback: Callable[[T], str]) -> str: + ... + class Concrete: @property def f(self) -> "Concrete": return self + def m(self, item: T, callback: Callable[[T], str]) -> str: + ... + x: Proto = Concrete() diff --git a/packages/pyright-internal/src/tests/samples/protocol17.py b/packages/pyright-internal/src/tests/samples/protocol17.py index eec3ecc19145..1120202840dc 100644 --- a/packages/pyright-internal/src/tests/samples/protocol17.py +++ b/packages/pyright-internal/src/tests/samples/protocol17.py @@ -1,6 +1,7 @@ # This sample tests for generic protocol variance consistency. from typing import Protocol, TypeVar, Union +from typing_extensions import ParamSpec # pyright: strict @@ -76,3 +77,12 @@ def m1(self) -> _T1: def m2(self, p1: _T1) -> None: pass + + +P = ParamSpec("P") +R = TypeVar("R", covariant=True) + + +class Callback(Protocol[P, R]): + def __call__(self, *args: P.args, **kwargs: P.kwargs) -> R: + ... 
diff --git a/packages/pyright-internal/src/tests/samples/protocol20.py b/packages/pyright-internal/src/tests/samples/protocol20.py index 1e722a23d365..c78b0eb085f0 100644 --- a/packages/pyright-internal/src/tests/samples/protocol20.py +++ b/packages/pyright-internal/src/tests/samples/protocol20.py @@ -1,7 +1,7 @@ # This sample tests the case where a TypeVar is bound to a # protocol class. -from typing import Literal, Protocol, Type, TypeVar +from typing import Protocol, Type, TypeVar class ClsProtocol(Protocol): @@ -18,5 +18,5 @@ def test(cls: Type[T1]) -> T1: return cls() -t1: Literal["Sample"] = reveal_type(Sample.test()) -t2: Literal["Sample"] = reveal_type(Sample().test()) +reveal_type(Sample.test(), expected_text="Sample") +reveal_type(Sample().test(), expected_text="Sample") diff --git a/packages/pyright-internal/src/tests/samples/protocol21.py b/packages/pyright-internal/src/tests/samples/protocol21.py new file mode 100644 index 000000000000..a850fa0efe3a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol21.py @@ -0,0 +1,26 @@ +# This sample tests the handling of protocol classes that define properties +# to indicate a read-only attribute. It also tests that a member access through +# a protocol class (not an instance) is flagged as an error. + +from typing import Protocol, Type + + +class A(Protocol): + @property + def name(self) -> str: + ... + + +class B: + name: str + + +def do_something(a: A, class_a: Type[A]) -> None: + val1 = a.name + reveal_type(val1, expected_text="str") + + # This should generate an error because accesses to + # properties from a protocol class are not allowed. 
+ val2 = class_a.name + + val3: A = B() diff --git a/packages/pyright-internal/src/tests/samples/protocol22.py b/packages/pyright-internal/src/tests/samples/protocol22.py new file mode 100644 index 000000000000..591b8f9570c5 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol22.py @@ -0,0 +1,43 @@ +# This sample tests that a type variable existing in a union type +# of multiple type variables is treated as covariant with the +# union type, thus affecting the variance restriction. + +from typing import Protocol, Tuple, TypeVar, Union + +# pyright: strict + +_T1 = TypeVar("_T1") +_T1_co = TypeVar("_T1_co", covariant=True) +_T1_contra = TypeVar("_T1_contra", contravariant=True) + +_T2 = TypeVar("_T2") +_T2_co = TypeVar("_T2_co", covariant=True) +_T2_contra = TypeVar("_T2_contra", contravariant=True) + + +# This is right, as `_T1_co` and `_T2_co` are only covariant with +# return type. +class P1(Protocol[_T1_co, _T2_co]): + def m1(self) -> Union[_T1_co, _T2_co]: + ... + + +# This is right, as `_T1_contra` and `_T2_contra` are only covariant +# with the argument type. +class P2(Protocol[_T1_contra, _T2_contra]): + def m1(self, a: Union[_T1_contra, _T2_contra]) -> None: + ... + + +# This is right, as `_T1` and `_T2` are both covariant with the +# argument type and the return type. +class P3(Protocol[_T1, _T2]): + def m1(self, a: _T1, b: _T2) -> Union[_T1, _T2]: + ... + + +# This is right, as `_T1` and `_T2` are both covariant with the +# argument type and the return type. +class P4(Protocol[_T1, _T2]): + def m2(self, a: Union[_T1, _T2]) -> Tuple[_T1, _T2]: + ... diff --git a/packages/pyright-internal/src/tests/samples/protocol23.py b/packages/pyright-internal/src/tests/samples/protocol23.py new file mode 100644 index 000000000000..20aecc8d8f11 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol23.py @@ -0,0 +1,41 @@ +# This sample tests the provision in PEP 544 that a protocol class +# can't be assigned to Type[Proto]. 
+ +from abc import abstractmethod +from typing import Protocol, Type + + +class Proto(Protocol): + @abstractmethod + def meth(self) -> int: + ... + + +class Concrete: + def meth(self) -> int: + return 42 + + +def func1(cls: Type[Proto]) -> int: + return cls().meth() + + +func1(Concrete) + +# This should generate an error because Proto is a protocol class, +# not a concrete class type that implements the protocol. +func1(Proto) + +val1: Type[Proto] +val1 = Concrete +val1().meth() + +# This should generate an error because Proto is a protocol class. +val1 = Proto + + +def func2() -> Type[Proto]: + ... + + +val1 = func2() diff --git a/packages/pyright-internal/src/tests/samples/protocol24.py b/packages/pyright-internal/src/tests/samples/protocol24.py new file mode 100644 index 000000000000..337bee5116f1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol24.py @@ -0,0 +1,74 @@ +# This sample tests the provision in PEP 544 where a class type can +# be assigned to a protocol. + +from typing import Any, Protocol + + +class ProtoA(Protocol): + def meth(_self, x: int) -> int: + ... + + +class ProtoB(Protocol): + def meth(_self, self: Any, x: int) -> int: + ... + + +class C: + def meth(self, x: int) -> int: + ... + + +# This should generate an error because C.meth isn't compatible +# with ProtoA().meth. +a: ProtoA = C + +b: ProtoB = C + + +class ProtoD(Protocol): + var1: int + + @property + def var2(self) -> str: + ... + + +class E: + var1: int + var2: str + + +class F: + var1: int + var2: int + + +d: ProtoD = E + +# This should generate an error because var2 is the wrong type. +e: ProtoD = F + + +class Jumps(Protocol): + def jump(self) -> int: + ... + + +class Jumper1: + @classmethod + def jump(cls) -> int: + ... + + +class Jumper2: + def jump(self) -> int: + ... 
+ + +def do_jump(j: Jumps): + print(j.jump()) + + +do_jump(Jumper1) +do_jump(Jumper2()) diff --git a/packages/pyright-internal/src/tests/samples/protocol25.py b/packages/pyright-internal/src/tests/samples/protocol25.py new file mode 100644 index 000000000000..7c04b4ed36c1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol25.py @@ -0,0 +1,31 @@ +# This sample tests the special-case handling for __slots__ and +# __class_getitem__ during protocol matching. + + +from typing import Any, Final, Iterable, Protocol + + +class B: + ... + + +class C: + def __class_getitem__(cls, __item: Any) -> Any: + ... + + +class SupportsClassGetItem(Protocol): + __slots__: str | Iterable[str] = () + + def __class_getitem__(cls, __item: Any) -> Any: + ... + + +b1: SupportsClassGetItem = B() # OK (missing __class_getitem__ is ignored) +c1: SupportsClassGetItem = C() # OK + + +# This should generate an error because __class_getitem__ is not exempt +# when performing class object protocol matching. +b2: SupportsClassGetItem = B # Error +c2: SupportsClassGetItem = C # OK diff --git a/packages/pyright-internal/src/tests/samples/protocol26.py b/packages/pyright-internal/src/tests/samples/protocol26.py new file mode 100644 index 000000000000..3d1b8f6732fe --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol26.py @@ -0,0 +1,40 @@ +# This sample tests protocol class assignment in a case that involves tricky +# recursion. + +from __future__ import annotations +from typing import Protocol, Sequence, TypeVar, Union, overload + +_T_co = TypeVar("_T_co", covariant=True) + + +class SupportsIndex(Protocol): + def __index__(self) -> int: + ... + + +class TupleLike(Sequence[_T_co]): + @overload + def __getitem__(self, __x: SupportsIndex) -> _T_co: + ... + + @overload + def __getitem__(self, __x: slice) -> TupleLike[_T_co]: + ... + + def __getitem__(self, __x: slice | SupportsIndex) -> _T_co | TupleLike[_T_co]: + ... 
+ + +class NestedSequence(Protocol[_T_co]): + @overload + def __getitem__(self, index: int, /) -> _T_co | NestedSequence[_T_co]: + ... + + @overload + def __getitem__(self, index: slice, /) -> NestedSequence[_T_co]: + ... + + +def func(t: TupleLike[int]): + x: Union[int, NestedSequence[int]] = t + y: NestedSequence[int] = t diff --git a/packages/pyright-internal/src/tests/samples/protocol27.py b/packages/pyright-internal/src/tests/samples/protocol27.py new file mode 100644 index 000000000000..9c2a14b23fac --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/protocol27.py @@ -0,0 +1,76 @@ +# This sample tests the logic that validates that a concrete class that +# explicitly derives from a protocol class implements the variables +# and functions defined in the protocol. + +from typing import ClassVar, Protocol + + +class Protocol1(Protocol): + cm1: ClassVar[int] + cm2: ClassVar[int] = 0 + + im1: int + im2: int = 2 + im3: int + + def __init__(self): + self.im3 = 3 + + +class Protocol2(Protocol): + cm10: int + + +class Protocol3(Protocol2, Protocol): + cm11: int + + +# This should generate an error. +class Concrete1(Protocol1): + ... + + +class Concrete2(Protocol1): + cm1 = 3 + im1 = 0 + + +# This should generate an error. +class Concrete3(Protocol1, Protocol3): + cm1 = 3 + + def __init__(self): + im1 = 0 + + +class Concrete4(Protocol1, Protocol3): + cm1 = 3 + cm10 = 3 + + def __init__(self): + self.im1 = 3 + self.im10 = 10 + self.cm11 = 3 + + +class Protocol5(Protocol): + def foo(self) -> int: + ... + + +# This should generate an error because "foo" is +# not implemented. 
+class Concrete5(Protocol5): + pass + + +class Protocol6(Protocol): + x: int + + +class Mixin: + x = 3 + + +class Concrete6(Mixin, Protocol6): + pass diff --git a/packages/pyright-internal/src/tests/samples/protocolModule2.py b/packages/pyright-internal/src/tests/samples/protocolModule2.py index f5990cc3fc92..bbf8c7df2565 100644 --- a/packages/pyright-internal/src/tests/samples/protocolModule2.py +++ b/packages/pyright-internal/src/tests/samples/protocolModule2.py @@ -1,6 +1,6 @@ # This sample tests protocol matching for modules. -from typing import Literal, Protocol, Type, TypeVar, Union, runtime_checkable +from typing import Protocol, Type, TypeVar, Union, runtime_checkable from . import protocolModule1 import datetime from importlib import import_module @@ -63,21 +63,21 @@ class NonProtocol: # Test type narrowing of module symbols for isinstance checks. def func1(x: Type[_T]): if isinstance(datetime, (P1, P2, NonProtocol, x)): - t1: Literal["P1 | P2 | _T@func1"] = reveal_type(datetime) + reveal_type(datetime, expected_text="P1 | P2 | _T@func1") else: - t2: Literal['Module("datetime")'] = reveal_type(datetime) + reveal_type(datetime, expected_text='Module("datetime")') def func2(): if not isinstance(datetime, P1): - t1: Literal['Module("datetime")'] = reveal_type(datetime) + reveal_type(datetime, expected_text='Module("datetime")') else: - t2: Literal["P1"] = reveal_type(datetime) + reveal_type(datetime, expected_text="P1") def func3(): my_module = import_module("my_module") if isinstance(my_module, (P1, NonProtocol)): - t1: Literal["P1"] = reveal_type(my_module) + reveal_type(my_module, expected_text="P1") else: - t2: Literal["ModuleType"] = reveal_type(my_module) + reveal_type(my_module, expected_text="ModuleType") diff --git a/packages/pyright-internal/src/tests/samples/pseudoGeneric1.py b/packages/pyright-internal/src/tests/samples/pseudoGeneric1.py new file mode 100644 index 000000000000..2b8772df1dfe --- /dev/null +++ 
b/packages/pyright-internal/src/tests/samples/pseudoGeneric1.py @@ -0,0 +1,20 @@ +# This sample tests type checking scenarios related to "pseudo generic" +# classes - those whose constructors are unannotated. + +from typing import List + +_DEFAULT_VALUE = object() + + +class MyClass(object): + def __init__( + self, + name, + description=_DEFAULT_VALUE, + ): + ... + + +x: List[MyClass] = [MyClass("a", description="b")] +y: List[MyClass] = [MyClass("c")] +z: List[MyClass] = x + y diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias1.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias1.py new file mode 100644 index 000000000000..d13283f480fc --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias1.py @@ -0,0 +1,79 @@ +# This sample tests Pyright's handling of recursive type aliases. + +from typing import Dict, List, Mapping, TypeVar, Union + +MyTree = List[Union["MyTree", int]] + +t1: MyTree = [1, 2, 3, [3, 4], [[3], 5]] + +# This should generate an error because a str is not allowed. +t2: MyTree = [3, ""] + +# This should generate an error because a str is not allowed. +t3: MyTree = [1, 2, 3, [3, 4], [3, 4, 5, [3, "4"]]] + +_T = TypeVar("_T") +GenericUnion = Union[int, _T] + +i1: GenericUnion[str] = "hi" +i1 = 3 + +i2: GenericUnion[float] = 3 +# This should generate an error because str isn't compatible. +i2 = "hi" + +Foo = Union[bool, List["Foo"], Dict["Foo", "Foo"]] + +bar1: Foo = [True, [True, False]] +bar2: Foo = [True, [True], {True: False}] +bar3: Foo = {[True]: False} +bar4: Foo = {True: [False]} + +# These should generate errors. 
+baz1: Foo = [True, ["True", False]] +baz2: Foo = [True, [True], {True: "False"}] +baz3: Foo = {["True"]: False} +baz4: Foo = {True: ["False"]} + +Json = Union[None, int, str, float, List["Json"], Dict[str, "Json"]] + +# This should generate an error +a1: Json = {"a": 1, "b": 3j} + +# This should generate an error +a2: Json = [2, 3j] + +RecursiveTuple = Union[str | int, tuple["RecursiveTuple", ...]] + + +b1: RecursiveTuple = (1, 1) +b2: RecursiveTuple = (1, "1") +b3: RecursiveTuple = (1, "1", 1, "2") +b4: RecursiveTuple = (1, ("1", 1), "2") +b5: RecursiveTuple = (1, ("1", 1), (1, (1, 2))) + +# This should generate an error +b6: RecursiveTuple = (1, ("1", 1), (1, (1, [2]))) + +# This should generate an error +b6: RecursiveTuple = (1, [1]) + + +RecursiveMapping = Union[str, int, Mapping[str, "RecursiveMapping"]] + + +c1: RecursiveMapping = 1 +c2: RecursiveMapping = "1" +c3: RecursiveMapping = {"1": "1"} +c4: RecursiveMapping = {"1": "1", "2": 1} +c5: RecursiveMapping = {"1": "1", "2": 1, "3": {}} +c6: RecursiveMapping = {"1": "1", "2": 1, "3": {"0": "0", "1": "2", "2": {}}} + +# This should generate an error. +c7: RecursiveMapping = {"1": [1]} + +# This should generate an error. +c8: RecursiveMapping = {"1": "1", "2": 1, "3": [1, 2]} + +# This should generate an error. +c9: RecursiveMapping = {"1": "1", "2": 1, "3": {"0": "0", "1": 1, "2": [1, 2, 3]}} diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias2.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias2.py new file mode 100644 index 000000000000..82fe7653ff7e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias2.py @@ -0,0 +1,32 @@ +# This sample tests Pyright's handling of recursive type aliases +# that are also generic. 
+ +from typing import List, TypeVar, Union + +_T1 = TypeVar("_T1", str, int) +_T2 = TypeVar("_T2") + +GenericTypeAlias1 = List[Union["GenericTypeAlias1[_T1]", _T1]] + +SpecializedTypeAlias1 = GenericTypeAlias1[str] + +a1: SpecializedTypeAlias1 = ["hi", ["hi", "hi"]] + +# This should generate an error because int doesn't match the +# constraint of the TypeVar _T1. +SpecializedClass2 = GenericTypeAlias1[float] + +b1: GenericTypeAlias1[str] = ["hi", "bye", [""], [["hi"]]] + +# This should generate an error. +b2: GenericTypeAlias1[str] = ["hi", [2.4]] + + +GenericTypeAlias2 = List[Union["GenericTypeAlias2[_T1, _T2]", _T1, _T2]] + +c2: GenericTypeAlias2[str, int] = [[3, ["hi"]], "hi"] + +c3: GenericTypeAlias2[str, float] = [[3, ["hi", 3.4, [3.4]]], "hi"] + +# This should generate an error because a float is a type mismatch. +c4: GenericTypeAlias2[str, int] = [[3, ["hi", 3, [3.4]]], "hi"] diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias3.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias3.py new file mode 100644 index 000000000000..723574825680 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias3.py @@ -0,0 +1,26 @@ +# This sample tests Pyright's handling of recursive type aliases. + +from typing import List, TypeAlias, Union + +# This should generate an error because the forward reference +# type needs to be in quotes. +GenericClass0 = List[Union[GenericClass0, int]] + +# This should generate an error because the type alias directly +# refers to itself. +RecursiveUnion = Union["RecursiveUnion", int] + +a1: RecursiveUnion = 3 + +# This should generate an error because the type alias refers +# to itself through a mutually-referential type alias. +MutualReference1 = Union["MutualReference2", int] +MutualReference2 = Union["MutualReference1", str] + +# This should generate an error because the type alias refers +# to itself. 
+MutualReference3: TypeAlias = "MutualReference3" + + +RecursiveType: TypeAlias = list[Union[str, "RecursiveType"]] +reveal_type(RecursiveType, expected_text="Type[list[str | RecursiveType]]") diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias4.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias4.py new file mode 100644 index 000000000000..e51b501c53f8 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias4.py @@ -0,0 +1,55 @@ +# This sample tests the handling of complex recursive types. + +# pyright: strict, reportUnusedVariable=false + +from typing import Dict, List, Optional, Union + + +JSONArray = List["JSONType"] +JSONObject = Dict[str, "JSONType"] + +JSONPrimitive = Union[str, float, int, bool, None] +JSONStructured = Union[JSONArray, JSONObject] + +JSONType = Union[JSONPrimitive, JSONStructured] + + +# Using type alias checking for list: +def f2(args: JSONStructured): + if isinstance(args, List): + reveal_type( + args, + expected_text="List[str | float | int | bool | JSONArray | Dict[str, JSONType] | None]", + ) + else: + reveal_type( + args, + expected_text="Dict[str, str | float | int | bool | List[JSONType] | JSONObject | None]", + ) + dargs: JSONObject = args + + +# Using type alias checking for dict: +def f3(args: JSONStructured): + if isinstance(args, Dict): + reveal_type( + args, + expected_text="Dict[str, str | float | int | bool | List[JSONType] | JSONObject | None]", + ) + else: + reveal_type( + args, + expected_text="List[str | float | int | bool | JSONArray | Dict[str, JSONType] | None]", + ) + largs: JSONArray = args + + +# Using type alias for "is None" narrowing: +LinkedList = Optional[tuple[int, "LinkedList"]] + + +def g(xs: LinkedList): + while xs is not None: + x, rest = xs + yield x + xs = rest diff --git a/packages/pyright-internal/src/tests/samples/typeAlias13.pyi b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias5.pyi similarity index 100% rename from 
packages/pyright-internal/src/tests/samples/typeAlias13.pyi rename to packages/pyright-internal/src/tests/samples/recursiveTypeAlias5.pyi diff --git a/packages/pyright-internal/src/tests/samples/typeAlias15.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias6.py similarity index 100% rename from packages/pyright-internal/src/tests/samples/typeAlias15.py rename to packages/pyright-internal/src/tests/samples/recursiveTypeAlias6.py diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias7.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias7.py new file mode 100644 index 000000000000..77ecc936f5ec --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias7.py @@ -0,0 +1,15 @@ +# This sample tests a recursive type alias used within +# a recursive function. + +from typing import Dict, Union + + +A = Union[str, Dict[str, "A"]] + + +def foo(x: A): + if isinstance(x, str): + print(x) + else: + for _, v in x.items(): + foo(v) diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias8.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias8.py new file mode 100644 index 000000000000..c20b7b8ec5aa --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias8.py @@ -0,0 +1,38 @@ +# This sample tests the case where a recursive type alias is used +# to define a TypedDict that refers to itself in one of its fields. 
+ +from __future__ import annotations + +from typing import Union, TypedDict, List + + +class _FooOptional(TypedDict, total=False): + options: List[AllBar] + type: int + + +class Foo(_FooOptional): + id: int + name: str + + +class BarA(TypedDict): + type: int + + +class BarB(TypedDict): + options: List[AllBar] + type: int + + +AllBar = Union[BarA, BarB] + + +def foo(a: AllBar): + reveal_type(a, expected_text="BarA | BarB") + options = a.get("options", []) + reveal_type(options, expected_text="Any | List[BarA | BarB]") + + for option in options: + reveal_type(option, expected_text="Any | BarA | BarB") + reveal_type(option["type"], expected_text="Any | int") diff --git a/packages/pyright-internal/src/tests/samples/recursiveTypeAlias9.py b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias9.py new file mode 100644 index 000000000000..3223af60107a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/recursiveTypeAlias9.py @@ -0,0 +1,22 @@ +# This sample tests that recursive type aliases work well with +# a generic dataclass constructor. + +from dataclasses import dataclass +from typing import Union, Generic, TypeVar + +A = TypeVar("A") +JSON = Union[str, dict[str, "JSON"]] + + +@dataclass +class Example(Generic[A]): + val: A + + +a: JSON = {"a": "b"} +b: JSON = "a" +c: Example[JSON] = Example(a) +d: Example[JSON] = Example("a") +e: Example[JSON] = Example({}) +f: Example[JSON] = Example({"a": "b"}) +g: Example[JSON] = Example({"a": {"a": "b"}}) diff --git a/packages/pyright-internal/src/tests/samples/required3.py b/packages/pyright-internal/src/tests/samples/required3.py new file mode 100644 index 000000000000..f2d6a5a819ac --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/required3.py @@ -0,0 +1,22 @@ +# This sample tests the handling of Required and NotRequired using +# the alternative syntax form of TypedDict. 
+
+from typing import TypedDict
+from typing_extensions import Required, NotRequired
+
+Example1 = TypedDict(
+    "Example", {"required": Required[int], "not_required": NotRequired[int]}
+)
+
+v1_0: Example1 = {"required": 1}
+
+# This should generate an error.
+v1_1: Example1 = {"not_required": 1}
+
+Example2 = TypedDict("Example", required=Required[int], not_required=NotRequired[int])
+
+
+v2_0: Example2 = {"required": 1}
+
+# This should generate an error.
+v2_1: Example2 = {"not_required": 1} diff --git a/packages/pyright-internal/src/tests/samples/revealedType1.py b/packages/pyright-internal/src/tests/samples/revealedType1.py index 8a501f267152..44418d92544e 100644 --- a/packages/pyright-internal/src/tests/samples/revealedType1.py +++ b/packages/pyright-internal/src/tests/samples/revealedType1.py @@ -1,6 +1,6 @@ # This sample tests the special reveal_type call. -from typing import Union +from typing import Literal, Union a: Union[str, int] if 2 + 3: @@ -14,3 +14,17 @@ a = "yup" reveal_type(a) + + +reveal_type(a, expected_type=Literal["yup"]) +reveal_type(a, expected_text="Literal['yup']") +reveal_type(a, expected_text="Literal['yup']", expected_type=Literal["yup"]) + +# This should generate an error. +reveal_type() + +# This should generate an error. +reveal_type(a, a) + + +reveal_type(a, x=3) diff --git a/packages/pyright-internal/src/tests/samples/self1.py b/packages/pyright-internal/src/tests/samples/self1.py new file mode 100644 index 000000000000..63cb93d39718 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/self1.py @@ -0,0 +1,84 @@ +# This sample tests various error conditions for the Self type + +from typing import Generic, Type, TypeVar +from typing_extensions import Self + + +T = TypeVar("T") + +# This should generate an error because Self can't be used in this context. +class A(Self): + ... + + +# This should generate an error because Self can't be used in this context. 
+x: Self + + +def func1() -> None: + # This should generate an error because Self can't be used in this context. + x: Self + + +# This should generate an error because Self can't be used in this context. +def func2(a: Self) -> None: + ... + + +# This should generate an error because Self can't be used in this context. +def func3() -> Self: + ... + + +class B: + x: Self + + def method1(self) -> Self: + return self + + def method2(self, a: Self) -> None: + x: Self = a + y = Self + + def method3(self: Self) -> Self: + # This should generate an error because Self doesn't accept a type arg. + y: Self[int] + return self + + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + def method4(self: T, a: Self) -> T: + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + x: Self + + return self + + @classmethod + def method5(cls) -> Type[Self]: + return cls + + @classmethod + def method6(cls, a: Self) -> None: + ... + + @classmethod + def method7(cls: Type[Self]) -> Type[Self]: + return cls + + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + @classmethod + def method8(cls: Type[T], a: Self) -> Type[T]: + # This should generate an error because Self can't be used with + # methods that declare a non-Self type for "self". + x: Self + return cls + + # This should generate an error because Self can't be used in + # a static method. + @staticmethod + def stat_method1(a: Self) -> None: + # This should generate an error because Self can't be used in + # a static method. 
+ x: Self diff --git a/packages/pyright-internal/src/tests/samples/self2.py b/packages/pyright-internal/src/tests/samples/self2.py new file mode 100644 index 000000000000..91e27d2d4a60 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/self2.py @@ -0,0 +1,195 @@ +# This sample tests the usage of the Self type. + +from typing import Callable, Dict, Generic, ParamSpec, Protocol, Type, TypeVar +from typing_extensions import Self +from dataclasses import dataclass + + +_P = ParamSpec("_P") +_R = TypeVar("_R") + + +class A(Generic[_P, _R]): + val: _R + + def __init__(self, callback: Callable[_P, _R]) -> None: + self.callback = callback + + def method1(self: Self) -> Self: + return self + + def method2(self) -> Self: + return self + + @classmethod + def method3(cls: Type[Self]) -> Type[Self]: + return cls + + @classmethod + def method4(cls) -> Type[Self]: + return cls + + +_T = TypeVar("_T") + + +class B(Generic[_T]): + def __init__(self, value: _T): + self.value = value + + +class Shape1: + def set_scale(self, scale: float) -> Self: + self.scale = scale + return self + + @classmethod + def from_config(cls, config: Dict[str, float]) -> Self: + return cls() + + +class Circle1(Shape1): + ... + + +x1 = Shape1().set_scale(3.4) +reveal_type(x1, expected_text="Shape1") + +x2 = Circle1().set_scale(3.4) +reveal_type(x2, expected_text="Circle1") + + +class Shape2: + def set_scale(self: Self, scale: float) -> Self: + self.scale = scale + return self + + @classmethod + def from_config(cls: Type[Self], config: Dict[str, float]) -> Self: + return cls() + + def difference(self: Self, other: Self) -> float: + ... + + def apply(self: Self, f: Callable[[Self], None]) -> None: + ... + + +class Circle2(Shape2): + ... + + +s2 = Shape2() +x3 = s2.set_scale(3.4) +reveal_type(x3, expected_text="Shape2") + +c2 = Circle2() +x4 = c2.set_scale(3.4) +reveal_type(x4, expected_text="Circle2") + +c2.difference(c2) +s2.difference(c2) +s2.difference(s2) + +# This should generate an error. 
+c2.difference(s2) + + +@dataclass +class LinkedList(Generic[_T]): + value: _T + next: Self | None = None + + +LinkedList[int](value=1, next=LinkedList[int](value=2)) + + +@dataclass +class OrdinalLinkedList(LinkedList[int]): + def ordinal_value(self) -> str: + return str(self.value) + + +# This should generate an error. +xs = OrdinalLinkedList(value=1, next=LinkedList[int](value=2)) + +if xs.next is not None: + xs.next = OrdinalLinkedList(value=3, next=None) + + # This should generate an error. + xs.next = LinkedList[int](value=3, next=None) + + +class Container(Generic[_T]): + value: _T + + def set_value(self, value: _T) -> Self: + ... + + +def object_with_concrete_type( + int_container: Container[int], str_container: Container[str] +) -> None: + reveal_type(int_container.set_value(0), expected_text="Container[int]") + reveal_type(str_container.set_value(""), expected_text="Container[str]") + + +def object_with_generic_type(container: Container[_T], value: _T) -> Container[_T]: + return container.set_value(value) + + +class ShapeProtocol(Protocol): + def set_scale(self, scale: float) -> Self: + ... 
+ + +class ReturnSelf: + scale: float = 1.0 + + def set_scale(self, scale: float) -> Self: + self.scale = scale + return self + + +class ReturnConcreteShape: + scale: float = 1.0 + + def set_scale(self, scale: float) -> "ReturnConcreteShape": + self.scale = scale + return self + + +class BadReturnType: + scale: float = 1.0 + + def set_scale(self, scale: float) -> int: + self.scale = scale + return 42 + + +class ReturnDifferentClass: + scale: float = 1.0 + + def set_scale(self, scale: float) -> ReturnConcreteShape: + return ReturnConcreteShape() + + +def accepts_shape(shape: ShapeProtocol) -> None: + y = shape.set_scale(0.5) + reveal_type(y) + + +def main( + return_self_shape: ReturnSelf, + return_concrete_shape: ReturnConcreteShape, + bad_return_type: BadReturnType, + return_different_class: ReturnDifferentClass, +) -> None: + accepts_shape(return_self_shape) + accepts_shape(return_concrete_shape) + + # This should generate an error. + accepts_shape(bad_return_type) + + # This should generate an error. + accepts_shape(return_different_class) diff --git a/packages/pyright-internal/src/tests/samples/self3.py b/packages/pyright-internal/src/tests/samples/self3.py new file mode 100644 index 000000000000..c6a705247fd7 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/self3.py @@ -0,0 +1,9 @@ +# This sample tests the special-case handling of Self when comparing +# two functions whose signatures differ only in the Self scope. + + +class SomeClass: + def __str__(self) -> str: + ... + + __repr__ = __str__ diff --git a/packages/pyright-internal/src/tests/samples/self4.py b/packages/pyright-internal/src/tests/samples/self4.py new file mode 100644 index 000000000000..02b25cb4abbc --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/self4.py @@ -0,0 +1,18 @@ +# This sample tests the case where a method decorator uses an explicit +# type annotation for the "self" parameter. 
+ +from typing import Callable, Generic, TypeVar + +_T = TypeVar("_T") + + +def my_generic_wrapper( + f: Callable[["MyClass[_T]"], str] +) -> Callable[["MyClass[_T]"], int]: + ... + + +class MyClass(Generic[_T]): + @my_generic_wrapper + def do_something(self) -> str: + ... diff --git a/packages/pyright-internal/src/tests/samples/self5.py b/packages/pyright-internal/src/tests/samples/self5.py new file mode 100644 index 000000000000..50bb53fd8f50 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/self5.py @@ -0,0 +1,26 @@ +# This sample tests the use of `Self` when used within a property +# or class property. + +from typing_extensions import Self + + +class A: + @property + def one(self) -> Self: + ... + + @classmethod + @property + def two(cls) -> type[Self]: + ... + + +class B(A): + ... + + +reveal_type(A().one, expected_text="A") +reveal_type(A.two, expected_text="Type[A]") + +reveal_type(B().one, expected_text="B") +reveal_type(B.two, expected_text="Type[B]") diff --git a/packages/pyright-internal/src/tests/samples/slots1.py b/packages/pyright-internal/src/tests/samples/slots1.py index 9320b52042ed..d7116514c6b5 100644 --- a/packages/pyright-internal/src/tests/samples/slots1.py +++ b/packages/pyright-internal/src/tests/samples/slots1.py @@ -45,12 +45,6 @@ def prop(self, val: int): pass -Slots1.bbb - -# This should generate an error -Slots1.ddd - - class Slots1_1(Slots1): __slots__ = ["ddd", "eee"] @@ -63,13 +57,6 @@ def __init__(self): self.fff = 1 -Slots1_1.ddd -Slots1_1.bbb - -# This should generate an error -Slots1_1.fff - - class NoSlots1_1(Slots1, NoSlots2): def __init__(self): self.bbb = 1 diff --git a/packages/pyright-internal/src/tests/samples/specialization1.py b/packages/pyright-internal/src/tests/samples/specialization1.py index a04cb2d4853e..06e78222397d 100644 --- a/packages/pyright-internal/src/tests/samples/specialization1.py +++ b/packages/pyright-internal/src/tests/samples/specialization1.py @@ -3,7 +3,7 @@ from typing import 
Generic, Iterable, List, TypeVar -class A(object): +class A: pass @@ -22,7 +22,7 @@ class Moo(Generic[_T1]): pass -class Foo(object): +class Foo: def __init__(self) -> None: ... @@ -50,8 +50,7 @@ def m2(self, b: Moo[B]) -> None: y.m2(b) -# This should generate an error: -# Type argument 'C' cannot be assigned to type variable '_T1' + def m3(c: Moo[C]): pass diff --git a/packages/pyright-internal/src/tests/samples/strings1.py b/packages/pyright-internal/src/tests/samples/strings1.py new file mode 100644 index 000000000000..a6f117b5e320 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/strings1.py @@ -0,0 +1,22 @@ +# This sample tests the reportImplicitStringConcatenation diagnostic check. + + +def func1(val: str): + pass + + +func1("first argument" "second argument") + +func1( + "This is the first argument, which contains " + "especially long text that could not fit into " + "one single line thus should be spread." +) + +func1( + ( + "This is the first argument, which contains " + "especially long text that could not fit into " + "one single line thus should be spread." + ) +) diff --git a/packages/pyright-internal/src/tests/samples/subscript2.py b/packages/pyright-internal/src/tests/samples/subscript2.py index 54a6ab0b7ac4..d1ff78ef8fd0 100644 --- a/packages/pyright-internal/src/tests/samples/subscript2.py +++ b/packages/pyright-internal/src/tests/samples/subscript2.py @@ -1,35 +1,35 @@ # This sample tests various forms of subscript expressions for # syntax and semantic (type) errors. 
-from typing import List, Literal, TypeVar +from typing import List, TypeVar _T = TypeVar("_T", list, tuple) def func1(p1: List[int], p2: _T): a1 = p1[0] - t_a1: Literal["int"] = reveal_type(a1) + reveal_type(a1, expected_text="int") a2 = p1[:] - t_a2: Literal["list[int]"] = reveal_type(a2) + reveal_type(a2, expected_text="list[int]") a3 = p1[1:] - t_a3: Literal["list[int]"] = reveal_type(a3) + reveal_type(a3, expected_text="list[int]") a4 = p1[1:2] - t_a4: Literal["list[int]"] = reveal_type(a4) + reveal_type(a4, expected_text="list[int]") a5 = p1[0:1:3] - t_a5: Literal["list[int]"] = reveal_type(a5) + reveal_type(a5, expected_text="list[int]") a6 = p1[:3] - t_a6: Literal["list[int]"] = reveal_type(a6) + reveal_type(a6, expected_text="list[int]") a7 = p1[::] - t_a7: Literal["list[int]"] = reveal_type(a7) + reveal_type(a7, expected_text="list[int]") a8 = p1[::2] - t_a8: Literal["list[int]"] = reveal_type(a8) + reveal_type(a8, expected_text="list[int]") # This should generate a syntax error. b1 = p1[0:1:3:4] @@ -39,17 +39,17 @@ def func1(p1: List[int], p2: _T): # This should generate a type error. c1 = p1[:,] - t_c1: Literal["Unknown"] = reveal_type(c1) + reveal_type(c1, expected_text="Unknown") # This should generate a type error. c2 = p1[:,:] - t_c2: Literal["Unknown"] = reveal_type(c2) + reveal_type(c2, expected_text="Unknown") # This should generate a type error. c3 = p1[1,] - t_c3: Literal["Unknown"] = reveal_type(c3) + reveal_type(c3, expected_text="Unknown") d1 = p2[0] - t_d1: Literal["Unknown"] = reveal_type(d1) + reveal_type(d1, expected_text="Unknown") diff --git a/packages/pyright-internal/src/tests/samples/subscript3.py b/packages/pyright-internal/src/tests/samples/subscript3.py index 3cd992ab04be..4520e5790ebc 100644 --- a/packages/pyright-internal/src/tests/samples/subscript3.py +++ b/packages/pyright-internal/src/tests/samples/subscript3.py @@ -1,7 +1,7 @@ # This sample tests subscript forms specified in PEP 637 - # keyword and unpacked args. 
-from typing import Any, Literal, Tuple, Union, overload +from typing import Any, Tuple, Union, overload class ClassA: @@ -42,7 +42,7 @@ def __setitem__(self, index: Any, value: Union[str, float], *, v1: int = 3) -> N a_obj = ClassA() x1 = a_obj[1] -t_x1: Literal["int"] = reveal_type(x1) +reveal_type(x1, expected_text="int") a_obj[1] = 3 @@ -50,7 +50,7 @@ def __setitem__(self, index: Any, value: Union[str, float], *, v1: int = 3) -> N a_obj[1] = 3.5 x2 = a_obj[1,] -t_x2: Literal["float"] = reveal_type(x2) +reveal_type(x2, expected_text="float") a_obj[1,] = 3.4 @@ -58,7 +58,7 @@ def __setitem__(self, index: Any, value: Union[str, float], *, v1: int = 3) -> N a_obj[1,] = 3.5j x3 = a_obj[1,2] -t_x3: Literal["float"] = reveal_type(x3) +reveal_type(x3, expected_text="float") a_obj[1,2] = 4.5 @@ -66,7 +66,7 @@ def __setitem__(self, index: Any, value: Union[str, float], *, v1: int = 3) -> N a_obj[1,2] = 3.5j x4 = a_obj[(1,)] -t_x4: Literal["float"] = reveal_type(x4) +reveal_type(x4, expected_text="float") a_obj[(1,)] = 3 @@ -74,7 +74,7 @@ def __setitem__(self, index: Any, value: Union[str, float], *, v1: int = 3) -> N a_obj[(1,)] = 3.5j x6 = a_obj[1, v1=3] -t_x6: Literal["str"] = reveal_type(x6) +reveal_type(x6, expected_text="str") a_obj[1, v1=3] = "hi" @@ -83,7 +83,7 @@ def __setitem__(self, index: Any, value: Union[str, float], *, v1: int = 3) -> N x8 = a_obj[1, *val_list] -t_x8: Literal["float"] = reveal_type(x8) +reveal_type(x8, expected_text="float") a_obj[1, *val_list] = 4.3 @@ -102,7 +102,7 @@ def __getitem__(self, value: Tuple[()], *, v1: int) -> str: y1 = b_obj[1] y2 = b_obj[v1=3] -t_y2: Literal["str"] = reveal_type(y2) +reveal_type(y2, expected_text="str") # This should generate an error because v2 is not a named arg. 
y3 = b_obj[v2=3] @@ -118,7 +118,7 @@ def __setitem__(self, index: Any, value: float, **kwargs: int) -> None: c_obj = ClassC() z1 = c_obj[1, *val_list, **val_dict] -t_z1: Literal["complex"] = reveal_type(z1) +reveal_type(z1, expected_text="complex") c_obj[1, *val_list, **val_dict] = 4.3 @@ -127,7 +127,7 @@ def __setitem__(self, index: Any, value: float, **kwargs: int) -> None: z2 = c_obj[1, v1=3, v2=4] -t_z2: Literal["complex"] = reveal_type(z2) +reveal_type(z2, expected_text="complex") c_obj[1, v1=3, v2=4] = 4.3 diff --git a/packages/pyright-internal/src/tests/samples/super2.py b/packages/pyright-internal/src/tests/samples/super2.py index ecde688d44ff..22988c1a403c 100644 --- a/packages/pyright-internal/src/tests/samples/super2.py +++ b/packages/pyright-internal/src/tests/samples/super2.py @@ -1,7 +1,7 @@ # This sample tests the handling of the "super" call when # used with a two-argument form that specifies the "bind to" type. -from typing import Literal, Type, TypeVar +from typing import Type, TypeVar T = TypeVar("T", bound="A") @@ -20,10 +20,10 @@ def factoryB(cls): a1 = A.factory() -type_a1: Literal["A"] = reveal_type(a1) +reveal_type(a1, expected_text="A") b1 = B.factory() -type_b1: Literal["B"] = reveal_type(b1) +reveal_type(b1, expected_text="B") b2 = B.factoryB() -type_b2: Literal["B"] = reveal_type(b2) +reveal_type(b2, expected_text="B") diff --git a/packages/pyright-internal/src/tests/samples/super6.py b/packages/pyright-internal/src/tests/samples/super6.py new file mode 100644 index 000000000000..e0113f70a758 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/super6.py @@ -0,0 +1,17 @@ +# This sample tests the case where super().__new__(cls) is called +# and there is an inferred return type based on the cls type. 
+ +from typing import NamedTuple + +FooBase = NamedTuple("FooBase", [("x", int)]) + + +class Foo(FooBase): + def __new__(cls): + obj = super().__new__(cls, x=1) + reveal_type(obj, expected_text="Self@Foo") + return obj + + +f = Foo() +reveal_type(f, expected_text="Foo") diff --git a/packages/pyright-internal/src/tests/samples/super7.py b/packages/pyright-internal/src/tests/samples/super7.py new file mode 100644 index 000000000000..84afae2aa895 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/super7.py @@ -0,0 +1,52 @@ +# This sample tests the use of super() with two arguments where the second +# argument is an instance. + + +class BaseClass: + def my_method(self, value: int) -> int: + ... + + +class SubClass(BaseClass): + def method_plain_super(self, value: int) -> int: + reveal_type(super(), expected_text="BaseClass") + return super().my_method(value) + + def method_super(self, value: int) -> int: + reveal_type(super(__class__, self), expected_text="BaseClass") + return super(__class__, self).my_method(value) + + def method_super_extra_arg(self, value: int) -> int: + reveal_type(super(__class__, self), expected_text="BaseClass") + + # This should generate an error because the method is already bound. + return super(__class__, self).my_method(self, value) + + @classmethod + def classmethod_super(cls, value: int) -> int: + self = cls() + reveal_type(super(__class__, self), expected_text="BaseClass") + return super(__class__, self).my_method(value) + + @classmethod + def classmethod_super_extra_arg(cls, value: int) -> int: + self = cls() + reveal_type(super(__class__, self), expected_text="BaseClass") + + # This should generate an error.
+ return super(__class__, self).my_method(self, value) + + @staticmethod + def staticmethod_super(value: int) -> int: + self = SubClass() + reveal_type(super(__class__, self), expected_text="BaseClass") + + return super(__class__, self).my_method(value) + + @staticmethod + def staticmethod_super_extra_arg(value: int) -> int: + self = SubClass() + reveal_type(super(__class__, self), expected_text="BaseClass") + + # This should generate an error. + return super(__class__, self).my_method(self, value) diff --git a/packages/pyright-internal/src/tests/samples/super8.py b/packages/pyright-internal/src/tests/samples/super8.py new file mode 100644 index 000000000000..a214f5437a48 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/super8.py @@ -0,0 +1,7 @@ +# This sample tests the case where super() is used within a metaclass +# __init__ method. + + +class Metaclass(type): + def __init__(self, name, bases, attrs): + super().__init__(name, bases, attrs) diff --git a/packages/pyright-internal/src/tests/samples/totalOrdering1.py b/packages/pyright-internal/src/tests/samples/totalOrdering1.py new file mode 100644 index 000000000000..dd0de416a08f --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/totalOrdering1.py @@ -0,0 +1,27 @@ +# This sample tests the support for functools.total_ordering. + +from functools import total_ordering + + +@total_ordering +class ClassA: + val1: int + + def __gt__(self, other: object) -> bool: + ... + + +a = ClassA() +b = ClassA() +a < b +a <= b +a > b +a >= b +a == b +a != b + +# This should generate an error because it doesn't declare +# any of the required ordering functions. 
+@total_ordering +class ClassB: + val1: int diff --git a/packages/pyright-internal/src/tests/samples/tryExcept1.py b/packages/pyright-internal/src/tests/samples/tryExcept1.py index 55a8be9c0778..92f712cc3e8c 100644 --- a/packages/pyright-internal/src/tests/samples/tryExcept1.py +++ b/packages/pyright-internal/src/tests/samples/tryExcept1.py @@ -1,8 +1,6 @@ # This sample tests the name binder's handling of # try/except/raise statements -from typing import Literal - def func1(): @@ -17,11 +15,11 @@ def func1(): def func2(x, y) -> bool: try: z = x / y - except Exception as e: - t1: Literal["Exception"] = reveal_type(e) - return False except (RuntimeError, NameError) as e: - t2: Literal["RuntimeError | NameError"] = reveal_type(e) + reveal_type(e, expected_text="RuntimeError | NameError") + return False + except Exception as e: + reveal_type(e, expected_text="Exception") return False except: raise Exception() diff --git a/packages/pyright-internal/src/tests/samples/tryExcept10.py b/packages/pyright-internal/src/tests/samples/tryExcept10.py new file mode 100644 index 000000000000..87632cb02b9a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tryExcept10.py @@ -0,0 +1,12 @@ +# This sample tests that type errors within a finally clause are +# properly detected. + +def func1() -> None: + file = None + try: + raise ValueError() + except Exception: + return None + finally: + # This should generate an error. + file.name diff --git a/packages/pyright-internal/src/tests/samples/tryExcept7.py b/packages/pyright-internal/src/tests/samples/tryExcept7.py new file mode 100644 index 000000000000..cf386180f676 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tryExcept7.py @@ -0,0 +1,16 @@ +# This sample tests the syntax handling for Python 3.11 exception groups +# as described in PEP 654. + + +def func1(): + + try: + pass + + # This should generate an error if using Python 3.10 or earlier.
+ except* BaseException: + pass + + # This should generate an error if using Python 3.10 or earlier. + except*: + pass diff --git a/packages/pyright-internal/src/tests/samples/tryExcept8.py b/packages/pyright-internal/src/tests/samples/tryExcept8.py new file mode 100644 index 000000000000..5fc63d2ccd84 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tryExcept8.py @@ -0,0 +1,48 @@ +# This sample tests the detection of inaccessible exception handlers. + + +def func1() -> None: + pass + + +def func2(): + try: + func1() + except OSError: + pass + except Exception: + pass + # This should generate an error. + except PermissionError: + pass + + +def func3(): + try: + func1() + except OSError: + pass + # This should generate an error. + except (PermissionError, ProcessLookupError): + pass + # This should generate an error. + except (PermissionError, ConnectionAbortedError): + pass + + +def func4(): + try: + func1() + except OSError: + pass + except (UnboundLocalError, ConnectionAbortedError): + pass + + +def func5(): + try: + func1() + except OSError: + pass + except: + pass diff --git a/packages/pyright-internal/src/tests/samples/tryExcept9.py b/packages/pyright-internal/src/tests/samples/tryExcept9.py new file mode 100644 index 000000000000..a55d115dfefd --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tryExcept9.py @@ -0,0 +1,42 @@ +# This sample tests the case where a finally clause contains some conditional +# logic that narrows the type of an expression. This narrowed type should +# persist after the finally clause. + + +def func1(): + file = None + try: + file = open("test.txt") + except Exception: + return None + finally: + if file: + file.close() + + # This should evaluate to "TextIOWrapper", but the current + # logic is not able to evaluate different types for file + # based on whether it's an exception or non-exception case. 
+ reveal_type(file, expected_text="TextIOWrapper | None") + + +def func2(): + file = None + try: + file = open("test.txt") + except Exception: + pass + finally: + if file: + file.close() + + reveal_type(file, expected_text="TextIOWrapper | None") + + +def func3(): + file = None + try: + file = open("test.txt") + finally: + pass + + reveal_type(file, expected_text="TextIOWrapper") diff --git a/packages/pyright-internal/src/tests/samples/tupleUnpack1.py b/packages/pyright-internal/src/tests/samples/tupleUnpack1.py new file mode 100644 index 000000000000..9915d485fcc6 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tupleUnpack1.py @@ -0,0 +1,58 @@ +# This sample tests the handling of Unpack[Tuple[...]] as described +# in PEP 646. + +from typing import Tuple, Union +from typing_extensions import Unpack + + +def func1(v1: Tuple[int, Unpack[Tuple[bool, bool]], str]): + reveal_type(v1, expected_text="Tuple[int, bool, bool, str]") + + +# This should generate an error because multiple unpacks. +def func2(v2: Tuple[int, Unpack[Tuple[bool, bool]], str, Unpack[Tuple[bool, bool]]]): + pass + + +def func3(v3: Tuple[int, Unpack[Tuple[bool, ...]], str]): + reveal_type(v3, expected_text="Tuple[int, *tuple[bool, ...], str]") + + +# This should generate an error because there are multiple unbounded tuples. +def func4(v4: Tuple[Unpack[Tuple[bool, ...]], ...]): + pass + + +# This should generate an error because there are multiple unbounded tuples. 
+def func5(v5: Tuple[Unpack[Tuple[Unpack[Tuple[bool, ...]]]], ...]): + pass + + +def func6(v6: Tuple[Unpack[Tuple[bool]], ...]): + reveal_type(v6, expected_text="Tuple[bool, ...]") + + +def func7(v7: Tuple[Unpack[Tuple[bool, Unpack[Tuple[int, float]]]]]): + reveal_type(v7, expected_text="Tuple[bool, int, float]") + + +def func8(v8: Union[Unpack[Tuple[Unpack[Tuple[bool, Unpack[Tuple[int, ...]]]]]]]): + reveal_type(v8, expected_text="bool | int") + + +# This should generate an error because unpack isn't allowed for simple parameters. +def func9(v9: Unpack[tuple[int, int]]): + pass + + +# This should generate an error because unpack isn't allowed for **kwargs parameters. +def func10(**v10: Unpack[tuple[int, int]]): + pass + + +def func11(*v11: Unpack[tuple[int, ...]]): + pass + + +def func12(*v11: Unpack[tuple[int, int]]): + pass diff --git a/packages/pyright-internal/src/tests/samples/tupleUnpack2.py b/packages/pyright-internal/src/tests/samples/tupleUnpack2.py new file mode 100644 index 000000000000..ea7a7c0dca96 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tupleUnpack2.py @@ -0,0 +1,56 @@ +# This sample tests the handling of *tuple[...] as described +# in PEP 646. This test is the same as tupleUnpack1.py but +# it uses the * syntax instead of the backward compatibility +# "Unpack" form. + +from typing import Union + + +def func1(v1: tuple[int, *tuple[bool, bool], str]): + reveal_type(v1, expected_text="tuple[int, bool, bool, str]") + + +# This should generate an error because multiple unpacks. +def func2(v2: tuple[int, *tuple[bool, bool], str, *tuple[bool, bool]]): + pass + + +def func3(v3: tuple[int, *tuple[bool, ...], str]): + reveal_type(v3, expected_text="tuple[int, *tuple[bool, ...], str]") + + +# This should generate an error because there are multiple unbounded tuples. +def func4(v4: tuple[*tuple[bool, ...], ...]): + pass + + +# This should generate an error because there are multiple unbounded tuples. 
+def func5(v5: tuple[*tuple[*tuple[bool, ...]], ...]): + pass + + +def func6(v6: tuple[*tuple[bool], ...]): + reveal_type(v6, expected_text="tuple[bool, ...]") + + +def func7(v7: tuple[*tuple[bool, *tuple[int, float]]]): + reveal_type(v7, expected_text="tuple[bool, int, float]") + + +def func8(v8: Union[*tuple[*tuple[bool, *tuple[int, ...]]]]): + reveal_type(v8, expected_text="bool | int") + +# This should generate an error because unpack isn't allowed for simple parameters. +def func9(v9: *tuple[int, int]): + pass + +# This should generate an error because unpack isn't allowed for **kwargs parameters. +def func10(**v10: *tuple[int, int]): + pass + +def func11(*v11: *tuple[int, ...]): + pass + +def func12(*v11: *tuple[int, int]): + pass + diff --git a/packages/pyright-internal/src/tests/samples/tuples1.py b/packages/pyright-internal/src/tests/samples/tuples1.py index c257a1d32c95..099778ccd33d 100644 --- a/packages/pyright-internal/src/tests/samples/tuples1.py +++ b/packages/pyright-internal/src/tests/samples/tuples1.py @@ -1,6 +1,6 @@ # This sample file tests various aspects of type analysis for tuples. -from typing import List, Literal, Tuple +from typing import List, Tuple, Union import os @@ -15,7 +15,12 @@ def func1() -> Tuple[int, int, int]: # This should generate an error because # of a tuple size mismatch. - b, c, d, e, = a + ( + b, + c, + d, + e, + ) = a return a @@ -114,23 +119,29 @@ def func11() -> float: return 3 + # Tests for assignment of tuple list that includes star # operator both with and without type annotations. def func12(): data = ["a", "b"] - data1 = *map(str.split, data), - data2: Tuple[List[str], ...] = *map(str.split, data), + data1 = (*map(str.split, data),) + data2: Tuple[List[str], ...] = (*map(str.split, data),) data3 = (*map(str.split, data),) data4: Tuple[List[str], ...] = (*map(str.split, data),) # Tests for index-out-of-range error. 
-def func13(a: Tuple[int, str], b: Tuple[()], c: Tuple[int, ...]): +def func13( + a: Tuple[int, str], + b: Tuple[()], + c: Tuple[int, ...], + d: Union[Tuple[int], Tuple[str, str], Tuple[int, ...]], +): v1 = a[0] - t_v1: Literal["int"] = reveal_type(v1) + reveal_type(v1, expected_text="int") v2 = a[1] - t_v2: Literal["str"] = reveal_type(v2) + reveal_type(v2, expected_text="str") # This should generate an error. v3 = a[2] @@ -139,25 +150,32 @@ def func13(a: Tuple[int, str], b: Tuple[()], c: Tuple[int, ...]): v4 = b[0] v5 = c[100] - t_v5: Literal["int"] = reveal_type(v5) + reveal_type(v5, expected_text="int") v6 = a[-2] - t_v6: Literal["int"] = reveal_type(v6) + reveal_type(v6, expected_text="int") v7 = a[-1] - t_v7: Literal["str"] = reveal_type(v7) + reveal_type(v7, expected_text="str") # This should generate an error. v8 = a[-3] - t_v8: Literal["int | str"] = reveal_type(v8) + reveal_type(v8, expected_text="int | str") v9 = c[-100] - t_v9: Literal["int"] = reveal_type(v9) + reveal_type(v9, expected_text="int") + + v10 = d[0] + + # This should generate one error. + v11 = d[1] + + # This should generate two errors. 
+ v12 = d[2] # Test for construction using the tuple constructor def func14(): list1 = [1, 2, 3] v1 = tuple(list1) - t_v1: Literal["tuple[int, ...]"] = reveal_type(v1) - + reveal_type(v1, expected_text="tuple[int, ...]") diff --git a/packages/pyright-internal/src/tests/samples/tuples10.py b/packages/pyright-internal/src/tests/samples/tuples10.py index f111dbbb6122..30d0f88b3ab9 100644 --- a/packages/pyright-internal/src/tests/samples/tuples10.py +++ b/packages/pyright-internal/src/tests/samples/tuples10.py @@ -6,25 +6,25 @@ a1 = (1, 2) -t1: Literal["tuple[Literal[1], Literal[2]]"] = reveal_type(a1) +reveal_type(a1, expected_text="tuple[Literal[1], Literal[2]]") a2 = list((1, 2)) -t2: Literal["list[int]"] = reveal_type(a2) +reveal_type(a2, expected_text="list[int]") a3: List[Literal[1]] = list((1,)) -t3: Literal["list[Literal[1]]"] = reveal_type(a3) +reveal_type(a3, expected_text="list[Literal[1]]") def func1(v1: Tuple[Literal[1], ...], v2: Tuple[Literal[1]]): a4 = set(v1) - t4: Literal["set[Literal[1]]"] = reveal_type(a4) + reveal_type(a4, expected_text="set[Literal[1]]") a5 = set(v2) - t5: Literal["set[Literal[1]]"] = reveal_type(a5) + reveal_type(a5, expected_text="set[Literal[1]]") a6 = (1, "hi") -t6: Literal["tuple[Literal[1], Literal['hi']]"] = reveal_type(a6) +reveal_type(a6, expected_text="tuple[Literal[1], Literal['hi']]") v4 = set(a6) -t7: Literal["set[int | str]"] = reveal_type(v4) +reveal_type(v4, expected_text="set[int | str]") diff --git a/packages/pyright-internal/src/tests/samples/tuples12.py b/packages/pyright-internal/src/tests/samples/tuples12.py index 2d89b9498c94..e87eacd021e6 100644 --- a/packages/pyright-internal/src/tests/samples/tuples12.py +++ b/packages/pyright-internal/src/tests/samples/tuples12.py @@ -1,26 +1,31 @@ # This sample tests type inference for tuples that contain unpack # operators. 
-from typing import Literal - def func1(a: int, *args: int): v1 = (a, *args) - t1: Literal["tuple[int, ...]"] = reveal_type(v1) + reveal_type(v1, expected_text="tuple[int, *tuple[int, ...]]") def func2(a: int, *args: str): v1 = (a, *args) - t1: Literal["tuple[int | str, ...]"] = reveal_type(v1) + reveal_type(v1, expected_text="tuple[int, *tuple[str, ...]]") def func3(a: int, b: str, *args: str): - v1 = (*args, a, *args, b, *(a, b, a)) - t1: Literal["tuple[str | int, ...]"] = reveal_type(v1) + v1 = (a, b, *(a, b, a), *args, a, *args, b, *(a, b, a)) + reveal_type( + v1, expected_text="tuple[int, str, int, str, int, *tuple[str | int, ...]]" + ) + + +def func4(a: int, b: str, *args: str): + v1 = (b, *args, *(b, a)) + reveal_type(v1, expected_text="tuple[str, *tuple[str, ...], str, int]") -def func4(): +def func5(): a = 3.4 b = [1, 2, 3] v1 = (a, *b) - t1: Literal["tuple[float | int, ...]"] = reveal_type(v1) + reveal_type(v1, expected_text="tuple[float, *tuple[int, ...]]") diff --git a/packages/pyright-internal/src/tests/samples/tuples13.py b/packages/pyright-internal/src/tests/samples/tuples13.py index 7c4353e49175..82da99a43e12 100644 --- a/packages/pyright-internal/src/tests/samples/tuples13.py +++ b/packages/pyright-internal/src/tests/samples/tuples13.py @@ -1,26 +1,26 @@ # This sample tests indexing of tuples with slice expressions. 
-from typing import Literal, Tuple +from typing import Tuple def func1(val1: Tuple[int, str, None], val2: Tuple[int, ...]): x1 = val1[:2] - t1: Literal["tuple[int, str]"] = reveal_type(x1) + reveal_type(x1, expected_text="tuple[int, str]") x2 = val1[-3:2] - t2: Literal["tuple[int, str]"] = reveal_type(x2) + reveal_type(x2, expected_text="tuple[int, str]") x3 = val1[1:] - t3: Literal["tuple[str, None]"] = reveal_type(x3) + reveal_type(x3, expected_text="tuple[str, None]") x4 = val1[1:-1] - t4: Literal["tuple[str]"] = reveal_type(x4) + reveal_type(x4, expected_text="tuple[str]") x5 = val1[:-2] - t5: Literal["tuple[int]"] = reveal_type(x5) + reveal_type(x5, expected_text="tuple[int]") x6 = val1[0:100] - t6: Literal["Tuple[int | str | None, ...]"] = reveal_type(x6) + reveal_type(x6, expected_text="tuple[int | str | None, ...]") x7 = val2[:2] - t7: Literal["Tuple[int, ...]"] = reveal_type(x7) + reveal_type(x7, expected_text="tuple[int, ...]") diff --git a/packages/pyright-internal/src/tests/samples/tuples14.py b/packages/pyright-internal/src/tests/samples/tuples14.py index ba12ec9d8367..c7c8fceb10db 100644 --- a/packages/pyright-internal/src/tests/samples/tuples14.py +++ b/packages/pyright-internal/src/tests/samples/tuples14.py @@ -1,11 +1,11 @@ # This sample tests the special case of the tuple() constructor # when used with bidirectional type inference. -from typing import Literal, Tuple +from typing import Tuple tuple1: Tuple[int, ...] 
= tuple() -t1: Literal["tuple[int, ...]"] = reveal_type(tuple1) +reveal_type(tuple1, expected_text="tuple[int, ...]") tuple2: Tuple[str, int, complex] = tuple() -t2: Literal["tuple[str, int, complex]"] = reveal_type(tuple2) +reveal_type(tuple2, expected_text="tuple[str, int, complex]") diff --git a/packages/pyright-internal/src/tests/samples/tuples15.py b/packages/pyright-internal/src/tests/samples/tuples15.py index c3790f01bfbd..6b6c61d3afd9 100644 --- a/packages/pyright-internal/src/tests/samples/tuples15.py +++ b/packages/pyright-internal/src/tests/samples/tuples15.py @@ -1,16 +1,16 @@ # This sample tests the special-case handling of the __add__ operator # when two tuples of known types are added together. -from typing import Literal, Tuple +from typing import Tuple v1 = () + () -t1: Literal["tuple[()]"] = reveal_type(v1) +reveal_type(v1, expected_text="tuple[()]") def func1(a: Tuple[int, int, int], b: Tuple[str, str]): - t2: Literal["tuple[int, int, int, str, str]"] = reveal_type(a + b) + reveal_type(a + b, expected_text="tuple[int, int, int, str, str]") def func2(a: Tuple[int, int, int], b: Tuple[str, ...]): - t2: Literal["Tuple[int | str, ...]"] = reveal_type(a + b) + reveal_type(a + b, expected_text="tuple[int | str, ...]") diff --git a/packages/pyright-internal/src/tests/samples/tuples16.py b/packages/pyright-internal/src/tests/samples/tuples16.py new file mode 100644 index 000000000000..ca57eebb05b9 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/tuples16.py @@ -0,0 +1,14 @@ +# This sample tests the handling of bidirectional type inference +# for unions of tuples. + +from typing import Dict, Tuple, Union + + +# The following two unions are the same but declared in different orders. 
+TupleUnion1 = Union[Tuple[int, str], Tuple[int, str, Dict[str, Union[str, int]]]] +TupleUnion2 = Union[Tuple[int, str, Dict[str, Union[str, int]]], Tuple[int, str]] + +v1: TupleUnion1 = 1, "two", {"hey": "three"} +v2: TupleUnion2 = 1, "two", {"hey": "three"} +v3: TupleUnion1 = 1, "two" +v4: TupleUnion2 = 1, "two" diff --git a/packages/pyright-internal/src/tests/samples/tuples4.py b/packages/pyright-internal/src/tests/samples/tuples4.py index 8a32430440cb..2e848d35b8f0 100644 --- a/packages/pyright-internal/src/tests/samples/tuples4.py +++ b/packages/pyright-internal/src/tests/samples/tuples4.py @@ -1,5 +1,5 @@ # This sample tests the translation of a heterogenous tuple -# into an Interable. +# into an Iterable. from typing import Iterable, TypeVar, Union diff --git a/packages/pyright-internal/src/tests/samples/tuples7.py b/packages/pyright-internal/src/tests/samples/tuples7.py index d756dc6a7cc6..2ea624f3f088 100644 --- a/packages/pyright-internal/src/tests/samples/tuples7.py +++ b/packages/pyright-internal/src/tests/samples/tuples7.py @@ -1,7 +1,7 @@ # This sample tests handling of tuples and tracking # of specific types within a tuple. -from typing import List, Literal, Optional, Tuple, TypeVar +from typing import Generic, List, Optional, Tuple, TypeVar _T = TypeVar("_T") @@ -19,10 +19,10 @@ class ClassA(Tuple[int, str, int, _T]): cc1: int = c dd1: complex = d -t_A0: Literal["int"] = reveal_type(objA[0]) -t_A1: Literal["str"] = reveal_type(objA[1]) -t_A2: Literal["int"] = reveal_type(objA[2]) -t_A3: Literal["complex"] = reveal_type(objA[3]) +reveal_type(objA[0], expected_text="int") +reveal_type(objA[1], expected_text="str") +reveal_type(objA[2], expected_text="int") +reveal_type(objA[3], expected_text="complex") # This should generate an error because the trailing # comma turns the index value into a tuple. 
@@ -42,9 +42,9 @@ class ClassB(Tuple[_T, ...]): (x, y, z) = objB -t_x: Literal["complex"] = reveal_type(x) -t_y: Literal["complex"] = reveal_type(y) -t_z: Literal["complex"] = reveal_type(z) +reveal_type(x, expected_text="complex") +reveal_type(y, expected_text="complex") +reveal_type(z, expected_text="complex") xx2: complex = objB[0] yy2: complex = objB[1] @@ -53,4 +53,9 @@ class ClassB(Tuple[_T, ...]): def func1(lst: Optional[List[str]]) -> None: for item in lst or (): - t1: Literal["str"] = reveal_type(item) + reveal_type(item, expected_text="str") + + +class X(Generic[_T]): + def __init__(self): + self._x: Tuple[_T, ...] = () diff --git a/packages/pyright-internal/src/tests/samples/tuples8.py b/packages/pyright-internal/src/tests/samples/tuples8.py index c8ba941bff86..ccdb91dc315b 100644 --- a/packages/pyright-internal/src/tests/samples/tuples8.py +++ b/packages/pyright-internal/src/tests/samples/tuples8.py @@ -1,7 +1,7 @@ # This sample verifies that the "tuple" type is treated # analogously to "Tuple" type. 
-from typing import Iterable, Literal, Tuple, TypeVar +from typing import Iterable, Tuple, TypeVar _T = TypeVar("_T") @@ -95,4 +95,4 @@ def qux() -> None: foo = ["foo"] quux = baz(foo) for s in quux: - t1: Literal["str"] = reveal_type(s) + reveal_type(s, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/typeAlias1.py b/packages/pyright-internal/src/tests/samples/typeAlias1.py index 07520aae2901..aac7f712fa20 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias1.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias1.py @@ -23,8 +23,8 @@ class A: Value2 = 1 -t_value1: Literal["Type[Literal[1]]"] = reveal_type(A.Value1) -t_value2: Literal["int"] = reveal_type(A.Value2) +reveal_type(A.Value1, expected_text="Type[Literal[1]]") +reveal_type(A.Value2, expected_text="int") Alias1 = Literal[0, 1] diff --git a/packages/pyright-internal/src/tests/samples/typeAlias10.py b/packages/pyright-internal/src/tests/samples/typeAlias10.py index 77ecc936f5ec..93bbec90cf72 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias10.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias10.py @@ -1,15 +1,45 @@ -# This sample tests a recursive type alias used within -# a recursive function. +# This sample tests the handling of generic type alias where a type +# argument is not provided. -from typing import Dict, Union +# pyright: reportMissingTypeArgument=true +from typing import Any, Generic, TypeAlias, TypeVar -A = Union[str, Dict[str, "A"]] +_T = TypeVar("_T") -def foo(x: A): - if isinstance(x, str): - print(x) - else: - for _, v in x.items(): - foo(v) +class A(Generic[_T]): + ... + + +# This should generate an error if reportMissingTypeArgument is enabled. +B: TypeAlias = A + + +v1: B = A() + +# This should generate an error because B is already specialized. +v2: B[int] = A() + +# This should generate an error if reportMissingTypeArgument is enabled. 
+v3: A = A() + + +C = A[str] + + +# This should generate an error because C is already specialized. +v4: C[int] + + +class D(Generic[_T]): + def __getitem__(self, key: Any) -> int: + ... + + +D_Alias = D[_T] + +d: D_Alias[Any] = D() +item = d[0] + +x: int = D_Alias[Any]()[0] diff --git a/packages/pyright-internal/src/tests/samples/typeAlias11.py b/packages/pyright-internal/src/tests/samples/typeAlias11.py index efb5845dd3c8..b369a5c1ff47 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias11.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias11.py @@ -1,50 +1,37 @@ -# This sample tests the handling of generic type aliases that are -# defined in terms of other generic type aliases in a nested manner. +# This sample tests the simple aliasing of a generic class with no +# type arguments. -from typing import Awaitable, Callable, Generic, TypeVar +from typing import Generic, TypeVar, Union +import collections +from collections import OrderedDict -TSource = TypeVar("TSource") -TError = TypeVar("TError") -TResult = TypeVar("TResult") -TNext = TypeVar("TNext") +_T = TypeVar("_T") -class Context(Generic[TResult]): - Response: TResult +class ClassA(Generic[_T]): + def __init__(self, x: _T): + pass -class Result(Generic[TResult, TError]): - def map( - self, mapper: Callable[[Context[TResult]], TResult] - ) -> "Result[TResult, TError]": - return Result() +A = ClassA +reveal_type(A(3), expected_text="ClassA[int]") -HttpFuncResult = Result[Context[TResult], TError] -HttpFuncResultAsync = Awaitable[Result[Context[TResult], TError]] +TA1 = collections.OrderedDict +TA2 = OrderedDict -HttpFunc = Callable[ - [Context[TNext]], - HttpFuncResultAsync[TResult, TError], -] -HttpHandler = Callable[ - [ - HttpFunc[TNext, TResult, TError], - Context[TSource], - ], - HttpFuncResultAsync[TResult, TError], -] +TA1[int, int] +TA2[int, int] +TA3 = TA1 -async def run_async( - ctx: Context[TSource], - handler: HttpHandler[str, TResult, TError, TSource], -) -> 
Result[TResult, TError]: - result = Result[TResult, TError]() +TA3[int, int] - def mapper(x: Context[TResult]) -> TResult: - return x.Response - return result.map(mapper) +TA4 = Union[dict, OrderedDict] + +# This should generate two errors because the two types in TA4 +# are already specialized. +TA4[int, int] diff --git a/packages/pyright-internal/src/tests/samples/typeAlias12.py b/packages/pyright-internal/src/tests/samples/typeAlias12.py index 1a074226c3f7..ee4d7f0a72cd 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias12.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias12.py @@ -1,24 +1,20 @@ -# This sample verifies that a generic type alias with a Callable -# works correctly. +# This sample tests the handling of a generic type alias that uses +# a union that collapses to a single type when specialized. -# pyright: reportInvalidTypeVarUse=false +from typing import List, TypeVar, Union -from typing import Callable, Literal, TypeVar +V = TypeVar("V") +U = TypeVar("U") -T = TypeVar("T") -F = Callable[[T], T] +Alias = Union[V, U] -def f() -> F[T]: - def g(x: T) -> T: - ... +def fn(x: Alias[V, V]) -> V: + return x - return g +def fn2(x: List[Alias[V, V]]) -> List[V]: + return x -g = f() -v1 = g("foo") -t_v1: Literal["str"] = reveal_type(v1) -v2 = g(1) -t_v2: Literal["int"] = reveal_type(v2) +reveal_type(Alias[int, int], expected_text="Type[int]") diff --git a/packages/pyright-internal/src/tests/samples/typeAlias14.py b/packages/pyright-internal/src/tests/samples/typeAlias14.py deleted file mode 100644 index dd3bb31261eb..000000000000 --- a/packages/pyright-internal/src/tests/samples/typeAlias14.py +++ /dev/null @@ -1,51 +0,0 @@ -# This sample tests that certain type aliases cannot be used within -# call expressions. 
- -from typing import Callable, Optional, Tuple, Type, TypeVar, Union - - -T_Union = Union[int, float] - -# This should generate an error -T_Union(3) - -T_Callable = Callable[[int], None] - -# This should generate an error -T_Callable(1) - - -T_Type1 = Type[int] - -# This should generate an error -T_Type1(object) - -T_Type2 = type -T_Type2(object) - -T_Optional = Optional[str] - -# This should generate an error -T_Optional(3) - - -T_TypeVar = TypeVar("T_TypeVar") - -# This should generate an error -T_TypeVar() - - -T_Tuple1 = Tuple[int, ...] - -# This should generate an error -T_Tuple1([3, 4]) - - -I = int - -I(3) - - -T_Tuple2 = tuple[int, ...] - -T_Tuple2([3, 4, 5]) diff --git a/packages/pyright-internal/src/tests/samples/typeAlias3.py b/packages/pyright-internal/src/tests/samples/typeAlias3.py index 2fa2634aca5a..669e16f5ef49 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias3.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias3.py @@ -4,7 +4,7 @@ # pyright: strict -from typing import Callable, Generic, Literal, Tuple, Optional, TypeVar +from typing import Callable, Generic, Tuple, Optional, TypeVar from typing_extensions import ParamSpec T = TypeVar("T") @@ -25,10 +25,10 @@ def __new__(cls, value: T) -> "ClassA[T]": TypeAliasA = ClassA[T] a1 = ClassA(3.0) -t_a1: Literal["ClassA[float]"] = reveal_type(a1) +reveal_type(a1, expected_text="ClassA[float]") a2 = TypeAliasA(3.0) -t_a2: Literal["ClassA[float]"] = reveal_type(a2) +reveal_type(a2, expected_text="ClassA[float]") Func = Callable[P, T] AnyFunc = Func[P, int] diff --git a/packages/pyright-internal/src/tests/samples/typeAlias4.py b/packages/pyright-internal/src/tests/samples/typeAlias4.py index 772ef005c1c6..8189ca8a7c46 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias4.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias4.py @@ -1,7 +1,7 @@ # This sample tests the handling of the Python 3.9 # TypeAlias feature as documented in PEP 613. 
-from typing import Literal, Type, TypeAlias as TA, Union +from typing import Type, TypeAlias as TA, Union type1: TA = Union[int, str] @@ -53,6 +53,16 @@ def requires_string(a: str): ExplicitAlias: TA = int SimpleNonAlias: Type[int] = int -t1: Literal["Type[int]"] = reveal_type(SimpleAlias) -t2: Literal["Type[int]"] = reveal_type(ExplicitAlias) -t3: Literal["Type[int]"] = reveal_type(SimpleNonAlias) +reveal_type(SimpleAlias, expected_text="Type[int]") +reveal_type(ExplicitAlias, expected_text="Type[int]") +reveal_type(SimpleNonAlias, expected_text="Type[int]") + + +class ClassB: + my_type1: TA = int + + +def func1(): + # This should generate an error because type aliases are allowed + # only in classes or modules. + my_type1: TA = int diff --git a/packages/pyright-internal/src/tests/samples/typeAlias5.py b/packages/pyright-internal/src/tests/samples/typeAlias5.py index 592bc2964df5..3ac15ebf253a 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias5.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias5.py @@ -2,7 +2,7 @@ # TypeVars. from datetime import datetime -from typing import IO, List, Type, TypeVar, Union +from typing import IO, Dict, Generic, List, Type, TypeVar, Union _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") @@ -39,3 +39,12 @@ def __int__(self) -> int: # This should generate an error. v3: FooIsh[Type[Bar]] = 42 + + +MyTypeAlias = Dict[_T1, _T2] + + +class MyClass1(Generic[_T1, _T2]): + # This should generate an error because S and T are bound + # type variables. + MyTypeAlias = Dict[_T1, _T2] diff --git a/packages/pyright-internal/src/tests/samples/typeAlias6.py b/packages/pyright-internal/src/tests/samples/typeAlias6.py index d1e08956d7ee..dd3bb31261eb 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias6.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias6.py @@ -1,44 +1,51 @@ -# This sample tests Pyright's handling of recursive type aliases. 
+# This sample tests that certain type aliases cannot be used within +# call expressions. -from typing import Dict, List, TypeVar, Union +from typing import Callable, Optional, Tuple, Type, TypeVar, Union -MyTree = List[Union["MyTree", int]] -t1: MyTree = [1, 2, 3, [3, 4], [[3], 5]] +T_Union = Union[int, float] -# This should generate an error because a str is not allowed. -t2: MyTree = [3, ""] +# This should generate an error +T_Union(3) -# This should generate an error because a str is not allowed. -t3: MyTree = [1, 2, 3, [3, 4], [3, 4, 5, [3, "4"]]] +T_Callable = Callable[[int], None] -_T = TypeVar("_T") -GenericUnion = Union[int, _T] +# This should generate an error +T_Callable(1) -i1: GenericUnion[str] = "hi" -i1 = 3 -i2: GenericUnion[float] = 3 -# This should generate an error because str isn't compatible. -i2 = "hi" +T_Type1 = Type[int] -Foo = Union[bool, List["Foo"], Dict["Foo", "Foo"]] +# This should generate an error +T_Type1(object) -bar1: Foo = [True, [True, False]] -bar2: Foo = [True, [True], {True: False}] -bar3: Foo = {[True]: False} -bar4: Foo = {True: [False]} +T_Type2 = type +T_Type2(object) -# These should generate errors. -baz1: Foo = [True, ["True", False]] -baz2: Foo = [True, [True], {True: "False"}] -baz3: Foo = {["True"]: False} -baz4: Foo = {True: ["False"]} +T_Optional = Optional[str] -Json = Union[None, int, str, float, List["Json"], Dict[str, "Json"]] +# This should generate an error +T_Optional(3) + + +T_TypeVar = TypeVar("T_TypeVar") # This should generate an error -a1: Json = {"a": 1, "b": 3j} +T_TypeVar() + + +T_Tuple1 = Tuple[int, ...] # This should generate an error -a2: Json = [2, 3j] +T_Tuple1([3, 4]) + + +I = int + +I(3) + + +T_Tuple2 = tuple[int, ...] 
+ +T_Tuple2([3, 4, 5]) diff --git a/packages/pyright-internal/src/tests/samples/typeAlias7.py b/packages/pyright-internal/src/tests/samples/typeAlias7.py index 82fe7653ff7e..efb5845dd3c8 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias7.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias7.py @@ -1,32 +1,50 @@ -# This sample tests Pyright's handling of recursive type aliases -# that are also generic. +# This sample tests the handling of generic type aliases that are +# defined in terms of other generic type aliases in a nested manner. -from typing import List, TypeVar, Union +from typing import Awaitable, Callable, Generic, TypeVar -_T1 = TypeVar("_T1", str, int) -_T2 = TypeVar("_T2") -GenericTypeAlias1 = List[Union["GenericTypeAlias1[_T1]", _T1]] +TSource = TypeVar("TSource") +TError = TypeVar("TError") +TResult = TypeVar("TResult") +TNext = TypeVar("TNext") -SpecializedTypeAlias1 = GenericTypeAlias1[str] -a1: SpecializedTypeAlias1 = ["hi", ["hi", "hi"]] +class Context(Generic[TResult]): + Response: TResult -# This should generate an error because int doesn't match the -# constraint of the TypeVar _T1. -SpecializedClass2 = GenericTypeAlias1[float] -b1: GenericTypeAlias1[str] = ["hi", "bye", [""], [["hi"]]] +class Result(Generic[TResult, TError]): + def map( + self, mapper: Callable[[Context[TResult]], TResult] + ) -> "Result[TResult, TError]": + return Result() -# This should generate an error. 
-b2: GenericTypeAlias1[str] = ["hi", [2.4]] +HttpFuncResult = Result[Context[TResult], TError] +HttpFuncResultAsync = Awaitable[Result[Context[TResult], TError]] -GenericTypeAlias2 = List[Union["GenericTypeAlias2[_T1, _T2]", _T1, _T2]] +HttpFunc = Callable[ + [Context[TNext]], + HttpFuncResultAsync[TResult, TError], +] -c2: GenericTypeAlias2[str, int] = [[3, ["hi"]], "hi"] +HttpHandler = Callable[ + [ + HttpFunc[TNext, TResult, TError], + Context[TSource], + ], + HttpFuncResultAsync[TResult, TError], +] -c3: GenericTypeAlias2[str, float] = [[3, ["hi", 3.4, [3.4]]], "hi"] -# This should generate an error because a float is a type mismatch. -c4: GenericTypeAlias2[str, int] = [[3, ["hi", 3, [3.4]]], "hi"] +async def run_async( + ctx: Context[TSource], + handler: HttpHandler[str, TResult, TError, TSource], +) -> Result[TResult, TError]: + result = Result[TResult, TError]() + + def mapper(x: Context[TResult]) -> TResult: + return x.Response + + return result.map(mapper) diff --git a/packages/pyright-internal/src/tests/samples/typeAlias8.py b/packages/pyright-internal/src/tests/samples/typeAlias8.py index b957757e2ebf..cd6e6efb2aa2 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias8.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias8.py @@ -1,26 +1,24 @@ -# This sample tests Pyright's handling of recursive type aliases. +# This sample verifies that a generic type alias with a Callable +# works correctly. -from typing import List, Literal, TypeAlias, Union +# pyright: reportInvalidTypeVarUse=false -# This should generate an error because the forward reference -# type needs to be in quotes. -GenericClass0 = List[Union[GenericClass0, int]] +from typing import Callable, TypeVar -# This should generate an error because the type alias directly -# refers to itself. 
-RecursiveUnion = Union["RecursiveUnion", int] +T = TypeVar("T") +F = Callable[[T], T] -a1: RecursiveUnion = 3 -# This should generate an error because the type alias refers -# to itself through a mutually-referential type alias. -MutualReference1 = Union["MutualReference2", int] -MutualReference2 = Union["MutualReference1", str] +def f() -> F[T]: + def g(x: T) -> T: + ... -# This should generate an error because the type alias refers -# to itself. -MutualReference3: TypeAlias = "MutualReference3" + return g -RecursiveType: TypeAlias = list[Union[str, "RecursiveType"]] -t1: Literal["Type[list[str | RecursiveType]]"] = reveal_type(RecursiveType) +g = f() +v1 = g("foo") +reveal_type(v1, expected_text="str") + +v2 = g(1) +reveal_type(v2, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/typeAlias9.py b/packages/pyright-internal/src/tests/samples/typeAlias9.py index eaaaa145a40d..f465da2e8326 100644 --- a/packages/pyright-internal/src/tests/samples/typeAlias9.py +++ b/packages/pyright-internal/src/tests/samples/typeAlias9.py @@ -1,51 +1,35 @@ -# This sample tests the handling of complex recursive types. +# This sample tests that generic type aliases are properly flagged as +# partially-unknown types if their type arguments are omitted. -# pyright: strict, reportUnusedVariable=false +# pyright: reportUnknownParameterType=true, reportMissingTypeArgument=false -from typing import Dict, List, Literal, Optional, Union +from typing import Dict, List, TypeVar +T = TypeVar("T") +Foo = List[T] -JSONArray = List["JSONType"] -JSONObject = Dict[str, "JSONType"] -JSONPrimitive = Union[str, float, int, bool, None] -JSONStructured = Union[JSONArray, JSONObject] +# This should generate an error because Foo is missing a type argument, +# so the type of `f` is partially unknown. 
+def foo1(f: Foo) -> None: + pass -JSONType = Union[JSONPrimitive, JSONStructured] +Bar = Foo -# Using type alias checking for list: -def f2(args: JSONStructured): - if isinstance(args, List): - t1: Literal[ - "List[str | float | int | bool | Type[List[JSONType]] | Dict[str, Type[str] | Type[float] | Type[int] | Type[bool] | Type[List[JSONType]] | Type[Dict[str, ...]] | None] | None]" - ] = reveal_type(args) - else: - t2: Literal[ - "Dict[str, Type[str] | Type[float] | Type[int] | Type[bool] | Type[List[str | float | int | bool | JSONArray | Dict[str, ...] | None]] | Type[Dict[str, ...]] | None]" - ] = reveal_type(args) - dargs: JSONObject = args +# This should generate an error because Bar doesn't specialize +# Foo appropriately. +def foo2(f: Bar) -> None: + pass -# Using type alias checking for dict: -def f3(args: JSONStructured): - if isinstance(args, Dict): - t1: Literal[ - "Dict[str, Type[str] | Type[float] | Type[int] | Type[bool] | Type[List[str | float | int | bool | JSONArray | Dict[str, ...] | None]] | Type[Dict[str, ...]] | None]" - ] = reveal_type(args) - else: - t2: Literal[ - "List[str | float | int | bool | Type[List[JSONType]] | Dict[str, Type[str] | Type[float] | Type[int] | Type[bool] | Type[List[JSONType]] | Type[Dict[str, ...]] | None] | None]" - ] = reveal_type(args) - largs: JSONArray = args +K = TypeVar("K") +V = TypeVar("V") -# Using type alias for "is None" narrowing: -LinkedList = Optional[tuple[int, "LinkedList"]] +Baz = Dict[K, V] -def g(xs: LinkedList): - while xs is not None: - x, rest = xs - yield x - xs = rest +# This should generate an error because Baz is only partially specialized. 
+def foo3(f: Baz[int]) -> None: + pass diff --git a/packages/pyright-internal/src/tests/samples/typeGuard1.py b/packages/pyright-internal/src/tests/samples/typeGuard1.py index ebeda0031c01..45eda46da91e 100644 --- a/packages/pyright-internal/src/tests/samples/typeGuard1.py +++ b/packages/pyright-internal/src/tests/samples/typeGuard1.py @@ -5,7 +5,7 @@ # pyright: reportMissingModuleSource=false import os -from typing import Any, List, Literal, Tuple, TypeVar, Union +from typing import Any, List, Tuple, TypeVar, Union from typing_extensions import TypeGuard _T = TypeVar("_T") @@ -17,9 +17,9 @@ def is_two_element_tuple(a: Tuple[_T, ...]) -> TypeGuard[Tuple[_T, _T]]: def func1(a: Tuple[int, ...]): if is_two_element_tuple(a): - t1: Literal["Tuple[int, int]"] = reveal_type(a) + reveal_type(a, expected_text="Tuple[int, int]") else: - t2: Literal["Tuple[int, ...]"] = reveal_type(a) + reveal_type(a, expected_text="Tuple[int, ...]") def is_string_list(val: List[Any], allow_zero_entries: bool) -> TypeGuard[List[str]]: @@ -30,14 +30,17 @@ def is_string_list(val: List[Any], allow_zero_entries: bool) -> TypeGuard[List[s def func2(a: List[Union[str, int]]): if is_string_list(a, True): - t1: Literal["List[str]"] = reveal_type(a) + reveal_type(a, expected_text="List[str]") else: - t2: Literal["List[str | int]"] = reveal_type(a) + reveal_type(a, expected_text="List[str | int]") # This should generate an error because TypeGuard # has no type argument. -def bad1(a: int) -> TypeGuard: +def bad1(a: int, b: object) -> TypeGuard: + # This is a runtime use of TypeGuard and shouldn't generate an error. 
+ if b is TypeGuard: + return True return True diff --git a/packages/pyright-internal/src/tests/samples/typeGuard2.py b/packages/pyright-internal/src/tests/samples/typeGuard2.py index 9a30f25432ab..47bfd133ca91 100644 --- a/packages/pyright-internal/src/tests/samples/typeGuard2.py +++ b/packages/pyright-internal/src/tests/samples/typeGuard2.py @@ -3,7 +3,7 @@ # pyright: strict, reportMissingModuleSource=false -from typing import Any, Callable, List, Literal, Sequence, TypeVar, overload +from typing import Any, Callable, List, Sequence, TypeVar, overload from typing_extensions import TypeGuard @@ -43,16 +43,16 @@ def overloaded_filter( x1 = cb1(1) -t_x1: Literal["bool"] = reveal_type(x1) +reveal_type(x1, expected_text="bool") sf1 = simple_filter([], cb1) -t_sf1: Literal["List[object]"] = reveal_type(sf1) +reveal_type(sf1, expected_text="List[object]") sf2 = simple_filter([], cb2) -t_sf2: Literal["List[object]"] = reveal_type(sf2) +reveal_type(sf2, expected_text="List[object]") of1 = overloaded_filter([], cb1) -t_of1: Literal["Sequence[int]"] = reveal_type(of1) +reveal_type(of1, expected_text="Sequence[int]") of2 = overloaded_filter([], cb2) -t_of2: Literal["Sequence[object]"] = reveal_type(of2) +reveal_type(of2, expected_text="Sequence[object]") diff --git a/packages/pyright-internal/src/tests/samples/typeGuard3.py b/packages/pyright-internal/src/tests/samples/typeGuard3.py new file mode 100644 index 000000000000..58147363128a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeGuard3.py @@ -0,0 +1,98 @@ +# This sample tests the StrictTypeGuard form. 
+ +from typing import Any, Literal, Mapping, Sequence, TypeVar, Union +from typing_extensions import StrictTypeGuard + + +def is_str1(val: Union[str, int]) -> StrictTypeGuard[str]: + return isinstance(val, str) + + +def func1(val: Union[str, int]): + if is_str1(val): + reveal_type(val, expected_text="str") + else: + reveal_type(val, expected_text="int") + + +def is_true(o: object) -> StrictTypeGuard[Literal[True]]: + ... + + +def func2(val: bool): + if not is_true(val): + reveal_type(val, expected_text="bool") + else: + reveal_type(val, expected_text="Literal[True]") + + reveal_type(val, expected_text="bool") + + +def is_list(val: object) -> StrictTypeGuard[list[Any]]: + return isinstance(val, list) + + +def func3(val: dict[str, str] | list[str] | list[int] | Sequence[int]): + if is_list(val): + reveal_type(val, expected_text="list[str] | list[int] | list[Any]") + else: + reveal_type(val, expected_text="dict[str, str] | Sequence[int]") + + +def func4(val: dict[str, str] | list[str] | list[int] | tuple[int]): + if is_list(val): + reveal_type(val, expected_text="list[str] | list[int]") + else: + reveal_type(val, expected_text="dict[str, str] | tuple[int]") + + +_K = TypeVar("_K") +_V = TypeVar("_V") + + +def is_dict(val: Mapping[_K, _V]) -> StrictTypeGuard[dict[_K, _V]]: + return isinstance(val, dict) + + +def func5(val: dict[_K, _V] | Mapping[_K, _V]): + if not is_dict(val): + reveal_type(val, expected_text="Mapping[_K@func5, _V@func5]") + else: + reveal_type(val, expected_text="dict[_K@func5, _V@func5]") + + +def is_cardinal_direction(val: str) -> StrictTypeGuard[Literal["N", "S", "E", "W"]]: + return val in ("N", "S", "E", "W") + + +def func6(direction: Literal["NW", "E"]): + if is_cardinal_direction(direction): + reveal_type(direction, expected_text="Literal['E']") + else: + reveal_type(direction, expected_text="Literal['NW']") + + +class Animal: + ... + + +class Kangaroo(Animal): + ... + + +class Koala(Animal): + ... 
+ + +T = TypeVar("T") + + +def is_marsupial(val: Animal) -> StrictTypeGuard[Kangaroo | Koala]: + return isinstance(val, Kangaroo | Koala) + + +# This should generate an error because list[T] isn't assignable to list[T | None]. +def has_no_nones( + val: list[T | None], +) -> StrictTypeGuard[list[T]]: + return None not in val diff --git a/packages/pyright-internal/src/tests/samples/typeIgnore4.py b/packages/pyright-internal/src/tests/samples/typeIgnore4.py new file mode 100644 index 000000000000..99bdae16c671 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeIgnore4.py @@ -0,0 +1,10 @@ +# This sample tests the reportUnnecessaryTypeIgnoreComment diagnostic check +# as applied to the entire file. + +a: str = 3 # type: ignore + +# This should emit an error if reportUnnecessaryTypeComment is enabled +b: str = "" # type: ignore + +# This should emit an error if reportUnnecessaryTypeComment is enabled +c: int = 3 # type: ignore diff --git a/packages/pyright-internal/src/tests/samples/typeIgnore5.py b/packages/pyright-internal/src/tests/samples/typeIgnore5.py new file mode 100644 index 000000000000..2cfea889674d --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeIgnore5.py @@ -0,0 +1,7 @@ +# This sample tests the reportUnnecessaryTypeIgnoreComment diagnostic check +# as applied to individual lines. + +# This should generate an error if reportUnnecessaryTypeIgnoreComment is enabled. +# type: ignore + +b: str = "" diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowing5.py b/packages/pyright-internal/src/tests/samples/typeNarrowing5.py index b31bb4d24a27..da7e65a0fef4 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowing5.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowing5.py @@ -2,18 +2,18 @@ # where the source contains Unknown or Any type # arguments. 
-from typing import Any, Dict, Literal +from typing import Any, Dict def func1(struct: Dict[Any, Any]): a1: Dict[str, Any] = struct - t1: Literal["Dict[str, Any]"] = reveal_type(a1) + reveal_type(a1, expected_text="Dict[str, Any]") def func2(struct: Any): a1: Dict[Any, str] = struct - t1: Literal["Dict[Any, str]"] = reveal_type(a1) + reveal_type(a1, expected_text="Dict[Any, str]") if isinstance(struct, Dict): a2: Dict[str, Any] = struct - t2: Literal["Dict[str, Any]"] = reveal_type(a2) + reveal_type(a2, expected_text="Dict[str, Any]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowing6.py b/packages/pyright-internal/src/tests/samples/typeNarrowing6.py index b6507d6ac41a..166611ba17e1 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowing6.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowing6.py @@ -2,8 +2,6 @@ # is narrowed is "reset" when part of the member access expression # is reassigned. -from typing import Literal - class Foo1: val0: int @@ -19,11 +17,11 @@ def func(a: bool): foo2.val1 = 0 foo2.val2.val0 = 4 - t1: Literal["Literal[0]"] = reveal_type(foo2.val1) - t2: Literal["Literal[4]"] = reveal_type(foo2.val2.val0) + reveal_type(foo2.val1, expected_text="Literal[0]") + reveal_type(foo2.val2.val0, expected_text="Literal[4]") if a: foo2 = Foo2() - t3: Literal["int"] = reveal_type(foo2.val1) - t4: Literal["int"] = reveal_type(foo2.val2.val0) + reveal_type(foo2.val1, expected_text="int") + reveal_type(foo2.val2.val0, expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowing7.py b/packages/pyright-internal/src/tests/samples/typeNarrowing7.py index 25032b337dc4..3070ec3253fa 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowing7.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowing7.py @@ -1,6 +1,6 @@ # This sample tests type narrowing for index operations. 
-from typing import Dict, List, Literal, Optional, Union +from typing import Dict, List, Optional, Union class Foo: @@ -9,57 +9,57 @@ class Foo: def func1(v1: List[Optional[complex]]): if v1[0] and v1[1]: - t_v1_0: Literal["complex"] = reveal_type(v1[0]) - t_v1_1: Literal["complex"] = reveal_type(v1[1]) - t_v1_2: Literal["complex | None"] = reveal_type(v1[2]) + reveal_type(v1[0], expected_text="complex") + reveal_type(v1[1], expected_text="complex") + reveal_type(v1[2], expected_text="complex | None") v1[0], v1[1] = None, None - t_v1_0_updated1: Literal["None"] = reveal_type(v1[0]) - t_v1_1_updated1: Literal["None"] = reveal_type(v1[1]) + reveal_type(v1[0], expected_text="None") + reveal_type(v1[1], expected_text="None") v1[0], v1[1] = 1, 2 - t_v1_0_updated2: Literal["Literal[1]"] = reveal_type(v1[0]) - t_v1_1_updated2: Literal["Literal[2]"] = reveal_type(v1[1]) + reveal_type(v1[0], expected_text="Literal[1]") + reveal_type(v1[1], expected_text="Literal[2]") v1 = [] - t_v1_0_updated3: Literal["complex | None"] = reveal_type(v1[0]) + reveal_type(v1[0], expected_text="complex | None") i = 1 if v1[i]: - t_v1_i: Literal["complex | None"] = reveal_type(v1[i]) + reveal_type(v1[i], expected_text="complex | None") foo = Foo() if foo.val[0][2]: - t_foo_val_0_2: Literal["str"] = reveal_type(foo.val[0][2]) - t_foo_val_1_2: Literal["str | None"] = reveal_type(foo.val[1][2]) + reveal_type(foo.val[0][2], expected_text="str") + reveal_type(foo.val[1][2], expected_text="str | None") foo.val = [] - t_foo_val_0_2_updated: Literal["str | None"] = reveal_type(foo.val[0][2]) + reveal_type(foo.val[0][2], expected_text="str | None") def func2(v1: List[Union[Dict[str, str], List[str]]]): if isinstance(v1[0], dict): - t_v1_0: Literal["Dict[str, str]"] = reveal_type(v1[0]) - t_v1_1: Literal["Dict[str, str] | List[str]"] = reveal_type(v1[1]) + reveal_type(v1[0], expected_text="Dict[str, str]") + reveal_type(v1[1], expected_text="Dict[str, str] | List[str]") def func3(): v1: Dict[str, int] = 
{} - t_v1_0: Literal["int"] = reveal_type(v1["x1"]) + reveal_type(v1["x1"], expected_text="int") v1["x1"] = 3 - t_v1_1: Literal["Literal[3]"] = reveal_type(v1["x1"]) + reveal_type(v1["x1"], expected_text="Literal[3]") v1[f"x2"] = 5 - t_v1_2: Literal["int"] = reveal_type(v1["x2"]) + reveal_type(v1["x2"], expected_text="int") v1 = {} - t_v1_3: Literal["int"] = reveal_type(v1["x1"]) + reveal_type(v1["x1"], expected_text="int") v2: Dict[str, Dict[str, int]] = {} - t_v2_0: Literal["int"] = reveal_type(v2["y1"]["y2"]) + reveal_type(v2["y1"]["y2"], expected_text="int") v2["y1"]["y2"] = 3 - t_v2_1: Literal["Literal[3]"] = reveal_type(v2["y1"]["y2"]) + reveal_type(v2["y1"]["y2"], expected_text="Literal[3]") v2["y1"] = {} - t_v2_2: Literal["int"] = reveal_type(v2["y1"]["y2"]) + reveal_type(v2["y1"]["y2"], expected_text="int") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py index eb57bef429b4..7674675aadb0 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingCallable1.py @@ -1,7 +1,7 @@ # This sample tests the type engine's narrowing logic for # callable expressions. 
-from typing import Callable, Literal, Optional, Type, TypeVar, Union +from typing import Callable, Optional, Type, TypeVar, Union class CallableObj: @@ -35,10 +35,10 @@ def g(a: Optional[Callable[[int], int]]): a(3) -T = TypeVar("T") +_T1 = TypeVar("_T1") -def test1(arg: Union[T, Callable[[], T]]) -> T: +def test1(arg: Union[_T1, Callable[[], _T1]]) -> _T1: if callable(arg): return arg() return arg @@ -51,13 +51,13 @@ def bar(self) -> None: def test2(o: Foo) -> None: if callable(o): - t_1: Literal[""] = reveal_type(o) + reveal_type(o, expected_text="") # This should generate an error o.foo() o.bar() r1 = o(1, 2, 3) - t_r1: Literal["Unknown"] = reveal_type(r1) + reveal_type(r1, expected_text="Unknown") else: o.bar() @@ -65,14 +65,14 @@ def test2(o: Foo) -> None: o(1, 2, 3) -T = TypeVar("T", int, str, Callable[[], int], Callable[[], str]) +_T2 = TypeVar("_T2", int, str, Callable[[], int], Callable[[], str]) -def test3(v: T) -> T: +def test3(v: _T2) -> Union[_T2, int, str]: if callable(v): - t1: Literal["() -> int | () -> str"] = reveal_type(v) - t2: Literal["int* | str*"] = reveal_type(v()) + reveal_type(v, expected_text="(() -> int) | (() -> str)") + reveal_type(v(), expected_text="int* | str*") return v() else: - t3: Literal["int* | str*"] = reveal_type(v) + reveal_type(v, expected_text="int* | str*") return v diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingFalsy1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingFalsy1.py new file mode 100644 index 000000000000..ca917c3a1b92 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingFalsy1.py @@ -0,0 +1,50 @@ +# This sample tests type narrowing for falsy and truthy values. + +from typing import Iterable, List, Literal, Optional, Union + + +class A: + ... + + +class B: + def __bool__(self) -> bool: + ... + + +class C: + def __bool__(self) -> Literal[False]: + ... + + +class D: + def __bool__(self) -> Literal[True]: + ... 
+ + +def func1(x: Union[int, List[int], A, B, C, D, None]) -> None: + if x: + reveal_type(x, expected_text="int | List[int] | A | B | D") + else: + reveal_type(x, expected_text="int | List[int] | B | C | None") + + +def func2(maybe_int: Optional[int]): + if bool(maybe_int): + reveal_type(maybe_int, expected_text="int") + else: + reveal_type(maybe_int, expected_text="int | None") + + +def func3(maybe_a: Optional[A]): + if bool(maybe_a): + reveal_type(maybe_a, expected_text="A") + else: + reveal_type(maybe_a, expected_text="None") + + +def func4(foo: Iterable[int]) -> None: + if foo: + reveal_type(foo, expected_text="Iterable[int]") + else: + reveal_type(foo, expected_text="Iterable[int]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py index 9495211b51ed..4df0e43f7b02 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIn1.py @@ -1,6 +1,6 @@ # This sample tests type narrowing for the "in" operator. -from typing import Optional, Union +from typing import Literal, Optional, Union import random @@ -38,3 +38,34 @@ def verify_none(p: None) -> None: # This should generate an error because y should # be narrowed to an int. 
verify_str(y) + + +def func1(x: Optional[Union[int, str]], y: Literal[1, 2, "b"], b: int): + if x in (1, 2, "a"): + reveal_type(x, expected_text="Literal[1, 2, 'a']") + + if x in (1, "2"): + reveal_type(x, expected_text="Literal[1, '2']") + + if x in (1, None): + reveal_type(x, expected_text="Literal[1] | None") + + if x in (1, b, "a"): + reveal_type(x, expected_text="int | Literal['a']") + + if y in (1, b, "a"): + reveal_type(y, expected_text="Literal[1, 2]") + + if y in (1, "a"): + reveal_type(y, expected_text="Literal[1]") + + if y in (1, "b"): + reveal_type(y, expected_text="Literal[1, 'b']") + + +def func2(a: Literal[1, 2, 3]): + x = (1, 2) + if a in x: + reveal_type(a, expected_text="Literal[1, 2]") + else: + reveal_type(a, expected_text="Literal[1, 2, 3]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py index ec32106b17b7..1b0c9ae6a8a0 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsNone1.py @@ -4,7 +4,7 @@ # pyright: strict, reportUnusedVariable=false -from typing import Literal, Optional, TypeVar +from typing import Optional, TypeVar def func1(x: Optional[int]): @@ -30,18 +30,18 @@ def func1(x: Optional[int]): def func2(val: _T1) -> _T1: if val is not None: - t1: Literal["str*"] = reveal_type(val) + reveal_type(val, expected_text="str*") return val else: - t2: Literal["None*"] = reveal_type(val) + reveal_type(val, expected_text="None*") return val def func3(x: object): if x is None: - t1: Literal["None"] = reveal_type(x) + reveal_type(x, expected_text="None") else: - t2: Literal["object"] = reveal_type(x) + reveal_type(x, expected_text="object") _T2 = TypeVar("_T2") @@ -49,8 +49,8 @@ def func3(x: object): def func4(x: _T2) -> _T2: if x is None: - t1: Literal["None*"] = reveal_type(x) + reveal_type(x, expected_text="None*") raise ValueError() else: - t2: 
Literal["_T2@func4"] = reveal_type(x) + reveal_type(x, expected_text="_T2@func4") return x diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple1.py new file mode 100644 index 000000000000..d9dec6cce291 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsNoneTuple1.py @@ -0,0 +1,42 @@ +# This sample tests the type narrowing case for unions of tuples +# where one or more of the entries is tested against type None. + +from typing import TypeVar, Tuple, Union + +_T1 = TypeVar("_T1") + + +def func1(a: Union[Tuple[_T1, None], Tuple[None, str]]) -> Tuple[_T1, None]: + if a[1] is None: + reveal_type(a, expected_text="Tuple[_T1@func1, None]") + return a + else: + reveal_type(a, expected_text="Tuple[None, str]") + raise ValueError() + + +_T2 = TypeVar("_T2", bound=Union[None, int]) + + +def func2(a: Union[Tuple[_T2, None], Tuple[None, str]]): + if a[0] is None: + reveal_type(a, expected_text="Tuple[_T2@func2, None] | Tuple[None, str]") + else: + reveal_type(a, expected_text="Tuple[_T2@func2, None]") + + +_T3 = TypeVar("_T3", None, int) + + +def func3(a: Union[Tuple[_T3, None], Tuple[None, str]]): + if a[0] is None: + reveal_type(a, expected_text="Tuple[_T3@func3, None] | Tuple[None, str]") + else: + reveal_type(a, expected_text="Tuple[_T3@func3, None]") + + +def func4(a: Union[Tuple[Union[int, None]], Tuple[None, str]]): + if a[0] is None: + reveal_type(a, expected_text="Tuple[int | None] | Tuple[None, str]") + else: + reveal_type(a, expected_text="Tuple[int | None]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py index f91567b8ce99..6e4e16588bcc 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance1.py @@ -1,6 +1,6 @@ # This sample 
exercises the type analyzer's isinstance type narrowing logic. -from typing import List, Literal, Optional, Sized, Type, TypeVar, Union, Any +from typing import List, Optional, Sized, Type, TypeVar, Union, Any class UnrelatedClass: @@ -120,13 +120,15 @@ def func6(ty: Type[T]) -> Type[T]: # Test the handling of protocol classes that support runtime checking. def func7(a: Union[List[int], int]): if isinstance(a, Sized): - t1: Literal["List[int]"] = reveal_type(a) + reveal_type(a, expected_text="List[int]") else: - t2: Literal["int"] = reveal_type(a) + reveal_type(a, expected_text="int") + # Test handling of member access expressions whose types change based # on isinstance checks. + class Base1: ... @@ -143,8 +145,8 @@ class Sub2(Base1): def handler(node: Base1) -> Any: if isinstance(node, Sub1): - t1: Literal["str"] = reveal_type(node.value) + reveal_type(node.value, expected_text="str") elif isinstance(node, Sub2): - t2: Literal["Base1"] = reveal_type(node.value) + reveal_type(node.value, expected_text="Base1") if isinstance(node.value, Sub1): - t3: Literal["Sub1"] = reveal_type(node.value) + reveal_type(node.value, expected_text="Sub1") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py index 8d17b3cee337..1da3a0c8cf7f 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance3.py @@ -2,7 +2,7 @@ # narrowing in the case where there is no overlap between the # value type and the test type. 
-from typing import Literal, Type +from typing import Type, TypeVar class A: @@ -25,13 +25,13 @@ def func1(val: A): # This should generate an error val.c_val - t1: Literal[""] = reveal_type(val) + reveal_type(val, expected_text="") if isinstance(val, C): val.a_val val.b_val val.c_val - t2: Literal[" and C>"] = reveal_type(val) + reveal_type(val, expected_text=" and C>") else: val.a_val @@ -39,7 +39,7 @@ def func1(val: A): # This should generate an error val.b_val - t3: Literal["A"] = reveal_type(val) + reveal_type(val, expected_text="A") def func2(val: Type[A]): @@ -50,15 +50,15 @@ def func2(val: Type[A]): # This should generate an error val.c_val - t1: Literal["Type[]"] = reveal_type(val) + reveal_type(val, expected_text="Type[]") if issubclass(val, C): val.a_val val.b_val val.c_val - t2: Literal[ - "Type[ and C>]" - ] = reveal_type(val) + reveal_type( + val, expected_text="Type[ and C>]" + ) else: val.a_val @@ -66,4 +66,13 @@ def func2(val: Type[A]): # This should generate an error val.b_val - t3: Literal["Type[A]"] = reveal_type(val) + reveal_type(val, expected_text="Type[A]") + + +_T1 = TypeVar("_T1", bound=A) + + +def func3(val: _T1) -> _T1: + if isinstance(val, B): + return val + return val diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py index 60c9032222e3..7dc7804d8e04 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance4.py @@ -1,7 +1,7 @@ # This sample checks the handling of callable types that are narrowed # to a particular type using an isinstance type narrowing test. 
-from typing import Callable, Literal, Union +from typing import Callable, Protocol, Union, runtime_checkable class Foo: @@ -9,34 +9,42 @@ def __call__(self, arg: int, bar: str) -> None: raise NotImplementedError -class Bar: +@runtime_checkable +class Bar(Protocol): def __call__(self, arg: int) -> None: raise NotImplementedError -class Baz: +@runtime_checkable +class Baz(Protocol): def __call__(self, arg: str) -> None: raise NotImplementedError def check_callable1(val: Union[Callable[[int, str], None], Callable[[int], None]]): if isinstance(val, Foo): - t1: Literal["Foo"] = reveal_type(val) + reveal_type(val, expected_text="Foo") else: - t2: Literal["(_p0: int) -> None"] = reveal_type(val) + # This doesn't get narrowed because `Foo` is not a runtime checkable protocol. + reveal_type(val, expected_text="((int, str) -> None) | ((int) -> None)") def check_callable2(val: Union[Callable[[int, str], None], Callable[[int], None]]): if isinstance(val, Bar): - t1: Literal["Bar"] = reveal_type(val) + reveal_type(val, expected_text="Bar") else: - t2: Literal["(_p0: int, _p1: str) -> None"] = reveal_type(val) + reveal_type(val, expected_text="(int, str) -> None") def check_callable3(val: Union[Callable[[int, str], None], Callable[[int], None]]): if isinstance(val, Baz): - t1: Literal["Never"] = reveal_type(val) + reveal_type(val, expected_text="Never") else: - t2: Literal["(_p0: int, _p1: str) -> None | (_p0: int) -> None"] = reveal_type( - val - ) + reveal_type(val, expected_text="((int, str) -> None) | ((int) -> None)") + + +def check_callable4(val: Union[type, Callable[[int], None]]): + if isinstance(val, type): + reveal_type(val, expected_text="type") + else: + reveal_type(val, expected_text="(int) -> None") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py index d5050ad15ef1..45968226717c 100644 --- 
a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance5.py @@ -1,7 +1,7 @@ # This sample tests isinstance type narrowing when the class list # includes "Callable". -from typing import Callable, List, Literal, Sequence, TypeVar, Union +from typing import Callable, List, Sequence, TypeVar, Union class A: @@ -26,13 +26,14 @@ class D(C): def func1(obj: Union[Callable[[int, str], int], List[int], A, B, C, D, TCall1]): if isinstance(obj, (Callable, Sequence, C)): - t1: Literal[ - "(_p0: int, _p1: str) -> int | List[int] | B | C | D | TCall1@func1" - ] = reveal_type(obj) + reveal_type( + obj, + expected_text="((int, str) -> int) | List[int] | B | C | D | TCall1@func1", + ) else: - t2: Literal["A"] = reveal_type(obj) + reveal_type(obj, expected_text="A") if isinstance(obj, Callable): - t3: Literal["(_p0: int, _p1: str) -> int | B | TCall1@func1"] = reveal_type(obj) + reveal_type(obj, expected_text="((int, str) -> int) | B | TCall1@func1") else: - t4: Literal["List[int] | C | D | A"] = reveal_type(obj) + reveal_type(obj, expected_text="List[int] | C | D | A") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance6.py b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance6.py new file mode 100644 index 000000000000..c97db1cabae4 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingIsinstance6.py @@ -0,0 +1,77 @@ +# This sample tests the case where isinstance or issubclass is used to +# narrow the type of a specialized class to a subclass where the type +# arguments are implied by the type arguments of the wider class. + +from typing import Any, Generic, Iterable, Sequence, Type, TypeVar, Union + +_T1 = TypeVar("_T1") + + +class SomeClass(Generic[_T1]): + ... + + +class OtherClass(SomeClass[_T1]): + ... 
+ + +def func1(a: SomeClass[int], b: Union[SomeClass[str], SomeClass[complex]]) -> None: + if isinstance(a, OtherClass): + reveal_type(a, expected_text="OtherClass[int]") + + if isinstance(b, OtherClass): + reveal_type(b, expected_text="OtherClass[str] | OtherClass[complex]") + + +def func2( + a: Type[SomeClass[int]], b: Union[Type[SomeClass[str]], Type[SomeClass[complex]]] +) -> None: + if issubclass(a, OtherClass): + reveal_type(a, expected_text="Type[OtherClass[int]]") + + if issubclass(b, OtherClass): + reveal_type( + b, expected_text="Type[OtherClass[str]] | Type[OtherClass[complex]]" + ) + + +def func3(value: Iterable[_T1]) -> Sequence[_T1] | None: + if isinstance(value, Sequence): + return value + + +_T2 = TypeVar("_T2", bound=float, covariant=True) + + +class Parent1(Generic[_T2]): + pass + + +class Child1(Parent1[_T2]): + pass + + +def func4(var: Parent1[int]): + if isinstance(var, Child1): + reveal_type(var, expected_text="Child1[int]") + + +def func5(var: Parent1[Any]): + if isinstance(var, Child1): + reveal_type(var, expected_text="Child1[Any]") + + +_T3 = TypeVar("_T3", float, str) + + +class Parent2(Generic[_T3]): + pass + + +class Child2(Parent2[_T3]): + pass + + +def func6(var: Parent2[int]): + if isinstance(var, Child2): + reveal_type(var, expected_text="Child2[float]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py index 3a0dc7c2ca5a..50e488440f38 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral1.py @@ -4,43 +4,32 @@ from typing import Literal, Union -def requires_a(p1: Literal["a"]): - pass - - -def requires_bc(p1: Literal["b", "c"]): - pass - - def func_1(p1: Literal["a", "b", "c"]): if p1 != "b": if p1 == "c": + reveal_type(p1, expected_text="Literal['c']") pass else: - requires_a(p1) + reveal_type(p1, expected_text="Literal['a']") if p1 
!= "a": - requires_bc(p1) + reveal_type(p1, expected_text="Literal['c', 'b']") else: - requires_a(p1) + reveal_type(p1, expected_text="Literal['a']") if "a" != p1: - requires_bc(p1) + reveal_type(p1, expected_text="Literal['c', 'b']") else: - requires_a(p1) - - -def requires_7(p1: Literal[7]): - pass + reveal_type(p1, expected_text="Literal['a']") def func2(p1: Literal[1, 4, 7]): if 4 == p1 or 1 == p1: - pass + reveal_type(p1, expected_text="Literal[4, 1]") else: - requires_7(p1) + reveal_type(p1, expected_text="Literal[7]") def func3(a: Union[int, None]): if a == 1 or a == 2: - t1: Literal["Literal[1, 2]"] = reveal_type(a) + reveal_type(a, expected_text="Literal[1, 2]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py b/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py index 8b6a32573c34..f10d8215431f 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingLiteral2.py @@ -56,8 +56,8 @@ class MyEnum(Enum): def func5(x: Union[MyEnum, str]): if x is MyEnum.ZERO: - t1: Literal["Literal[MyEnum.ZERO]"] = reveal_type(x) + reveal_type(x, expected_text="Literal[MyEnum.ZERO]") elif x is MyEnum.ONE: - t2: Literal["Literal[MyEnum.ONE]"] = reveal_type(x) + reveal_type(x, expected_text="Literal[MyEnum.ONE]") else: - t3: Literal["str"] = reveal_type(x) + reveal_type(x, expected_text="str") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py index 8826751ba795..d62d865620d3 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingLiteralMember1.py @@ -8,61 +8,113 @@ class A: kind: Literal["A"] kind_class: ClassVar[Literal["A"]] d: Literal[1, 2, 3] + is_a: Literal[True] class B: kind: Literal["B"] kind_class: ClassVar[Literal["B"]] d: 
Literal[3, 4, 5] + is_a: Literal[False] class C: kind: str kind_class: str c: int + is_a: bool class D: kind: Literal[1, 2, 3] -def foo_obj1(c: Union[A, B]): +def eq_obj1(c: Union[A, B]): if c.kind == "A": - tc1: Literal["A"] = reveal_type(c) + reveal_type(c, expected_text="A") else: - tc2: Literal["B"] = reveal_type(c) + reveal_type(c, expected_text="B") -def foo_obj2(c: Union[A, B]): +def is_obj1_1(c: Union[A, B]): + if c.kind is "A": + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A | B") + + +def is_obj1_2(c: Union[A, B]): + if c.is_a is False: + reveal_type(c, expected_text="B") + else: + reveal_type(c, expected_text="A") + + +def eq_obj2(c: Union[A, B]): if c.kind != "A": - tc1: Literal["B"] = reveal_type(c) + reveal_type(c, expected_text="B") else: - tc2: Literal["A"] = reveal_type(c) + reveal_type(c, expected_text="A") + +def is_obj2(c: Union[A, B]): + if c.kind is not "A": + reveal_type(c, expected_text="A | B") + else: + reveal_type(c, expected_text="A | B") -def foo_obj3(c: Union[A, B, C]): + +def eq_obj3(c: Union[A, B, C]): if c.kind == "A": - tc1: Literal["A | B | C"] = reveal_type(c) + reveal_type(c, expected_text="A | C") else: - tc2: Literal["A | B | C"] = reveal_type(c) + reveal_type(c, expected_text="B | C") -def foo_obj4(c: Union[A, B]): +def is_obj3(c: Union[A, B, C]): + if c.kind is "A": + reveal_type(c, expected_text="A | B | C") + else: + reveal_type(c, expected_text="A | B | C") + + +def eq_obj4(c: Union[A, B]): if c.d == 1: - tc1: Literal["A"] = reveal_type(c) + reveal_type(c, expected_text="A") elif c.d == 3: - tc2: Literal["A | B"] = reveal_type(c) + reveal_type(c, expected_text="A | B") + +def is_obj4(c: Union[A, B]): + if c.d is 1: + reveal_type(c, expected_text="A | B") + elif c.d is 3: + reveal_type(c, expected_text="A | B") -def foo_obj5(d: D): + +def eq_obj5(d: D): if d.kind == 1: - td1: Literal["D"] = reveal_type(d) + reveal_type(d, expected_text="D") elif d.kind == 2: - td2: Literal["D"] = 
reveal_type(d) + reveal_type(d, expected_text="D") + +def is_obj5(d: D): + if d.kind is 1: + reveal_type(d, expected_text="D") + elif d.kind is 2: + reveal_type(d, expected_text="D") -def foo_class2(c: Union[Type[A], Type[B]]): + +def eq_class2(c: Union[Type[A], Type[B]]): if c.kind_class == "A": - tc1: Literal["Type[A]"] = reveal_type(c) + reveal_type(c, expected_text="Type[A]") + else: + reveal_type(c, expected_text="Type[B]") + + +def is_class2(c: Union[Type[A], Type[B]]): + if c.kind_class is "A": + reveal_type(c, expected_text="Type[A] | Type[B]") else: - tc2: Literal["Type[B]"] = reveal_type(c) + reveal_type(c, expected_text="Type[A] | Type[B]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py new file mode 100644 index 000000000000..de8f2236b3ca --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingLocalConst1.py @@ -0,0 +1,136 @@ +# This sample tests the case where a local (constant) variable that +# is assigned a narrowing expression can be used in a type guard condition. +# These are sometimes referred to as "aliased conditional expressions". 
+ + +from typing import Optional, Union +import random + + +class A: + a: int + + +class B: + b: int + + +def func1(x: Union[A, B]) -> None: + is_a = not not isinstance(x, A) + + if not is_a: + reveal_type(x, expected_text="B") + else: + reveal_type(x, expected_text="A") + + +def func2(x: Union[A, B]) -> None: + is_a = isinstance(x, A) + + if random.random() < 0.5: + x = B() + + if is_a: + reveal_type(x, expected_text="B | A") + else: + reveal_type(x, expected_text="B | A") + + +def func3(x: Optional[int]): + is_number = x != None + + if is_number: + reveal_type(x, expected_text="int") + else: + reveal_type(x, expected_text="None") + + +def func4() -> Optional[A]: + return A() if random.random() < 0.5 else None + + +maybe_a1 = func4() +is_a1 = maybe_a1 + +if is_a1: + reveal_type(maybe_a1, expected_text="A") +else: + reveal_type(maybe_a1, expected_text="None") + +maybe_a2 = func4() + + +def func5(): + global maybe_a2 + maybe_a2 = False + + +is_a2 = maybe_a2 + +if is_a2: + reveal_type(maybe_a2, expected_text="A | None") +else: + reveal_type(maybe_a2, expected_text="A | None") + + +def func6(x: Union[A, B]) -> None: + is_a = isinstance(x, A) + + for y in range(1): + if is_a: + reveal_type(x, expected_text="A | B") + else: + reveal_type(x, expected_text="A | B") + + if random.random() < 0.5: + x = B() + + +def get_string() -> str: + ... + + +def get_optional_string() -> Optional[str]: + ... 
+ + +def func7(val: Optional[str] = None): + val = get_optional_string() + + val_is_none = val is None + + if val_is_none: + val = get_string() + + reveal_type(val, expected_text="str") + + +def func8(val: Optional[str] = None): + val = get_optional_string() + + val_is_none = val is None + + val = get_optional_string() + + if val_is_none: + val = get_string() + + reveal_type(val, expected_text="str | None") + + +def func9(var: Optional[str] = None): + if var_not_None := not (var is None): + reveal_type(var, expected_text="str") + + reveal_type(var, expected_text="str | None") + + if var_not_None: + reveal_type(var, expected_text="str") + + if 1 > 1 + 2: + var = None + else: + var = "a" + "b" + + if var_not_None: + reveal_type(var, expected_text="Literal['ab'] | None") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py index 408b521ade29..7fdba94d898a 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingTuple1.py @@ -11,13 +11,13 @@ def func1(m: Msg): if m[0] == 1: - t1: Literal["Tuple[Literal[1], str]"] = reveal_type(m) + reveal_type(m, expected_text="Tuple[Literal[1], str]") else: - t2: Literal["Tuple[Literal[2], float]"] = reveal_type(m) + reveal_type(m, expected_text="Tuple[Literal[2], float]") def func2(m: Msg): if m[0] != 1: - t1: Literal["Tuple[Literal[2], float]"] = reveal_type(m) + reveal_type(m, expected_text="Tuple[Literal[2], float]") else: - t2: Literal["Tuple[Literal[1], str]"] = reveal_type(m) + reveal_type(m, expected_text="Tuple[Literal[1], str]") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingTupleLength1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingTupleLength1.py new file mode 100644 index 000000000000..1f6cde3ec7d3 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingTupleLength1.py @@ -0,0 +1,47 
@@ +# This sample tests type narrowing of tuples based on len(x) test. + +from typing import Tuple, TypeVar, Union + + +def func1(val: Union[Tuple[int], Tuple[int, int], Tuple[str, str]]): + if len(val) == 1: + reveal_type(val, expected_text="Tuple[int]") + else: + reveal_type(val, expected_text="Tuple[int, int] | Tuple[str, str]") + + if len(val) != 2: + reveal_type(val, expected_text="Tuple[int]") + else: + reveal_type(val, expected_text="Tuple[int, int] | Tuple[str, str]") + + +def func2(val: Union[Tuple[int], Tuple[int, ...]]): + if len(val) == 1: + reveal_type(val, expected_text="Tuple[int] | Tuple[int, ...]") + else: + reveal_type(val, expected_text="Tuple[int, ...]") + + if len(val) != 2: + reveal_type(val, expected_text="Tuple[int] | Tuple[int, ...]") + else: + reveal_type(val, expected_text="Tuple[int, ...]") + + +def func3(val: Union[Tuple[int], Tuple[()]]): + if len(val) == 0: + reveal_type(val, expected_text="Tuple[()]") + else: + reveal_type(val, expected_text="Tuple[int]") + + +_T1 = TypeVar("_T1", bound=Tuple[int]) +_T2 = TypeVar("_T2", bound=Tuple[str, str]) + + +def func4(val: Union[_T1, _T2]) -> Union[_T1, _T2]: + if len(val) == 1: + reveal_type(val, expected_text="_T1@func4") + else: + reveal_type(val, expected_text="_T2@func4") + + return val diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py index 72ef15d66f7a..1f37d1b5b9a8 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingTypeIs1.py @@ -1,7 +1,7 @@ # This sample exercises the type analyzer's type narrowing # logic for tests of the form "type(X) is Y" or "type(X) is not Y". 
-from typing import Any, Dict, Generic, Literal, Optional, TypeVar, Union, final +from typing import Any, Dict, Generic, Optional, TypeVar, Union, final def func1(a: Union[str, int]) -> int: @@ -44,9 +44,9 @@ class B(A): def func4(a: Union[str, A]): if type(a) is B: - t1: Literal["B"] = reveal_type(a) + reveal_type(a, expected_text="B") else: - t2: Literal["str | A"] = reveal_type(a) + reveal_type(a, expected_text="str | A") T = TypeVar("T") @@ -66,7 +66,7 @@ class D: def func5(x: E[T]) -> None: if type(x) is C: - t1: Literal["C[T@func5]"] = reveal_type(x) + reveal_type(x, expected_text="C[T@func5]") @final @@ -79,8 +79,17 @@ class BFinal: pass -def foo(c: Union[AFinal, BFinal]) -> None: - if type(c) is AFinal: - t1: Literal["AFinal"] = reveal_type(c) +def func6(val: Union[AFinal, BFinal]) -> None: + if type(val) is AFinal: + reveal_type(val, expected_text="AFinal") else: - t2: Literal["BFinal"] = reveal_type(c) + reveal_type(val, expected_text="BFinal") + + +def func7(val: Any): + if type(val) is int: + reveal_type(val, expected_text="int") + else: + reveal_type(val, expected_text="Any") + + reveal_type(val, expected_text="int | Any") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py b/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py index c26a29c53e7f..d69a6e984823 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict1.py @@ -1,7 +1,7 @@ # This sample tests type narrowing for TypedDict types based # on whether a key is in or not in the dict. 
-from typing import Literal, TypedDict, Union, final +from typing import TypedDict, Union, final @final @@ -29,37 +29,37 @@ class TD4(TypedDict): def f1(p: Union[TD1, TD2]): if "b" in p: - tp1: Literal["TD1"] = reveal_type(p) + reveal_type(p, expected_text="TD1") else: - tp2: Literal["TD2"] = reveal_type(p) + reveal_type(p, expected_text="TD2") def f2(p: Union[TD1, TD2]): if "b" not in p: - tp1: Literal["TD2"] = reveal_type(p) + reveal_type(p, expected_text="TD2") else: - tp2: Literal["TD1"] = reveal_type(p) + reveal_type(p, expected_text="TD1") def f3(p: Union[TD1, TD3]): if "d" in p: - tp1: Literal["TD3"] = reveal_type(p) + reveal_type(p, expected_text="TD3") else: - tp2: Literal["TD1 | TD3"] = reveal_type(p) + reveal_type(p, expected_text="TD1 | TD3") def f4(p: Union[TD1, TD3]): if "d" not in p: - tp1: Literal["TD1 | TD3"] = reveal_type(p) + reveal_type(p, expected_text="TD1 | TD3") else: - tp2: Literal["TD3"] = reveal_type(p) + reveal_type(p, expected_text="TD3") def f5(p: Union[TD1, TD3]): if "a" in p: - tp1: Literal["TD1 | TD3"] = reveal_type(p) + reveal_type(p, expected_text="TD1 | TD3") else: - tp2: Literal["TD3"] = reveal_type(p) + reveal_type(p, expected_text="TD3") def f6(p: Union[TD1, TD2, TD3]): @@ -70,14 +70,14 @@ def f6(p: Union[TD1, TD2, TD3]): if "c" in p: v3 = p["c"] - t_v3: Literal["str"] = reveal_type(v3) + reveal_type(v3, expected_text="str") if "a" in p and "d" in p: v4 = p["a"] - t_v4: Literal["int"] = reveal_type(v4) + reveal_type(v4, expected_text="int") v5 = p["d"] - t_v5: Literal["str"] = reveal_type(v5) + reveal_type(v5, expected_text="str") # This should generate two errors, one for TD1 and another for TD2. 
v6 = p["d"] @@ -94,6 +94,6 @@ def f8(p: TD3): def f9(p: Union[TD1, TD4]): if "b" in p: - tp1: Literal["TD1 | TD4"] = reveal_type(p) + reveal_type(p, expected_text="TD1 | TD4") else: - tp2: Literal["TD4"] = reveal_type(p) + reveal_type(p, expected_text="TD4") diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py b/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py index e1a731c22f48..abed423c48a3 100644 --- a/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict2.py @@ -25,12 +25,11 @@ class OtherEvent(TypedDict): def process_event(event: Event) -> None: if event["tag"] == "new-job": - t1: Literal["NewJobEvent"] = reveal_type(event) + reveal_type(event, expected_text="NewJobEvent") event["job_name"] elif event["tag"] == 2: - t2: Literal["CancelJobEvent"] = reveal_type(event) + reveal_type(event, expected_text="CancelJobEvent") event["job_id"] else: - t3: Literal["OtherEvent"] = reveal_type(event) + reveal_type(event, expected_text="OtherEvent") event["message"] - diff --git a/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict3.py b/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict3.py new file mode 100644 index 000000000000..25d169390128 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typeNarrowingTypedDict3.py @@ -0,0 +1,45 @@ +# This sample tests assignment-based narrowing for TypedDict values. + +from typing import TypedDict + + +class MyDict1(TypedDict, total=False): + key1: int + key2: str + + +my_dict1: MyDict1 = {"key1": 1} +my_dict1["key1"] + +# This should generate an error because "key2" isn't included in the +# narrowed type. 
+my_dict1["key2"] + +if "key2" in my_dict1: + my_dict1["key2"] + + +class MyDict2(TypedDict, total=False): + key3: MyDict1 + key4: MyDict1 + key5: MyDict1 + + +my_dict2: MyDict2 = {"key3": {"key1": 3}, "key4": {}} + +my_dict2["key3"] +my_dict2["key4"] + +# This should generate an error because "key5" isn't included in the +# narrowed type. +my_dict2["key5"] + +my_dict2["key3"]["key1"] + +# This should generate an error because "key2" isn't included in the +# narrowed type. +my_dict2["key3"]["key2"] + +# This should generate an error because "key4" isn't included in the +# narrowed type. +my_dict2["key4"]["key1"] diff --git a/packages/pyright-internal/src/tests/samples/typePromotions1.py b/packages/pyright-internal/src/tests/samples/typePromotions1.py index 9fc08d350d2c..99c38e0bfd16 100644 --- a/packages/pyright-internal/src/tests/samples/typePromotions1.py +++ b/packages/pyright-internal/src/tests/samples/typePromotions1.py @@ -1,6 +1,9 @@ # This sample tests handling of special-cased "type promotions". +from typing import NewType + + def func1(float_val: float, int_val: int): v1: float = int_val v2: complex = float_val @@ -10,3 +13,18 @@ def func1(float_val: float, int_val: int): def func2(mem_view_val: memoryview, byte_array_val: bytearray): v1: bytes = mem_view_val v2: bytes = byte_array_val + + +class IntSubclass(int): + ... 
+ + +def func3(x: IntSubclass) -> float: + return x + + +IntNewType = NewType("IntNewType", int) + + +def func4(x: IntNewType) -> float: + return x diff --git a/packages/pyright-internal/src/tests/samples/typeVar11.py b/packages/pyright-internal/src/tests/samples/typeVar11.py index 3513a0ca06c5..a5f3a1896d65 100644 --- a/packages/pyright-internal/src/tests/samples/typeVar11.py +++ b/packages/pyright-internal/src/tests/samples/typeVar11.py @@ -9,4 +9,4 @@ def combine(set1: Set[_L1], set2: Set[_L1]) -> None: x = set1 | set2 - t1: Literal["Set[Literal['foo', 'bar']]"] = reveal_type(x) + reveal_type(x, expected_text="set[Literal['foo', 'bar']]") diff --git a/packages/pyright-internal/src/tests/samples/typeVar3.py b/packages/pyright-internal/src/tests/samples/typeVar3.py index 5d532cde5d2f..2238f8fac318 100644 --- a/packages/pyright-internal/src/tests/samples/typeVar3.py +++ b/packages/pyright-internal/src/tests/samples/typeVar3.py @@ -1,6 +1,6 @@ # This sample tests various diagnostics related to TypeVar usage. -from typing import Callable, Generic, List, Literal, Optional, TypeVar +from typing import Callable, Generic, List, Optional, TypeVar import typing _T = TypeVar("_T") @@ -69,7 +69,7 @@ class InnerClass3(Generic[_T]): def foo() -> Callable[[T], T]: def inner(v: T) -> T: - t_v: Literal["T@foo"] = reveal_type(v) + reveal_type(v, expected_text="T@foo") return v return inner diff --git a/packages/pyright-internal/src/tests/samples/typeVar7.py b/packages/pyright-internal/src/tests/samples/typeVar7.py index 3ea6305a077a..65cbefa3f279 100644 --- a/packages/pyright-internal/src/tests/samples/typeVar7.py +++ b/packages/pyright-internal/src/tests/samples/typeVar7.py @@ -2,7 +2,7 @@ # with constrained types properly generate errors. It tests # both class-defined and function-defined type variables. 
-from typing import Generic, Literal, TypeVar +from typing import Generic, TypeVar class Foo: @@ -132,7 +132,7 @@ def custom_add(a: _T3, b: _T4) -> float: if isinstance(a, str): return 0 c = a + b - t1: Literal["float* | int*"] = reveal_type(c) + reveal_type(c, expected_text="float* | int*") return c diff --git a/packages/pyright-internal/src/tests/samples/typeVar8.py b/packages/pyright-internal/src/tests/samples/typeVar8.py index 699025286f15..b4d3854c610d 100644 --- a/packages/pyright-internal/src/tests/samples/typeVar8.py +++ b/packages/pyright-internal/src/tests/samples/typeVar8.py @@ -1,7 +1,7 @@ # This sample tests the handling of a TypeVar symbol that is # not representing another type. -from typing import Literal, TypeVar, Union +from typing import TypeVar, Union T = TypeVar("T") @@ -15,8 +15,8 @@ def func1(x: bool, a: T, b: S) -> Union[T, S]: - t1: Literal["str"] = reveal_type(T.__name__) - t2: Literal["str"] = reveal_type(S.__name__) + reveal_type(T.__name__, expected_text="str") + reveal_type(S.__name__, expected_text="str") # This should generate an error a.__name__ diff --git a/packages/pyright-internal/src/tests/samples/typeVar9.py b/packages/pyright-internal/src/tests/samples/typeVar9.py index 220a35fa3a38..d90925e19035 100644 --- a/packages/pyright-internal/src/tests/samples/typeVar9.py +++ b/packages/pyright-internal/src/tests/samples/typeVar9.py @@ -64,14 +64,21 @@ def f7(v1: _T_Bound): ... -# Bound TypeVars as type arguments are exempt. +# Bound TypeVars as type arguments are exempt when used in an +# input parameter annotation. def f8(v1: List[_T_Bound]): ... +# Bound TypeVars as type arguments are not exempt when used in a +# return annotation. +def f9() -> List[_T_Bound]: + ... + + # TypeVars used as type args to a generic type alias are exempt. MyCallable = Callable[[_T], _T] -def f9() -> MyCallable[_T]: +def f10() -> MyCallable[_T]: ... 
diff --git a/packages/pyright-internal/src/tests/samples/typedDict12.py b/packages/pyright-internal/src/tests/samples/typedDict12.py index 09275624e5d0..b91e920e5e49 100644 --- a/packages/pyright-internal/src/tests/samples/typedDict12.py +++ b/packages/pyright-internal/src/tests/samples/typedDict12.py @@ -15,10 +15,19 @@ class Foo(TypedDict, total=False): v1: Optional[str] = foo.get("bar") v2: str = foo.get("bar", "") + v3: Union[str, int] = foo.get("bar", 3) v4: str = foo.setdefault("bar", "1") -v5: Union[str, int] = foo.setdefault("bar", 3) + +# This should generate an error. +foo.setdefault("bar", 3) + +# This should generate an error. +foo.setdefault("bar") + +# This should generate an error. +foo.setdefault("baz", "") v6: str = foo.pop("bar") v7: str = foo.pop("bar", "none") diff --git a/packages/pyright-internal/src/tests/samples/typedDict15.py b/packages/pyright-internal/src/tests/samples/typedDict15.py new file mode 100644 index 000000000000..d484fdc79dc1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typedDict15.py @@ -0,0 +1,53 @@ +# This sample tests the type compatibility checks when the source +# is a TypedDict and the dest is a protocol. + +from typing import Protocol, TypeVar, TypedDict + + +class HasName(Protocol): + name: str + + +class SupportsClear(Protocol): + def clear(self) -> None: + ... + + +_T = TypeVar("_T") + + +class SupportsUpdate(Protocol): + def update(self: _T, __m: _T) -> None: + ... + + +class B(TypedDict): + name: str + + +def print_name(x: HasName): + print(x.name) + + +my_typed_dict: B = {"name": "my name"} + +# This should generate an error. The "name" +# attribute of a TypedDict can't be accessed +# through a member access expression. +print_name(my_typed_dict) + + +def do_clear(x: SupportsClear): + x.clear() + + +# This should generate an error. Although a "dict" +# class supports clear, a TypedDict does not. 
+do_clear(my_typed_dict) + + +def do_update(x: SupportsUpdate): + x.update(x) + + +do_update(my_typed_dict) diff --git a/packages/pyright-internal/src/tests/samples/typedDict16.py b/packages/pyright-internal/src/tests/samples/typedDict16.py new file mode 100644 index 000000000000..9ef5a1f94806 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typedDict16.py @@ -0,0 +1,103 @@ +# This sample tests that type compatibility between TypedDicts. + +from typing import List, TypedDict, final + + +class TD0(TypedDict): + key: str + + +class TD1(TD0): + value: str + + +class TD2(TypedDict): + key: str + value: str + + +v1: TD2 = TD1(key="", value="") +v2: TD1 = TD2(key="", value="") + +v3 = [v2] +v4: List[TD2] = v3 +v5 = [v1] +v6: List[TD1] = v5 + + +class TD10(TypedDict, total=False): + key: str + + +class TD11(TD10): + value: str + + +class TD12(TypedDict): + key: str + value: str + + +# This should generate an error. +v10: TD12 = TD11(key="", value="") + +# This should generate an error. +v11: TD11 = TD12(key="", value="") + + +v12 = [v10] +# This should generate an error. +v13: List[TD10] = v12 + +v14 = [v11] +# This should generate an error. +v15: List[TD12] = v14 + + +class TD20(TypedDict): + key: str + value: str + + +class TD21(TypedDict): + key: str + value: str + extra: str + + +# This should generate an error. +v20: TD21 = TD20(key="", value="") + +v21: TD20 = TD21(key="", value="", extra="") + + +v22 = [v20] +# This should generate an error. +v23: List[TD20] = v22 + +v24: List[TD20] = [v21] +# This should generate an error. +v25: List[TD21] = v24 + + +@final +class TD30(TypedDict): + value: str + +@final +class TD31(TypedDict): + value: str + +class TD32(TypedDict): + value: str + + +v30: TD30 = TD31(value="") +v31: TD31 = TD30(value="") + +# This should generate an error because of a @final mismatch. +v32: TD32 = TD30(value="") + +# This should generate an error because of a @final mismatch. 
+v33: TD30 = TD32(value="") + diff --git a/packages/pyright-internal/src/tests/samples/typedDict17.py b/packages/pyright-internal/src/tests/samples/typedDict17.py new file mode 100644 index 000000000000..202cbd2d4fa4 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/typedDict17.py @@ -0,0 +1,24 @@ +# This sample tests the handling of the "|" and "|=" operators +# for TypedDicts. + +from typing import TypedDict + + +class Person(TypedDict, total=False): + name: str + age: int + + +person: Person = {} + +person.update({"name": "Michael"}) + +person |= {"name": "Michael"} +person = person | {"name": "Michael"} + + +# This should generate an error. +person |= {"name": "Michael", "other": 1} + +# This should generate an error. +person = person | {"name": 1} diff --git a/packages/pyright-internal/src/tests/samples/typedDict5.py b/packages/pyright-internal/src/tests/samples/typedDict5.py index e2bb418283bc..ff830fa6292a 100644 --- a/packages/pyright-internal/src/tests/samples/typedDict5.py +++ b/packages/pyright-internal/src/tests/samples/typedDict5.py @@ -44,4 +44,7 @@ class Movie4(TypedDict, total=True): movie6["name"] = "goodbye" movie7 = {"name": "hello", "year": 1971} -movie7["name"] = "goodbye" \ No newline at end of file +movie7["name"] = "goodbye" + +movie8: Movie2 = {"year": 1981, "name": "test"} +movie8["year"] = 1982 diff --git a/packages/pyright-internal/src/tests/samples/typedDict7.py b/packages/pyright-internal/src/tests/samples/typedDict7.py index 82b570b16434..e365e41ba317 100644 --- a/packages/pyright-internal/src/tests/samples/typedDict7.py +++ b/packages/pyright-internal/src/tests/samples/typedDict7.py @@ -1,15 +1,24 @@ # This sample tests synthesized get methods in TypedDict classes. 
-from typing import TypedDict +from typing import TypedDict, Union -UserType1 = TypedDict("UserType", {"name": str, "age": int}, total=False) +UserType1 = TypedDict("UserType1", {"name": str, "age": int}, total=False) user1: UserType1 = {"name": "Bob", "age": 40} name1: str = user1.get("name", "n/a") age1: int = user1.get("age", 42) -UserType2 = TypedDict("UserType", name=str, age=int) +UserType2 = TypedDict("UserType2", name=str, age=int) user2: UserType2 = {"name": "Bob", "age": 40} -name2: str = user2.get("name") +name2: Union[str, None] = user2.get("name") + +# This should generate an error. +name3: str = user2.get("name") + age2: int = user2.get("age", 42) + +age3: Union[int, str] = user2.get("age", "42") + +# This should generate an error. +age4: int = user2.get("age", "42") diff --git a/packages/pyright-internal/src/tests/samples/unbound4.py b/packages/pyright-internal/src/tests/samples/unbound4.py index 4ab7d918787c..bdd175f7ed15 100644 --- a/packages/pyright-internal/src/tests/samples/unbound4.py +++ b/packages/pyright-internal/src/tests/samples/unbound4.py @@ -2,9 +2,6 @@ # a function does not propagate beyond that function to callers. -from typing import Literal - - def func1(): # This should generate an error return a @@ -12,7 +9,7 @@ def func1(): # This should not. b = func1() -tb1: Literal["Unknown"] = reveal_type(b) +reveal_type(b, expected_text="Unknown") def func2(val: int): @@ -25,4 +22,4 @@ def func2(val: int): # This should not. c = func2(36) -tc1: Literal["int | Unknown"] = reveal_type(c) +reveal_type(c, expected_text="int | Unknown") diff --git a/packages/pyright-internal/src/tests/samples/unions1.py b/packages/pyright-internal/src/tests/samples/unions1.py index 4560969d8488..0a9d9de7e75e 100644 --- a/packages/pyright-internal/src/tests/samples/unions1.py +++ b/packages/pyright-internal/src/tests/samples/unions1.py @@ -1,12 +1,7 @@ # This sample tests the alternative syntax for unions as # documented in PEP 604. 
-from typing import Callable, Generic, Literal, TypeVar, Union - - -def foo1(a: int): - if isinstance(a, int | str | bytes): - return 3 +from typing import Callable, Generic, TypeVar, Union def foo2(a: int | str): @@ -41,8 +36,8 @@ def helper(value: T) -> T | None: class Baz(Generic[T]): qux: T | None - t1: Literal["str | None"] = reveal_type(helper(a)) - t2: Literal["str | None"] = reveal_type(Baz[str].qux) + reveal_type(helper(a), expected_text="str | None") + reveal_type(Baz[str].qux, expected_text="str | None") T = TypeVar("T") diff --git a/packages/pyright-internal/src/tests/samples/unions3.py b/packages/pyright-internal/src/tests/samples/unions3.py index d9da04ff250a..4473ac715663 100644 --- a/packages/pyright-internal/src/tests/samples/unions3.py +++ b/packages/pyright-internal/src/tests/samples/unions3.py @@ -3,7 +3,7 @@ # with a __or__ or __ror__ method defined. -from typing import Literal, Type, TypeVar +from typing import Type, TypeVar class ClassWithNoMeta1: @@ -15,8 +15,8 @@ class ClassWithNoMeta2: NoMetaUnion = ClassWithNoMeta1 | ClassWithNoMeta2 -tf1: Literal["Type[ClassWithNoMeta1] | Type[ClassWithNoMeta2]"] = reveal_type( - NoMetaUnion +reveal_type( + NoMetaUnion, expected_text="Type[ClassWithNoMeta1] | Type[ClassWithNoMeta2]" ) _T = TypeVar("_T") diff --git a/packages/pyright-internal/src/tests/samples/unions4.py b/packages/pyright-internal/src/tests/samples/unions4.py new file mode 100644 index 000000000000..4bee96b60f65 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/unions4.py @@ -0,0 +1,20 @@ +# This sample tests the incorrect usage of Union types. + +from typing import Union + +x = Union[int, str] + + +# This should generate an error. +y = Union[int] + +z = Union + + +# This should generate an error. +def func1() -> Union: + ... + + +# This should generate an error. 
+var1: Union diff --git a/packages/pyright-internal/src/tests/samples/unions5.py b/packages/pyright-internal/src/tests/samples/unions5.py new file mode 100644 index 000000000000..4ff46579fdb2 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/unions5.py @@ -0,0 +1,43 @@ +# This sample tests the handling of runtime union expressions that +# are used in contexts other than a type annotation. + +from typing import Union + + +class Foo: + a: int + + +class Bar: + a: int + + +# This should generate an error +a1: type[Foo] | type[Bar] = Foo | Bar + +print(a1.a) +a1() + +# This should generate an error +a2: type[Foo] | type[Bar] = Union[Foo, Bar] + +print(a2.a) +a2() + + +b1 = Foo | Bar + +# This should generate an error +print(b1.a) + +# This should generate an error +b1() + + +b2 = Union[Foo, Bar] + +# This should generate an error +print(b2.a) + +# This should generate an error +b2() diff --git a/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py b/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py index 78aad531daed..784d67a00c6a 100644 --- a/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py +++ b/packages/pyright-internal/src/tests/samples/unnecessaryIsInstance1.py @@ -1,6 +1,13 @@ # This sample tests unnecessary isinstance error reporting. 
-from typing import ClassVar, Literal, Protocol, TypedDict, Union, runtime_checkable +from typing import ( + ClassVar, + Protocol, + Type, + TypedDict, + Union, + runtime_checkable, +) from unknown_import import CustomClass1 @@ -63,7 +70,22 @@ class ClassC: def func3(obj: BaseClass): if isinstance(obj, (ClassA, ClassB)): - t_1: Literal["ClassA | ClassB"] = reveal_type(obj) + reveal_type(obj, expected_text="ClassA | ClassB") if isinstance(obj, (ClassA, ClassB, ClassC)): - t_2: Literal["ClassA | ClassB"] = reveal_type(obj) + reveal_type(obj, expected_text="ClassA | ClassB") + + +class A: + pass + + +class B(A): + pass + + +def func4(a: A, cls: Type[A]) -> None: + isinstance(a, cls) + + # This should generate an error because it's always true. + isinstance(a, A) diff --git a/packages/pyright-internal/src/tests/samples/unpack1.py b/packages/pyright-internal/src/tests/samples/unpack1.py index faa1d4add36a..dbfeb48e9f08 100644 --- a/packages/pyright-internal/src/tests/samples/unpack1.py +++ b/packages/pyright-internal/src/tests/samples/unpack1.py @@ -1,6 +1,6 @@ # This sample tests the type checker's handling of the unpack operator. -# pyright: strict +# pyright: strictListInference=true class Foo: @@ -36,3 +36,21 @@ def int_only(a: int): x2 = 2, *(1, 2, 3) x3 = *(1, 2, 3), 2 + + +[d1, *e1, f1] = [1, 2, 3, 4] +reveal_type(e1, expected_text="list[int]") + +[*d2, e2, f2] = [1, 2, 3, 4] +reveal_type(d2, expected_text="list[int]") + +[d3, e3, *f3] = (1, 2, 3, 4) +reveal_type(f3, expected_text="list[int]") + +[g1, g2, g3] = (1, 2, 3) + +# This should generate an error. +[g1, g2, g3, g4] = (1, 2, 3) + +# This should generate an error. 
+[g1, g2] = (1, 2, 3) diff --git a/packages/pyright-internal/src/tests/samples/unpack4.py b/packages/pyright-internal/src/tests/samples/unpack4.py new file mode 100644 index 000000000000..d3954ef64358 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/unpack4.py @@ -0,0 +1,15 @@ +# This sample tests the handling of multiple unpack operators in a +# star expression. + +a = [1, 2] +b = ["3", "4"] + +# This should generate an error for versions of Python <3.9 +for x in *a, *b: + print(x) + +c = *a, *b +print(c) + +# This should always generate an error. +*a, *b = (1, 2, ) diff --git a/packages/pyright-internal/src/tests/samples/function12.py b/packages/pyright-internal/src/tests/samples/unreachable1.py similarity index 99% rename from packages/pyright-internal/src/tests/samples/function12.py rename to packages/pyright-internal/src/tests/samples/unreachable1.py index bec124b9fba8..5e54698f33e0 100644 --- a/packages/pyright-internal/src/tests/samples/function12.py +++ b/packages/pyright-internal/src/tests/samples/unreachable1.py @@ -44,7 +44,7 @@ def method5(self): def func2(): func1() - # This should not be marked unreachable because NotImplementeError + # This should not be marked unreachable because NotImplementedError # is special-cased. return 3 diff --git a/packages/pyright-internal/src/tests/samples/unusedVariable1.py b/packages/pyright-internal/src/tests/samples/unusedVariable1.py new file mode 100644 index 000000000000..df6b1c0fb9ac --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/unusedVariable1.py @@ -0,0 +1,21 @@ +# This sample tests the reportUnusedVariable diagnostic check. + + +def func1(a: int): + x = 4 + + # This should generate an error if reportUnusedVariable is enabled. + y = x + + _z = 4 + + _ = 2 + + __z__ = 5 + + if x + 1: + # This should generate an error if reportUnusedVariable is enabled. + z = 3 + else: + # This should generate an error if reportUnusedVariable is enabled. 
+ z = 5 diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar1.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar1.py index e49a4d0749ff..1c7e7bb5dd55 100644 --- a/packages/pyright-internal/src/tests/samples/variadicTypeVar1.py +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar1.py @@ -3,7 +3,7 @@ # pyright: reportMissingModuleSource=false -from typing import Generic, List, Literal, Tuple, TypeVar, Union +from typing import Generic, List, Tuple, TypeVar, Union from typing_extensions import TypeVarTuple, Unpack @@ -13,7 +13,7 @@ class ClassA(Generic[_T, Unpack[_Xs]]): def __init__(self, *args: Unpack[_Xs]) -> None: - t1: Literal["tuple[*_Xs@ClassA]"] = reveal_type(args) + reveal_type(args, expected_text="tuple[*_Xs@ClassA]") # This should generate an error def func2(self) -> Union[_Xs]: diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar10.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar10.py new file mode 100644 index 000000000000..72aeb686bea1 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar10.py @@ -0,0 +1,65 @@ +# This sample tests the handling of variadic type variables when used +# in conjunction with unpacked tuples. + +from __future__ import annotations +from typing import Any, Generic, NewType, Tuple, TypeVar, Union +from typing_extensions import TypeVarTuple, Unpack + +DType = TypeVar("DType") +Shape = TypeVarTuple("Shape") + +Batch = NewType("Batch", int) +Height = NewType("Height", int) +Width = NewType("Width", int) +Channels = NewType("Channels", int) + + +class Array(Generic[DType, Unpack[Shape]]): + def __abs__(self) -> Array[DType, Unpack[Shape]]: + ... + + def __add__( + self, other: Array[DType, Unpack[Shape]] + ) -> Array[DType, Unpack[Shape]]: + ... + + +def process_batch_channels(x: Array[Batch, Unpack[Tuple[Any, ...]], Channels]) -> None: + ... 
+ + +def expect_variadic_array1(x: Array[Batch, Unpack[Shape]]) -> Union[Unpack[Shape]]: + ... + + +def expect_variadic_array2(x: Array[Batch, Unpack[Tuple[Any, ...]]]) -> None: + ... + + +def expect_precise_array(x: Array[Batch, Height, Width, Channels]) -> None: + ... + + +def func1(x: Array[Batch, Height, Width, Channels]): + process_batch_channels(x) + + expect_precise_array(x) + + +def func2(y: Array[Batch, Channels]): + process_batch_channels(y) + + # This should generate an error because the type args don't match. + expect_precise_array(y) + + +def func3(z: Array[Batch]): + # This should generate an error because Channels is missing + process_batch_channels(z) + + +def func4(y: Array[Any, Unpack[Tuple[Any, ...]]]): + reveal_type(y, expected_text="Array[Any, *tuple[Any, ...]]") + expect_variadic_array1(y) + expect_variadic_array2(y) + expect_precise_array(y) diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar11.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar11.py new file mode 100644 index 000000000000..00e68cf7fe00 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar11.py @@ -0,0 +1,72 @@ +# This sample tests packing and unpacking operations with +# variadic type variables. It is the same as variadicTypeVar4.py +# except that it uses the * operator rather than Unpack. + +# pyright: reportMissingModuleSource=false + +from typing import Generic, NewType, Tuple, Union +from typing_extensions import TypeVarTuple + + +Shape = TypeVarTuple("Shape") + + +class Array(Generic[*Shape]): + def __init__(self, *shape: *Shape): + self.shape = shape + + def __abs__(self) -> "Array[*Shape]": + ... + + def __add__(self, other: "Array[*Shape]") -> "Array[*Shape]": + ... 
+ + +Height = NewType("Height", int) +Width = NewType("Width", int) +x: Array[Height, Width] = Array(Height(480), Width(640)) +reveal_type(x.shape, expected_text="tuple[Height, Width]") +reveal_type(abs(x), expected_text="Array[Height, Width]") +reveal_type(x + abs(x), expected_text="Array[Height, Width]") + + +_Xs = TypeVarTuple("_Xs") + + +def func1(a: Tuple[*_Xs], b: Tuple[*_Xs]) -> Union[*_Xs]: + ... + + +def func2(a: Tuple[int, *_Xs], b: Tuple[int, *_Xs]) -> Union[*_Xs]: + ... + + +def func3(p1: Tuple[int], p2: Tuple[int, str]): + # This should generate an error + v1 = func1(p1, p2) + + # This should generate an error + v2 = func2(p1, p2) + + v3 = func2(p2, p2) + reveal_type(v3, expected_text="str") + + v4 = func2((3, "hi"), p2) + reveal_type(v4, expected_text="str") + + # This should generate an error + v5 = func2((3, 3), p2) + + +def func4(a: int, *args: *_Xs, **kwargs: str) -> Tuple[int, *_Xs]: + ... + + +c1 = func4(4, 5.4, 6j, b="3", c="5") +reveal_type(c1, expected_text="Tuple[int, float, complex]") + +c2 = func4(4, b="3", c="5") +reveal_type(c2, expected_text="Tuple[int]") + +# This should generate an error. +c3 = func4(b="3", c="5") diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar12.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar12.py new file mode 100644 index 000000000000..1c84f381723e --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar12.py @@ -0,0 +1,26 @@ +# This sample tests the case where a variadic TypeVar is used in +# conjunction with a keyword-only parameter. It also tests protocol +# invariance validation when a TypeVarTuple is used in the protocol +# along with a non-variadic TypeVar. + +# pyright: strict + +from typing import Protocol, TypeVar +from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +class CallbackA(Protocol[*Ts, T]): + def __call__(self, *args: *Ts, keyed: T) -> tuple[Unpack[Ts], T]: + ... 
+ + +def example(a: int, b: str, *, keyed: bool) -> tuple[int, str, bool]: + return (a, b, keyed) + + +a: CallbackA[int, str, bool] = example + +reveal_type(a, expected_text="(a: int, b: str, *, keyed: bool) -> tuple[int, str, bool]") diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar13.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar13.py new file mode 100644 index 000000000000..ff8342c246f3 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar13.py @@ -0,0 +1,35 @@ +# This sample tests the case where a variadic TypeVar is unpacked +# in a call expression that invokes a call that accepts an unpacked +# TypeVarTuple. + +from typing import Protocol, TypeVar +from typing_extensions import TypeVarTuple, Unpack + + +T = TypeVar("T") +Ts = TypeVarTuple("Ts") + + +class CallbackPosOnly(Protocol[Unpack[Ts]]): + def __call__(self, *args: *Ts) -> tuple[Unpack[Ts]]: + ... + + +def invoke_posonly(fn: CallbackPosOnly[Unpack[Ts]], *args: *Ts) -> tuple[Unpack[Ts]]: + return fn(*args) + + +class CallbackKeyed(Protocol[Unpack[Ts]]): + def __call__(self, *args: *Ts, keyed: bool) -> tuple[Unpack[Ts]]: + ... + + +def invoke_keyed(fn: CallbackKeyed[Unpack[Ts]], *args: *Ts) -> tuple[Unpack[Ts]]: + return fn(*args, keyed=True) + + +def invoke_keyed_should_fail(fn: CallbackKeyed[Unpack[Ts]], *args: *Ts) -> tuple[Unpack[Ts]]: + # This should generate an error because "keyed" should + # be interpreted as a keyword-only parameter. + return fn(*args, True) + diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar14.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar14.py new file mode 100644 index 000000000000..d42f51381f59 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar14.py @@ -0,0 +1,55 @@ +# This sample tests the matching of a traditional *args parameter +# and a *args unpacked Tuple to a *args TypeVarTuple. 
+ +from typing import Callable, TypeVar +from typing_extensions import TypeVarTuple + +Ts = TypeVarTuple('Ts') +R = TypeVar('R') + +def call_with_params(func: Callable[[*Ts], R], *params: *Ts) -> R: + # This should generate an error because it's missing a *. + func(params) + + return func(*params) + +def callback1(*args: int) -> int: + ... + +def callback2(*args: *tuple[int, int]) -> int: + ... + +call_with_params(callback1) +call_with_params(callback1, 1, 2, 3) + +# This should generate an error. +call_with_params(callback1, "1") + +# This should generate an error. +call_with_params(callback2) + +call_with_params(callback2, 1, 1) + +# This should generate an error. +call_with_params(callback2, 1, "") + +def callback3(*args: *tuple[int, *tuple[str, ...], int]) -> int: + ... + +# This should generate an error. +call_with_params(callback3) + +call_with_params(callback3, 1, 2) + +call_with_params(callback3, 1, "hi", 2) + +call_with_params(callback3, 1, "hi", "hi", 2) + +# This should generate an error. +call_with_params(callback3, 1, 1, 2) + + +class Foo: + @classmethod + def foo(cls, *shape: *Ts) -> tuple[*Ts]: + ... 
diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar3.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar3.py index 2caf2eb5aeac..82c0aa983569 100644 --- a/packages/pyright-internal/src/tests/samples/variadicTypeVar3.py +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar3.py @@ -3,7 +3,7 @@ # pyright: reportMissingModuleSource=false -from typing import Generic, List, Literal, Sequence, Tuple, TypeVar, Union +from typing import Generic, List, Sequence, Tuple, TypeVar, Union from typing_extensions import TypeVarTuple, Unpack @@ -14,7 +14,7 @@ class Array(Generic[Unpack[_Xs]]): def __init__(self, *args: Unpack[_Xs]) -> None: self.x: Tuple[Unpack[_Xs]] = args - t1: Literal["tuple[*_Xs@Array]"] = reveal_type(args) + reveal_type(args, expected_text="tuple[*_Xs@Array]") # This should generate an error because _Xs is not unpacked. def foo(self, *args: _Xs) -> None: @@ -22,7 +22,7 @@ def foo(self, *args: _Xs) -> None: def linearize(value: Array[Unpack[_Xs]]) -> Sequence[Union[Unpack[_Xs]]]: - t1: Literal["Array[*_Xs@linearize]"] = reveal_type(value) + reveal_type(value, expected_text="Array[*_Xs@linearize]") return [] @@ -31,27 +31,27 @@ def array_to_tuple(value: Array[Unpack[_Xs]]) -> Tuple[complex, Unpack[_Xs]]: def func1(x: Array[int, str, str, float], y: Array[()]): - t1: Literal["Array[int, str, str, float]"] = reveal_type(x) + reveal_type(x, expected_text="Array[int, str, str, float]") - t2: Literal["Array[()]"] = reveal_type(y) + reveal_type(y, expected_text="Array[()]") a1 = Array(3, 3.5, "b") - t3: Literal["Array[int, float, str]"] = reveal_type(a1) + reveal_type(a1, expected_text="Array[int, float, str]") a2 = linearize(a1) - t4: Literal["Sequence[int | float | str]"] = reveal_type(a2) + reveal_type(a2, expected_text="Sequence[int | float | str]") b1 = Array() - t5: Literal["Array[()]"] = reveal_type(b1) + reveal_type(b1, expected_text="Array[()]") b2 = linearize(b1) - t6: Literal["Sequence[Unknown]"] = 
reveal_type(b2) + reveal_type(b2, expected_text="Sequence[Unknown]") e = array_to_tuple(x) - t7: Literal["Tuple[complex, int, str, str, float]"] = reveal_type(e) + reveal_type(e, expected_text="Tuple[complex, int, str, str, float]") f = array_to_tuple(y) - t8: Literal["Tuple[complex]"] = reveal_type(f) + reveal_type(f, expected_text="Tuple[complex]") class ArrayIntStr(Array[int, str, _T]): diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar4.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar4.py index 1228e63effcd..83e66f7883e7 100644 --- a/packages/pyright-internal/src/tests/samples/variadicTypeVar4.py +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar4.py @@ -3,7 +3,7 @@ # pyright: reportMissingModuleSource=false -from typing import Generic, Literal, NewType, Tuple, Union +from typing import Generic, NewType, Tuple, Union from typing_extensions import TypeVarTuple, Unpack @@ -24,9 +24,9 @@ def __add__(self, other: "Array[Unpack[Shape]]") -> "Array[Unpack[Shape]]": Height = NewType("Height", int) Width = NewType("Width", int) x: Array[Height, Width] = Array(Height(480), Width(640)) -t1: Literal["tuple[Height, Width]"] = reveal_type(x.shape) -t2: Literal["Array[Height, Width]"] = reveal_type(abs(x)) -t3: Literal["Array[Height, Width]"] = reveal_type(x + abs(x)) +reveal_type(x.shape, expected_text="tuple[Height, Width]") +reveal_type(abs(x), expected_text="Array[Height, Width]") +reveal_type(x + abs(x), expected_text="Array[Height, Width]") _Xs = TypeVarTuple("_Xs") @@ -48,10 +48,10 @@ def func3(p1: Tuple[int], p2: Tuple[int, str]): v2 = func2(p1, p2) v3 = func2(p2, p2) - t_v3: Literal["str"] = reveal_type(v3) + reveal_type(v3, expected_text="str") v4 = func2((3, "hi"), p2) - t_v4: Literal["str"] = reveal_type(v4) + reveal_type(v4, expected_text="str") # This should generate an error v5 = func2((3, 3), p2) @@ -62,10 +62,10 @@ def func4(a: int, *args: Unpack[_Xs], **kwargs: str) -> Tuple[int, Unpack[_Xs]]: c1 = 
func4(4, 5.4, 6j, b="3", c="5") -t_c1: Literal["Tuple[int, float, complex]"] = reveal_type(c1) +reveal_type(c1, expected_text="Tuple[int, float, complex]") c2 = func4(4, b="3", c="5") -t_c2: Literal["Tuple[int]"] = reveal_type(c2) +reveal_type(c2, expected_text="Tuple[int]") # This should generate an error. c3 = func4(b="3", c="5") diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar5.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar5.py index 3a9c50e60fbb..665c9ae2c4e3 100644 --- a/packages/pyright-internal/src/tests/samples/variadicTypeVar5.py +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar5.py @@ -3,7 +3,7 @@ # pyright: reportMissingModuleSource=false -from typing import Any, Callable, Literal, Protocol, Union +from typing import Any, Callable, Protocol, Union from typing_extensions import TypeVarTuple, Unpack _Xs = TypeVarTuple("_Xs") @@ -46,9 +46,9 @@ def callback7(a: int, b: str, c: str, d: str, *args: Any) -> int: c1 = func1(callback1) -t_c1: Literal["() -> int"] = reveal_type(c1) +reveal_type(c1, expected_text="() -> int") c1_1 = c1() -t_c1_1: Literal["int"] = reveal_type(c1_1) +reveal_type(c1_1, expected_text="int") # This should generate an error. c2 = func1(callback2) @@ -57,9 +57,9 @@ def callback7(a: int, b: str, c: str, d: str, *args: Any) -> int: c3 = func2(callback3) c4 = func1(callback4) -t_c4: Literal["(_p0: complex, _p1: str) -> int"] = reveal_type(c4) +reveal_type(c4, expected_text="(complex, str) -> int") c4_1 = c4(3j, "hi") -t_c4_1: Literal["int"] = reveal_type(c4_1) +reveal_type(c4_1, expected_text="int") # This should generate an error. c4_2 = c4(3j) @@ -68,19 +68,19 @@ def callback7(a: int, b: str, c: str, d: str, *args: Any) -> int: c4_3 = c4(3j, "hi", 4) c5 = func1(callback5) -t_c5: Literal["(_p0: *_Xs@callback5) -> int"] = reveal_type(c5) +reveal_type(c5, expected_text="(*_Xs@callback5) -> int") -# This should generate an error. 
c6_1 = func1(callback6) +reveal_type(c6_1, expected_text="(*Any) -> int") -# This should generate an error. c6_2 = func2(callback6) +reveal_type(c6_2, expected_text="(int, *Any) -> int") -# This should generate an error. c7_1 = func1(callback7) +reveal_type(c7_1, expected_text="(str, str, str, *Any) -> int") -# This should generate an error. c7_2 = func2(callback7) +reveal_type(c7_2, expected_text="(int, str, str, str, *Any) -> int") class CallbackA(Protocol[Unpack[_Xs]]): @@ -93,7 +93,7 @@ def func3(func: CallbackA[Unpack[_Xs]]) -> Callable[[Unpack[_Xs]], int]: d1 = func3(callback1) -t_d1: Literal["() -> int"] = reveal_type(d1) +reveal_type(d1, expected_text="() -> int") # This should generate an error. d2 = func3(callback2) @@ -102,9 +102,9 @@ def func3(func: CallbackA[Unpack[_Xs]]) -> Callable[[Unpack[_Xs]], int]: d3 = func3(callback3) d4 = func3(callback4) -t_d4: Literal["(_p0: complex, _p1: str) -> int"] = reveal_type(d4) +reveal_type(d4, expected_text="(complex, str) -> int") d4_1 = d4(3j, "hi") -t_d4_1: Literal["int"] = reveal_type(d4_1) +reveal_type(d4_1, expected_text="int") # This should generate an error. d4_2 = d4(3j) @@ -122,10 +122,10 @@ def callback8(a: int, b: str, c: complex, d: int) -> int: d5_1 = func4(callback1) -t_d5_1: Literal["() -> int"] = reveal_type(d5_1) +reveal_type(d5_1, expected_text="() -> int") # This should generate an error. 
d5_2 = func4(callback4) d5_3 = func4(callback8) -t_d5_3: Literal["(_p0: int, _p1: str, _p2: complex) -> int"] = reveal_type(d5_3) +reveal_type(d5_3, expected_text="(int, str, complex) -> int") diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar6.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar6.py index 2659e881f0d1..ec0bd3409f00 100644 --- a/packages/pyright-internal/src/tests/samples/variadicTypeVar6.py +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar6.py @@ -3,7 +3,7 @@ # pyright: reportMissingModuleSource=false, reportMissingTypeArgument=true -from typing import Dict, Generic, Literal, Optional, Tuple, TypeVar, Union +from typing import Dict, Generic, Optional, Tuple, TypeVar, Union from typing_extensions import TypeVarTuple, Unpack _Xs = TypeVarTuple("_Xs") @@ -57,13 +57,13 @@ def func1(a: Alias4[_T, Unpack[_Xs]]) -> Union[_T, Unpack[_Xs]]: z1 = func1(Array(3, 4, "hi", 3j)) -t_z1: Literal["int | str | complex"] = reveal_type(z1) +reveal_type(z1, expected_text="int | str | complex") # This should generate an error. z2 = func1(Array(3, 4.3, "hi", 3j)) z3 = func1(Array(3.5, 4)) -t_z3: Literal["float"] = reveal_type(z3) +reveal_type(z3, expected_text="float") Alias6 = Tuple[int, Unpack[_Xs]] @@ -71,8 +71,8 @@ def func1(a: Alias4[_T, Unpack[_Xs]]) -> Union[_T, Unpack[_Xs]]: # The type annotation for y will generate an error if # reportMissingTypeArgument is enabled. 
def func2(x: Alias6[float, bool], y: Alias6, z: Alias6[()]): - t_x: Literal["Tuple[int, float, bool]"] = reveal_type(x) + reveal_type(x, expected_text="Tuple[int, float, bool]") - t_y: Literal["Tuple[int, *_Xs@Alias6]"] = reveal_type(y) + reveal_type(y, expected_text="Tuple[int, *_Xs@Alias6]") - t_z: Literal["Tuple[int]"] = reveal_type(z) + reveal_type(z, expected_text="Tuple[int]") diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar8.py b/packages/pyright-internal/src/tests/samples/variadicTypeVar8.py index 440ce150bc58..b0d8c1d606d1 100644 --- a/packages/pyright-internal/src/tests/samples/variadicTypeVar8.py +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar8.py @@ -2,7 +2,7 @@ # pyright: reportMissingModuleSource=false -from typing import List, Literal, TypeVar, Union +from typing import List, TypeVar, Union from typing_extensions import TypeVarTuple, Unpack @@ -41,10 +41,10 @@ def func7(a: List[Union[Unpack[_Xs]]]) -> Union[Unpack[_Xs]]: def test1(a: int, b: str, c: List[int], d: Union[complex, str]): v1_1 = func1(a) - t_v1_1: Literal["int"] = reveal_type(v1_1) + reveal_type(v1_1, expected_text="int") v1_2 = func1(d) - t_v1_2: Literal["complex | str"] = reveal_type(v1_2) + reveal_type(v1_2, expected_text="complex | str") # --------- @@ -53,30 +53,30 @@ def test1(a: int, b: str, c: List[int], d: Union[complex, str]): # variadic) TypeVar matching within a Union. So behavior # is likely to vary between type checkers here. 
v2_1 = func2(a) - t_v2_1: Literal["int"] = reveal_type(v2_1) + reveal_type(v2_1, expected_text="int") v2_2 = func2(d) - t_v2_2: Literal["str | complex"] = reveal_type(v2_2) + reveal_type(v2_2, expected_text="str | complex") # --------- v3_1 = func3(a) - t_v3_1: Literal["int"] = reveal_type(v3_1) + reveal_type(v3_1, expected_text="int") # This should generate an error v3_2 = func3(d) v3_3 = func3(b) - t_v3_3: Literal["str"] = reveal_type(v3_3) + reveal_type(v3_3, expected_text="str") # --------- # This behavior isn't defined by PEP 646 or PEP 484. v4_1 = func4(a) - t_v4_1: Literal["int"] = reveal_type(v4_1) + reveal_type(v4_1, expected_text="int") v4_2 = func4(d) - t_v4_2: Literal["str | complex"] = reveal_type(v4_2) + reveal_type(v4_2, expected_text="complex | str") # --------- @@ -84,7 +84,7 @@ def test1(a: int, b: str, c: List[int], d: Union[complex, str]): v5_1 = func5(a) v5_2 = func5(a, a) - t_v5_2: Literal["int"] = reveal_type(v5_2) + reveal_type(v5_2, expected_text="int") # This should generate an error v5_3 = func5(a, b) @@ -95,22 +95,22 @@ def test1(a: int, b: str, c: List[int], d: Union[complex, str]): # --------- v6_1 = func6(a) - t_v6_1: Literal["int"] = reveal_type(v6_1) + reveal_type(v6_1, expected_text="int") v6_2 = func6(a, b) - t_v6_2: Literal["int | str"] = reveal_type(v6_2) + reveal_type(v6_2, expected_text="int | str") v6_3 = func6(a, b, d) - t_v6_3: Literal["int | str | complex"] = reveal_type(v6_3) + reveal_type(v6_3, expected_text="int | str | complex") # --------- v7_1 = func7([a]) - t_v7_1: Literal["int"] = reveal_type(v7_1) + reveal_type(v7_1, expected_text="int") x: List[Union[int, str]] = [a, b] v7_2 = func7(x) - t_v7_2: Literal["int | str"] = reveal_type(v7_2) + reveal_type(v7_2, expected_text="int | str") v7_3 = func7([a, b, d]) - t_v7_3: Literal["int | str | complex"] = reveal_type(v7_3) + reveal_type(v7_3, expected_text="int | str | complex") diff --git a/packages/pyright-internal/src/tests/samples/variadicTypeVar9.py 
b/packages/pyright-internal/src/tests/samples/variadicTypeVar9.py new file mode 100644 index 000000000000..555f344e710a --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/variadicTypeVar9.py @@ -0,0 +1,38 @@ +# This sample tests the handling of variadic type variables used +# in generic type aliases and with suffixes. + +from typing import Callable, Generic, TypeVar +from typing_extensions import TypeVarTuple, Unpack + + +P = TypeVarTuple("P") +T = TypeVar("T", covariant=True) + + +class Call(Generic[Unpack[P]]): + def __init__(self, *args: Unpack[P]) -> None: + self.args = args + + +class Return(Generic[T]): + def __init__(self, /, result: T) -> None: + self.result = result + + +TailRec = Call[Unpack[P]] | Return[T] + + +def tail_rec( + fn: Callable[[Unpack[P]], TailRec[Unpack[P], T]] +) -> Callable[[Unpack[P]], T]: + ... + + +@tail_rec +def factorial(n: int, acc: int) -> TailRec[int, int, int]: + if n <= 0: + return Return(acc) + return Call(n - 1, acc * n) + + +reveal_type(factorial, expected_text="(int, int) -> int") diff --git a/packages/pyright-internal/src/tests/samples/with1.py b/packages/pyright-internal/src/tests/samples/with1.py index 6d94e42001ab..e6deab3dafb7 100644 --- a/packages/pyright-internal/src/tests/samples/with1.py +++ b/packages/pyright-internal/src/tests/samples/with1.py @@ -1,6 +1,7 @@ # This sample tests various forms of the 'with' statement. -from typing import Any, Optional, TypeVar +from typing import Any, Generic, Optional, TypeVar +from typing_extensions import Self _T1 = TypeVar("_T1") @@ -97,3 +98,20 @@ async def test2(): async with a1 as foo: pass + + +class Class5(Generic[_T1]): + async def __aenter__(self) -> Self: + return self + + async def __aexit__(self, *args: Any) -> None: + return None + + +class Class6(Class5[int]): + ... 
+ + +async def do(): + async with Class6() as f: + reveal_type(f, expected_text="Class6") diff --git a/packages/pyright-internal/src/tests/samples/with3.py b/packages/pyright-internal/src/tests/samples/with3.py index f24d3ea97607..39be41d52fd3 100644 --- a/packages/pyright-internal/src/tests/samples/with3.py +++ b/packages/pyright-internal/src/tests/samples/with3.py @@ -30,9 +30,37 @@ class A: def test2() -> None: some_dict = dict() - some_string = "HELLO" with suppress(KeyError): print(some_dict["missing_key"]) - print(some_string.lower()) + # This should generate an error because the + # code is reachable. + return 1 + + +def test3(cm: suppress) -> None: + some_dict = dict() + + with cm: + print(some_dict["missing_key"]) + + # This should generate an error because the + # code is reachable. + return 1 + + +class CMFactory: + def get_cm(self) -> suppress: + return suppress() + + +def test4() -> None: + some_dict = dict() + + with CMFactory().get_cm(): + print(some_dict["missing_key"]) + + # This should generate an error because the + # code is reachable. + return 1 diff --git a/packages/pyright-internal/src/tests/samples/with5.py b/packages/pyright-internal/src/tests/samples/with5.py new file mode 100644 index 000000000000..49af9e6e98f3 --- /dev/null +++ b/packages/pyright-internal/src/tests/samples/with5.py @@ -0,0 +1,33 @@ +# This sample tests the case of a context manager within a try/except block. + +from typing import Optional, ContextManager + + +def create_context() -> ContextManager[str]: + ... + + +def possible_exception() -> None: + ... 
+ + +def func1(): + x: Optional[str] = None + ctx: Optional[str] = None + try: + with create_context() as ctx: + x = "0" + possible_exception() + except Exception: + reveal_type(x, expected_text="Literal['0'] | None") + reveal_type(ctx, expected_text="str | None") + + +def func2(): + ctx: Optional[str] = None + try: + with create_context() as ctx: + possible_exception() + return + except Exception: + reveal_type(ctx, expected_text="str | None") diff --git a/packages/pyright-internal/src/tests/testState.test.ts b/packages/pyright-internal/src/tests/testState.test.ts index d07d59469724..9ecc5ee723c7 100644 --- a/packages/pyright-internal/src/tests/testState.test.ts +++ b/packages/pyright-internal/src/tests/testState.test.ts @@ -10,10 +10,9 @@ import assert from 'assert'; import { combinePaths, comparePathsCaseSensitive, getFileName, normalizeSlashes } from '../common/pathUtils'; import { compareStringsCaseSensitive } from '../common/stringUtils'; -import { parseTestData } from './harness/fourslash/fourSlashParser'; import { Range } from './harness/fourslash/fourSlashTypes'; import { runFourSlashTestContent } from './harness/fourslash/runner'; -import { TestState } from './harness/fourslash/testState'; +import { parseAndGetTestState } from './harness/fourslash/testState'; import * as factory from './harness/vfs/factory'; test('Create', () => { @@ -42,7 +41,7 @@ test('Multiple files', () => { //// pass `; - const state = parseAndGetTestState(code).state; + const state = parseAndGetTestState(code, factory.srcFolder).state; assert.equal(state.cwd(), normalizeSlashes('/')); assert(state.fs.existsSync(normalizeSlashes(combinePaths(factory.srcFolder, 'file1.py')))); @@ -112,7 +111,7 @@ test('Configuration', () => { //// pass `; - const state = parseAndGetTestState(code).state; + const state = parseAndGetTestState(code, factory.srcFolder).state; assert.equal(state.cwd(), normalizeSlashes('/')); assert(state.fs.existsSync(normalizeSlashes(combinePaths(factory.srcFolder, 
'file1.py')))); @@ -191,7 +190,7 @@ test('IgnoreCase', () => { //// pass `; - const state = parseAndGetTestState(code).state; + const state = parseAndGetTestState(code, factory.srcFolder).state; assert(state.fs.existsSync(normalizeSlashes(combinePaths(factory.srcFolder, 'FILE1.py')))); }); @@ -578,10 +577,3 @@ helper.verifyDiagnostics({ runFourSlashTestContent(factory.srcFolder, 'unused.py', code); }); - -function parseAndGetTestState(code: string) { - const data = parseTestData(factory.srcFolder, code, 'test.py'); - const state = new TestState(normalizeSlashes('/'), data); - - return { data, state }; -} diff --git a/packages/pyright-internal/src/tests/testUtils.ts b/packages/pyright-internal/src/tests/testUtils.ts index aff3a048df17..6280298d3ade 100644 --- a/packages/pyright-internal/src/tests/testUtils.ts +++ b/packages/pyright-internal/src/tests/testUtils.ts @@ -38,6 +38,7 @@ export interface FileAnalysisResult { warnings: Diagnostic[]; infos: Diagnostic[]; unusedCodes: Diagnostic[]; + deprecateds: Diagnostic[]; } export interface FileParseResult { @@ -116,7 +117,9 @@ export function buildAnalyzerFileInfo( isInPyTypedPackage: false, isTypingExtensionsStubFile: false, isBuiltInStubFile: false, + isIPythonMode: false, accessedSymbolMap: new Map(), + typingSymbolAliases: new Map(), }; return fileInfo; @@ -143,6 +146,7 @@ export function bindSampleFile(fileName: string, configOptions = new ConfigOptio warnings: fileInfo.diagnosticSink.getWarnings(), infos: fileInfo.diagnosticSink.getInformation(), unusedCodes: fileInfo.diagnosticSink.getUnusedCode(), + deprecateds: fileInfo.diagnosticSink.getDeprecated(), }; } @@ -184,6 +188,7 @@ export function typeAnalyzeSampleFiles( warnings: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Warning), infos: diagnostics.filter((diag) => diag.category === DiagnosticCategory.Information), unusedCodes: diagnostics.filter((diag) => diag.category === DiagnosticCategory.UnusedCode), + deprecateds: 
diagnostics.filter((diag) => diag.category === DiagnosticCategory.Deprecated), }; return analysisResult; } else { @@ -196,6 +201,7 @@ export function typeAnalyzeSampleFiles( warnings: [], infos: [], unusedCodes: [], + deprecateds: [], }; return analysisResult; } @@ -223,7 +229,8 @@ export function validateResults( errorCount: number, warningCount = 0, infoCount?: number, - unusedCode?: number + unusedCode?: number, + deprecated?: number ) { assert.strictEqual(results.length, 1); assert.strictEqual(results[0].errors.length, errorCount); @@ -236,4 +243,8 @@ export function validateResults( if (unusedCode !== undefined) { assert.strictEqual(results[0].unusedCodes.length, unusedCode); } + + if (deprecated !== undefined) { + assert.strictEqual(results[0].deprecateds.length, deprecated); + } } diff --git a/packages/pyright-internal/src/tests/tokenizer.test.ts b/packages/pyright-internal/src/tests/tokenizer.test.ts index 844961195f1b..0828b6950ad1 100644 --- a/packages/pyright-internal/src/tests/tokenizer.test.ts +++ b/packages/pyright-internal/src/tests/tokenizer.test.ts @@ -90,6 +90,15 @@ test('InvalidWithNewLine', () => { assert.equal((results.tokens.getItemAt(3) as NewLineToken).newLineType, NewLineType.LineFeed); }); +test('InvalidIndent', () => { + const t = new Tokenizer(); + const results = t.tokenize('\tpass\n'); + assert.equal(results.tokens.count, 4 + _implicitTokenCountNoImplicitNewLine); + + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + assert.equal(results.tokens.getItemAt(1).type, TokenType.Keyword); +}); + test('ParenNewLines', () => { const t = new Tokenizer(); const results = t.tokenize('\n(\n(\n)\n)\n)\n'); @@ -315,7 +324,7 @@ test('PunctuationTokens', () => { test('IndentDedent', () => { const t = new Tokenizer(); const results = t.tokenize('test\n' + ' i1\n' + ' i2 # \n' + ' # \n' + ' \ti3\n' + '\ti4\n' + ' i1'); - assert.equal(results.tokens.count, 15 + _implicitTokenCount); + assert.equal(results.tokens.count, 16 + 
_implicitTokenCount); assert.equal(results.tokens.getItemAt(0).type, TokenType.Identifier); assert.equal(results.tokens.getItemAt(1).type, TokenType.NewLine); @@ -329,17 +338,19 @@ test('IndentDedent', () => { assert.equal((results.tokens.getItemAt(7) as IndentToken).indentAmount, 8); assert.equal(results.tokens.getItemAt(8).type, TokenType.Identifier); assert.equal(results.tokens.getItemAt(9).type, TokenType.NewLine); - assert.equal(results.tokens.getItemAt(10).type, TokenType.Identifier); - assert.equal(results.tokens.getItemAt(11).type, TokenType.NewLine); - assert.equal(results.tokens.getItemAt(12).type, TokenType.Dedent); - assert.equal((results.tokens.getItemAt(12) as DedentToken).indentAmount, 2); - assert.equal((results.tokens.getItemAt(12) as DedentToken).matchesIndent, true); + assert.equal(results.tokens.getItemAt(10).type, TokenType.Indent); + assert.equal((results.tokens.getItemAt(10) as IndentToken).isIndentAmbiguous, true); + assert.equal(results.tokens.getItemAt(11).type, TokenType.Identifier); + assert.equal(results.tokens.getItemAt(12).type, TokenType.NewLine); assert.equal(results.tokens.getItemAt(13).type, TokenType.Dedent); - assert.equal((results.tokens.getItemAt(13) as DedentToken).indentAmount, 1); - assert.equal((results.tokens.getItemAt(13) as DedentToken).matchesIndent, false); - assert.equal(results.tokens.getItemAt(14).type, TokenType.Identifier); - assert.equal(results.tokens.getItemAt(15).type, TokenType.NewLine); - assert.equal(results.tokens.getItemAt(16).type, TokenType.EndOfStream); + assert.equal((results.tokens.getItemAt(13) as DedentToken).indentAmount, 2); + assert.equal((results.tokens.getItemAt(13) as DedentToken).matchesIndent, true); + assert.equal(results.tokens.getItemAt(14).type, TokenType.Dedent); + assert.equal((results.tokens.getItemAt(14) as DedentToken).indentAmount, 1); + assert.equal((results.tokens.getItemAt(14) as DedentToken).matchesIndent, false); + assert.equal(results.tokens.getItemAt(15).type, 
TokenType.Identifier); + assert.equal(results.tokens.getItemAt(16).type, TokenType.NewLine); + assert.equal(results.tokens.getItemAt(17).type, TokenType.EndOfStream); }); test('IndentDedentParen', () => { @@ -364,46 +375,52 @@ test('IndentDedentParen', () => { test('Strings: simple', () => { const t = new Tokenizer(); const results = t.tokenize(' "a"'); - assert.equal(results.tokens.count, 1 + _implicitTokenCount); + assert.equal(results.tokens.count, 3 + _implicitTokenCount); - const stringToken = results.tokens.getItemAt(0) as StringToken; + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + const stringToken = results.tokens.getItemAt(1) as StringToken; assert.equal(stringToken.type, TokenType.String); assert.equal(stringToken.length, 3); assert.equal(stringToken.escapedValue, 'a'); assert.equal(stringToken.flags, StringTokenFlags.DoubleQuote); + assert.equal(results.tokens.getItemAt(2).type, TokenType.NewLine); }); test('Strings: unclosed', () => { const t = new Tokenizer(); const results = t.tokenize(' "string" """line1\n#line2"""\t\'un#closed'); - assert.equal(results.tokens.count, 3 + _implicitTokenCount); + assert.equal(results.tokens.count, 5 + _implicitTokenCount); const ranges = [ [1, 8], [10, 18], [29, 10], ]; - for (let i = 0; i < ranges.length; i += 1) { - assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); - assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); - assert.equal(results.tokens.getItemAt(i).type, TokenType.String); + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i + 1).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i + 1).length, ranges[i][1]); + assert.equal(results.tokens.getItemAt(i + 1).type, TokenType.String); } + assert.equal(results.tokens.getItemAt(5).type, TokenType.Dedent); }); test('Strings: escaped across multiple lines', () => { const t = new Tokenizer(); const results = 
t.tokenize(' "a\\\nb" \'c\\\r\nb\''); - assert.equal(results.tokens.count, 2 + _implicitTokenCount); + assert.equal(results.tokens.count, 4 + _implicitTokenCount); const ranges = [ [1, 6], [8, 7], ]; - for (let i = 0; i < ranges.length; i += 1) { - assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); - assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); - assert.equal(results.tokens.getItemAt(i).type, TokenType.String); + assert.equal(results.tokens.getItemAt(0).type, TokenType.Indent); + for (let i = 0; i < ranges.length; i++) { + assert.equal(results.tokens.getItemAt(i + 1).start, ranges[i][0]); + assert.equal(results.tokens.getItemAt(i + 1).length, ranges[i][1]); + assert.equal(results.tokens.getItemAt(i + 1).type, TokenType.String); } + assert.equal(results.tokens.getItemAt(5).type, TokenType.EndOfStream); }); test('Strings: block next to regular, double-quoted', () => { @@ -415,7 +432,7 @@ test('Strings: block next to regular, double-quoted', () => { [0, 8], [8, 8], ]; - for (let i = 0; i < ranges.length; i += 1) { + for (let i = 0; i < ranges.length; i++) { assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); assert.equal(results.tokens.getItemAt(i).type, TokenType.String); @@ -431,7 +448,7 @@ test('Strings: block next to block, double-quoted', () => { [0, 6], [6, 2], ]; - for (let i = 0; i < ranges.length; i += 1) { + for (let i = 0; i < ranges.length; i++) { assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); assert.equal(results.tokens.getItemAt(i).type, TokenType.String); @@ -444,7 +461,7 @@ test('Strings: unclosed sequence of quotes', () => { assert.equal(results.tokens.count, 1 + _implicitTokenCount); const ranges = [[0, 5]]; - for (let i = 0; i < ranges.length; i += 1) { + for (let i = 0; i < ranges.length; i++) { assert.equal(results.tokens.getItemAt(i).start, ranges[i][0]); 
assert.equal(results.tokens.getItemAt(i).length, ranges[i][1]); assert.equal(results.tokens.getItemAt(i).type, TokenType.String); @@ -1241,7 +1258,7 @@ test('Underscore numbers', () => { const isIntegers = [true, true, false, true, false, true]; assert.equal(results.tokens.count, 6 + _implicitTokenCount); - for (let i = 0; i < lengths.length; i += 1) { + for (let i = 0; i < lengths.length; i++) { assert.equal(results.tokens.getItemAt(i).type, TokenType.Number); assert.equal(results.tokens.getItemAt(i).length, lengths[i]); assert.equal((results.tokens.getItemAt(i) as NumberToken).isInteger, isIntegers[i]); @@ -1314,7 +1331,7 @@ test('Operators', () => { ]; assert.equal(results.tokens.count - _implicitTokenCount, lengths.length); assert.equal(results.tokens.count - _implicitTokenCount, operatorTypes.length); - for (let i = 0; i < lengths.length; i += 1) { + for (let i = 0; i < lengths.length; i++) { const t = results.tokens.getItemAt(i); assert.equal(t.type, TokenType.Operator, `${t.type} at ${i} is not an operator`); assert.equal((t as OperatorToken).operatorType, operatorTypes[i]); @@ -1438,40 +1455,40 @@ test('Identifiers1', () => { test('TypeIgnoreAll1', () => { const t = new Tokenizer(); const results = t.tokenize('\n#type:ignore\n"test"'); - assert.equal(results.typeIgnoreAll, true); + assert(results.typeIgnoreAll); }); test('TypeIgnoreAll2', () => { const t = new Tokenizer(); const results = t.tokenize('\n# type: ignore ssss\n'); - assert.equal(results.typeIgnoreAll, true); + assert(results.typeIgnoreAll); }); test('TypeIgnoreAll3', () => { const t = new Tokenizer(); const results = t.tokenize('\n# type: ignoressss\n'); - assert.equal(results.typeIgnoreAll, false); + assert(!results.typeIgnoreAll); }); test('TypeIgnoreAll3', () => { const t = new Tokenizer(); const results = t.tokenize('\n"hello"\n# type: ignore\n'); - assert.equal(results.typeIgnoreAll, false); + assert(!results.typeIgnoreAll); }); test('TypeIgnoreLine1', () => { const t = new Tokenizer(); 
const results = t.tokenize('\na = 3 # type: ignore\n"test" # type:ignore'); - assert.equal(Object.keys(results.typeIgnoreLines).length, 2); - assert.equal(results.typeIgnoreLines[1], true); - assert.equal(results.typeIgnoreLines[2], true); + assert.equal(results.typeIgnoreLines.size, 2); + assert(results.typeIgnoreLines.has(1)); + assert(results.typeIgnoreLines.has(2)); }); test('TypeIgnoreLine2', () => { const t = new Tokenizer(); const results = t.tokenize('a = 3 # type: ignores\n"test" # type:ignore'); - assert.equal(Object.keys(results.typeIgnoreLines).length, 1); - assert.equal(results.typeIgnoreLines[1], true); + assert.equal(results.typeIgnoreLines.size, 1); + assert(results.typeIgnoreLines.has(1)); assert.equal(results.tokens.getItemAtPosition(0), 0); assert.equal(results.tokens.getItemAtPosition(1), 0); @@ -1499,3 +1516,19 @@ test('Constructor', () => { assert.equal(results.tokens.getItemAt(1).type, TokenType.Identifier); assert.equal(results.tokens.getItemAt(1).length, 11); }); + +test('Normalization', () => { + const t = new Tokenizer(); + const results = t.tokenize('ℝ 𝕽'); + assert.equal(results.tokens.count, 2 + _implicitTokenCount); + + let idToken = results.tokens.getItemAt(0) as IdentifierToken; + assert.equal(idToken.type, TokenType.Identifier); + assert.equal(idToken.length, 1); + assert.equal(idToken.value, 'R'); + + idToken = results.tokens.getItemAt(1) as IdentifierToken; + assert.equal(idToken.type, TokenType.Identifier); + assert.equal(idToken.length, 2); + assert.equal(idToken.value, 'R'); +}); diff --git a/packages/pyright-internal/src/tests/typeEvaluator1.test.ts b/packages/pyright-internal/src/tests/typeEvaluator1.test.ts index 984251e9967b..de414661fd75 100644 --- a/packages/pyright-internal/src/tests/typeEvaluator1.test.ts +++ b/packages/pyright-internal/src/tests/typeEvaluator1.test.ts @@ -46,6 +46,7 @@ test('Builtins1', () => { 'DeprecationWarning', 'EOFError', 'Ellipsis', + 'EncodingWarning', 'EnvironmentError', 'Exception', 
'FileExistsError', @@ -99,13 +100,16 @@ test('Builtins1', () => { 'Warning', 'WindowsError', 'ZeroDivisionError', + '__build_class__', '__import__', '__loader__', '__name__', '__package__', '__spec__', 'abs', + 'aiter', 'all', + 'anext', 'any', 'ascii', 'bin', @@ -292,6 +296,12 @@ test('TypeNarrowingIsNone2', () => { TestUtils.validateResults(analysisResults, 0); }); +test('TypeNarrowingIsNoneTuple1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsNoneTuple1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('TypeNarrowingLiteral1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingLiteral1.py']); @@ -340,12 +350,24 @@ test('TypeNarrowingIsinstance5', () => { TestUtils.validateResults(analysisResults, 0); }); +test('TypeNarrowingIsinstance6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('TypeNarrowingIsinstance7', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIsinstance7.py']); TestUtils.validateResults(analysisResults, 0); }); +test('TypeNarrowingTupleLength1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTupleLength1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('TypeNarrowingIn1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingIn1.py']); @@ -376,12 +398,30 @@ test('TypeNarrowingTypedDict2', () => { TestUtils.validateResults(analysisResults, 0); }); +test('TypeNarrowingTypedDict3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingTypedDict3.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + test('typeNarrowingCallable1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingCallable1.py']); TestUtils.validateResults(analysisResults, 2); }); 
+test('TypeNarrowingFalsy1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingFalsy1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TypeNarrowingLocalConst1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeNarrowingLocalConst1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('ReturnTypes1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['returnTypes1.py']); @@ -415,7 +455,7 @@ test('Expressions2', () => { test('Expressions3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['expressions3.py']); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 0); }); test('Expressions4', () => { @@ -457,7 +497,7 @@ test('Expressions9', () => { test('Unpack1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unpack1.py']); - TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 4); }); test('Unpack2', () => { @@ -480,6 +520,20 @@ test('Unpack3', () => { TestUtils.validateResults(analysisResults38, 0); }); +test('Unpack4', () => { + const configOptions = new ConfigOptions('.'); + + // Analyze with Python 3.8 settings. + configOptions.defaultPythonVersion = PythonVersion.V3_8; + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['unpack4.py'], configOptions); + TestUtils.validateResults(analysisResults38, 2); + + // Analyze with Python 3.9 settings. 
+ configOptions.defaultPythonVersion = PythonVersion.V3_9; + const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['unpack4.py'], configOptions); + TestUtils.validateResults(analysisResults39, 1); +}); + test('Lambda1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda1.py']); @@ -504,44 +558,92 @@ test('Lambda4', () => { TestUtils.validateResults(analysisResults, 1); }); -test('Function1', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function1.py']); +test('Lambda5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda5.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 0); }); -test('Function2', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function2.py']); +test('Lambda6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['lambda6.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Call1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Call2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call2.py']); TestUtils.validateResults(analysisResults, 11); }); -test('Function3', () => { +test('Call3', () => { const configOptions = new ConfigOptions('.'); // Analyze with Python 3.7 settings. This will generate more errors. configOptions.defaultPythonVersion = PythonVersion.V3_7; - const analysisResults37 = TestUtils.typeAnalyzeSampleFiles(['function3.py'], configOptions); - TestUtils.validateResults(analysisResults37, 30); + const analysisResults37 = TestUtils.typeAnalyzeSampleFiles(['call3.py'], configOptions); + TestUtils.validateResults(analysisResults37, 32); // Analyze with Python 3.8 settings. 
configOptions.defaultPythonVersion = PythonVersion.V3_8; - const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['function3.py'], configOptions); - TestUtils.validateResults(analysisResults38, 17); + const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['call3.py'], configOptions); + TestUtils.validateResults(analysisResults38, 18); }); -test('Function4', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function4.py']); +test('Call4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call4.py']); TestUtils.validateResults(analysisResults, 0); }); -test('Function5', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function5.py']); +test('Call5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call5.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Call6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call6.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Call7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['call7.py']); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('Function1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function1.py']); TestUtils.validateResults(analysisResults, 0); }); +test('Function2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Function3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function3.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Function4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + test('Function6', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function6.py']); @@ -551,73 +653,67 @@ test('Function6', () => { test('Function7', () => { 
const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function7.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 0); }); test('Function8', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function8.py']); - TestUtils.validateResults(analysisResults, 3); + TestUtils.validateResults(analysisResults, 0); }); test('Function9', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function9.py']); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 2); }); test('Function10', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function10.py']); - TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 0); }); test('Function11', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function11.py']); - TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 0); }); -test('Function12', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function12.py']); +test('KwargsUnpack1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['kwargsUnpack1.py']); + + TestUtils.validateResults(analysisResults, 11); +}); + +test('Unreachable1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unreachable1.py']); TestUtils.validateResults(analysisResults, 0, 0, 0, 2); }); -test('Function13', () => { +test('FunctionMember1', () => { // Analyze with reportFunctionMemberAccess disabled. - const analysisResult1 = TestUtils.typeAnalyzeSampleFiles(['function13.py']); + const analysisResult1 = TestUtils.typeAnalyzeSampleFiles(['functionMember1.py']); TestUtils.validateResults(analysisResult1, 0); // Analyze with reportFunctionMemberAccess enabled. 
const configOptions = new ConfigOptions('.'); configOptions.diagnosticRuleSet.reportFunctionMemberAccess = 'error'; - const analysisResult2 = TestUtils.typeAnalyzeSampleFiles(['function13.py'], configOptions); + const analysisResult2 = TestUtils.typeAnalyzeSampleFiles(['functionMember1.py'], configOptions); TestUtils.validateResults(analysisResult2, 3); }); -test('Function14', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function14.py']); - - TestUtils.validateResults(analysisResults, 0); -}); +test('FunctionMember2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['functionMember2.py']); -test('Function15', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function15.py']); - - TestUtils.validateResults(analysisResults, 0); -}); - -test('Function16', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['function16.py']); - - TestUtils.validateResults(analysisResults, 4); + TestUtils.validateResults(analysisResults, 3); }); test('Annotations1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations1.py']); - TestUtils.validateResults(analysisResults, 4); + TestUtils.validateResults(analysisResults, 6); }); test('Annotations2', () => { @@ -644,6 +740,12 @@ test('Annotations5', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Annotations6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotations6.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + test('AnnotatedVar1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['annotatedVar1.py']); @@ -710,6 +812,12 @@ test('CodeFlow5', () => { TestUtils.validateResults(analysisResults, 0); }); +test('CapturedVariable1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['capturedVariable1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + test('Properties1', () => { const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['properties1.py']); @@ -741,12 +849,14 @@ test('Properties5', () => { }); test('Properties6', () => { + const configOptions = new ConfigOptions('.'); + // Analyze with reportPropertyTypeMismatch enabled. - const analysisResult1 = TestUtils.typeAnalyzeSampleFiles(['properties6.py']); + configOptions.diagnosticRuleSet.reportPropertyTypeMismatch = 'error'; + const analysisResult1 = TestUtils.typeAnalyzeSampleFiles(['properties6.py'], configOptions); TestUtils.validateResults(analysisResult1, 2); // Analyze with reportPropertyTypeMismatch disabled. - const configOptions = new ConfigOptions('.'); configOptions.diagnosticRuleSet.reportPropertyTypeMismatch = 'none'; const analysisResult2 = TestUtils.typeAnalyzeSampleFiles(['properties6.py'], configOptions); TestUtils.validateResults(analysisResult2, 0); @@ -788,6 +898,12 @@ test('Properties12', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Properties13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['properties13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Operators1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operators1.py']); @@ -824,6 +940,18 @@ test('Operators6', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Operators7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operators7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Operators8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['operators8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Optional1', () => { const configOptions = new ConfigOptions('.'); @@ -875,7 +1003,7 @@ test('Optional2', () => { test('Tuples1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuples1.py']); - TestUtils.validateResults(analysisResults, 11); + TestUtils.validateResults(analysisResults, 14); }); test('Tuples2', () => { @@ -905,7 
+1033,7 @@ test('Tuples5', () => { test('Tuples6', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuples6.py']); - TestUtils.validateResults(analysisResults, 8); + TestUtils.validateResults(analysisResults, 9); }); test('Tuples7', () => { @@ -962,10 +1090,16 @@ test('Tuples15', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Tuples16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tuples16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('NamedTuples1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuples1.py']); - TestUtils.validateResults(analysisResults, 11); + TestUtils.validateResults(analysisResults, 12); }); test('NamedTuples2', () => { @@ -986,10 +1120,22 @@ test('NamedTuples4', () => { TestUtils.validateResults(analysisResults, 0); }); +test('NamedTuples5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuples5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('NamedTuples6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['namedTuples6.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + test('Slots1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['slots1.py']); - TestUtils.validateResults(analysisResults, 4); + TestUtils.validateResults(analysisResults, 2); }); test('Slots2', () => { @@ -997,3 +1143,117 @@ test('Slots2', () => { TestUtils.validateResults(analysisResults, 3); }); + +test('Parameters1', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.diagnosticRuleSet.reportMissingParameterType = 'none'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['parameters1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportMissingParameterType = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['parameters1.py'], configOptions); + 
TestUtils.validateResults(analysisResults2, 1); +}); + +test('Self1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self1.py']); + + TestUtils.validateResults(analysisResults, 12); +}); + +test('Self2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self2.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('Self3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self3.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Self5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['self5.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('UnusedVariable1', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.diagnosticRuleSet.reportUnusedVariable = 'none'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['unusedVariable1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportUnusedVariable = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['unusedVariable1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 3); +}); + +test('Descriptor1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['descriptor1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('Partial1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial1.py']); + + TestUtils.validateResults(analysisResults, 17); +}); + +test('Partial2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['partial2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('TotalOrdering1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['totalOrdering1.py']); + + 
TestUtils.validateResults(analysisResults, 1); +}); + +test('TupleUnpack1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack1.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('TupleUnpack2', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack2.py'], configOptions); + TestUtils.validateResults(analysisResults1, 20); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['tupleUnpack2.py'], configOptions); + TestUtils.validateResults(analysisResults2, 5); +}); + +test('PseudoGeneric1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['pseudoGeneric1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('LiteralString1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literalString1.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + +test('ParamInference1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['paramInference1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); diff --git a/packages/pyright-internal/src/tests/typeEvaluator2.test.ts b/packages/pyright-internal/src/tests/typeEvaluator2.test.ts index d34f3e7a96e4..69289e4635bf 100644 --- a/packages/pyright-internal/src/tests/typeEvaluator2.test.ts +++ b/packages/pyright-internal/src/tests/typeEvaluator2.test.ts @@ -15,7 +15,7 @@ import * as TestUtils from './testUtils'; test('CallbackProtocol1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol1.py']); - TestUtils.validateResults(analysisResults, 6); + TestUtils.validateResults(analysisResults, 8); }); test('CallbackProtocol2', () => { @@ -39,9 +39,27 @@ test('CallbackProtocol4', () => { test('CallbackProtocol5', () => { const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['callbackProtocol5.py']); + TestUtils.validateResults(analysisResults, 3); +}); + +test('CallbackProtocol6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol6.py']); + TestUtils.validateResults(analysisResults, 2); }); +test('CallbackProtocol7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('CallbackProtocol8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callbackProtocol8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Assignment1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment1.py']); @@ -51,7 +69,7 @@ test('Assignment1', () => { test('Assignment2', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment2.py']); - TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 3); }); test('Assignment3', () => { @@ -96,6 +114,12 @@ test('Assignment9', () => { TestUtils.validateResults(analysisResults, 1); }); +test('Assignment10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignment10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('AugmentedAssignment1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['augmentedAssignment1.py']); @@ -108,6 +132,12 @@ test('AugmentedAssignment2', () => { TestUtils.validateResults(analysisResults, 3); }); +test('AugmentedAssignment3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['augmentedAssignment3.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + test('Super1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super1.py']); @@ -138,6 +168,35 @@ test('Super5', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Super6', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['super6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Super7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super7.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Super8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['super8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('MissingSuper1', () => { + const configOptions = new ConfigOptions('.'); + + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['missingSuper1.py'], configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportMissingSuperCall = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['missingSuper1.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); + test('NewType1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['newType1.py']); @@ -178,9 +237,15 @@ test('isInstance2', () => { }); test('isInstance3', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['isinstance3.py']); + const configOptions = new ConfigOptions('.'); - TestUtils.validateResults(analysisResults, 2); + configOptions.defaultPythonVersion = PythonVersion.V3_9; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['isinstance3.py'], configOptions); + TestUtils.validateResults(analysisResults1, 4); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['isinstance3.py'], configOptions); + TestUtils.validateResults(analysisResults2, 1); }); test('isInstance4', () => { @@ -264,7 +329,7 @@ test('Assert1', () => { test('RevealedType1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['revealedType1.py']); - TestUtils.validateResults(analysisResults, 0, 0, 3); + TestUtils.validateResults(analysisResults, 2, 0, 7); }); test('NameBindings1', () => { @@ -408,7 +473,7 @@ 
test('GenericTypes19', () => { test('GenericTypes20', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes20.py']); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 1); }); test('GenericTypes21', () => { @@ -697,6 +762,108 @@ test('GenericTypes66', () => { TestUtils.validateResults(analysisResults, 1); }); +test('GenericTypes67', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes67.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericTypes68', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes68.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes69', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes69.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes70', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes70.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes71', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.diagnosticRuleSet.strictParameterNoneValue = false; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes71.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); + + configOptions.diagnosticRuleSet.strictParameterNoneValue = true; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes71.py'], configOptions); + TestUtils.validateResults(analysisResults, 5); +}); + +test('GenericTypes72', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes72.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes73', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes73.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes74', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['genericTypes74.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes75', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes75.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericTypes76', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes76.py']); + + TestUtils.validateResults(analysisResults, 5); +}); + +test('GenericTypes77', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes77.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericTypes78', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes78.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('GenericTypes79', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes79.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes80', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes80.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes81', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes81.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('GenericTypes82', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['genericTypes82.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Protocol1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol1.py']); @@ -819,6 +986,50 @@ test('Protocol20', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Protocol21', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol21.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol22', () => { + const configOptions = new ConfigOptions('.'); + configOptions.diagnosticRuleSet.reportInvalidTypeVarUse 
= 'error'; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol22.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol23', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol23.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol24', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol24.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Protocol25', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol25.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Protocol26', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol26.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Protocol27', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['protocol27.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + test('TypedDict1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict1.py']); @@ -858,7 +1069,7 @@ test('TypedDict6', () => { test('TypedDict7', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict7.py']); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 2); }); test('TypedDict8', () => { @@ -888,7 +1099,7 @@ test('TypedDict11', () => { test('TypedDict12', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict12.py']); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 3); }); test('TypedDict13', () => { @@ -902,3 +1113,21 @@ test('TypedDict14', () => { TestUtils.validateResults(analysisResults, 1); }); + +test('TypedDict15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict15.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('TypedDict16', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['typedDict16.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + +test('TypedDict17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typedDict17.py']); + + TestUtils.validateResults(analysisResults, 2); +}); diff --git a/packages/pyright-internal/src/tests/typeEvaluator3.test.ts b/packages/pyright-internal/src/tests/typeEvaluator3.test.ts index 45fe140c66f5..d546aace57f7 100644 --- a/packages/pyright-internal/src/tests/typeEvaluator3.test.ts +++ b/packages/pyright-internal/src/tests/typeEvaluator3.test.ts @@ -33,7 +33,7 @@ test('Ellipsis1', () => { test('Generators1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generators1.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 9); }); test('Generators2', () => { @@ -93,7 +93,7 @@ test('Generators10', () => { test('Generators11', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generators11.py']); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 2); }); test('Generators12', () => { @@ -108,6 +108,24 @@ test('Generators13', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Generators14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generators14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Generators15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generators15.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Await1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['await1.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Coroutines1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['coroutines1.py']); @@ -120,6 +138,17 @@ test('Coroutines2', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Coroutines3', () => { + const configOptions 
= new ConfigOptions('.'); + + // This functionality is deprecated in Python 3.11, so the type no longer + // exists in typing.pyi after that point. + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['coroutines3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 0); +}); + test('Loops1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loops1.py']); @@ -180,6 +209,36 @@ test('Loops10', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Loops11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loops11.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Loops12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loops12.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('Loops13', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loops13.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loops14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loops14.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Loops15', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['loops15.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('ForLoop1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['forLoop1.py']); @@ -228,6 +287,18 @@ test('ListComprehension6', () => { TestUtils.validateResults(analysisResults, 4); }); +test('ListComprehension7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['listComprehension7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('ListComprehension8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['listComprehension8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('SetComprehension1', () => { const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['setComprehension1.py']); @@ -264,6 +335,18 @@ test('Literals5', () => { TestUtils.validateResults(analysisResults, 2); }); +test('Literals6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals6.py']); + + TestUtils.validateResults(analysisResults, 26); +}); + +test('Literals7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['literals7.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + test('TypeAlias1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias1.py']); @@ -287,56 +370,53 @@ test('TypeAlias4', () => { configOptions.defaultPythonVersion = PythonVersion.V3_9; const analysisResults3_9 = TestUtils.typeAnalyzeSampleFiles(['typeAlias4.py'], configOptions); - TestUtils.validateResults(analysisResults3_9, 7); + TestUtils.validateResults(analysisResults3_9, 8); configOptions.defaultPythonVersion = PythonVersion.V3_10; const analysisResults3_10 = TestUtils.typeAnalyzeSampleFiles(['typeAlias4.py'], configOptions); - TestUtils.validateResults(analysisResults3_10, 6); + TestUtils.validateResults(analysisResults3_10, 7); }); test('TypeAlias5', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias5.py']); - TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 3); }); test('TypeAlias6', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias6.py']); - TestUtils.validateResults(analysisResults, 9); + TestUtils.validateResults(analysisResults, 6); }); test('TypeAlias7', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias7.py']); - TestUtils.validateResults(analysisResults, 3); + TestUtils.validateResults(analysisResults, 0); }); test('TypeAlias8', () => { - const configOptions = new ConfigOptions('.'); - - configOptions.defaultPythonVersion = PythonVersion.V3_10; - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias8.py'], 
configOptions); + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias8.py']); - TestUtils.validateResults(analysisResults, 4); + TestUtils.validateResults(analysisResults, 0); }); test('TypeAlias9', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias9.py']); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 3); }); test('TypeAlias10', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias10.py']); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 4); }); test('TypeAlias11', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias11.py']); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 2); }); test('TypeAlias12', () => { @@ -345,20 +425,59 @@ test('TypeAlias12', () => { TestUtils.validateResults(analysisResults, 0); }); -test('TypeAlias13', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias13.pyi']); +test('RecursiveTypeAlias1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias1.py']); + + TestUtils.validateResults(analysisResults, 14); +}); + +test('RecursiveTypeAlias2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('RecursiveTypeAlias3', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 4); +}); + +test('RecursiveTypeAlias4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias4.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias5', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias5.pyi']); TestUtils.validateResults(analysisResults, 2); }); -test('TypeAlias14', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias14.py']); +test('RecursiveTypeAlias6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias6.py']); - TestUtils.validateResults(analysisResults, 6); + TestUtils.validateResults(analysisResults, 0); }); -test('TypeAlias15', () => { - const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeAlias15.py']); +test('RecursiveTypeAlias7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias7.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('RecursiveTypeAlias9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['recursiveTypeAlias9.py']); TestUtils.validateResults(analysisResults, 0); }); @@ -391,7 +510,7 @@ test('Classes2', () => { // Turn on errors. configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes2.py'], configOptions); - TestUtils.validateResults(analysisResults, 21); + TestUtils.validateResults(analysisResults, 22); }); test('Classes3', () => { @@ -416,7 +535,7 @@ test('Classes5', () => { // Turn on errors. 
configOptions.diagnosticRuleSet.reportIncompatibleVariableOverride = 'error'; analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes5.py'], configOptions); - TestUtils.validateResults(analysisResults, 23); + TestUtils.validateResults(analysisResults, 24); }); test('Classes6', () => { @@ -431,6 +550,25 @@ test('Classes7', () => { TestUtils.validateResults(analysisResults, 1); }); +test('Classes8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes8.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Classes9', () => { + const configOptions = new ConfigOptions('.'); + + // By default, optional diagnostics are ignored. + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes9.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); + + // Turn on errors. + configOptions.diagnosticRuleSet.reportIncompatibleMethodOverride = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['classes9.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + test('Enums1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enums1.py']); @@ -467,6 +605,20 @@ test('Enums6', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Enums7', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enums7.py']); + + // Note: This should be 4 errors, but a change to typeshed's enum.pyi + // broke one of the tests. 
+ TestUtils.validateResults(analysisResults, 3); +}); + +test('Enums8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['enums8.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + test('TypeGuard1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeGuard1.py']); @@ -479,12 +631,24 @@ test('TypeGuard2', () => { TestUtils.validateResults(analysisResults, 0); }); +test('TypeGuard3', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeGuard3.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + test('Never1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['never1.py']); TestUtils.validateResults(analysisResults, 0); }); +test('Never2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['never2.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + test('TypePromotions1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typePromotions1.py']); @@ -494,7 +658,7 @@ test('TypePromotions1', () => { test('Index1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['index1.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 7); }); test('ProtocolModule2', () => { @@ -506,7 +670,7 @@ test('ProtocolModule2', () => { test('VariadicTypeVar1', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar1.py'], configOptions); TestUtils.validateResults(analysisResults, 12); }); @@ -514,7 +678,7 @@ test('VariadicTypeVar1', () => { test('VariadicTypeVar2', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar2.py'], configOptions); TestUtils.validateResults(analysisResults, 13); }); @@ -522,7 +686,7 @@ test('VariadicTypeVar2', () => { test('VariadicTypeVar3', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar3.py'], configOptions); TestUtils.validateResults(analysisResults, 7); }); @@ -530,7 +694,7 @@ test('VariadicTypeVar3', () => { test('VariadicTypeVar4', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar4.py'], configOptions); TestUtils.validateResults(analysisResults, 4); }); @@ -538,15 +702,15 @@ test('VariadicTypeVar4', () => { test('VariadicTypeVar5', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar5.py'], configOptions); - TestUtils.validateResults(analysisResults, 13); + TestUtils.validateResults(analysisResults, 8); }); test('VariadicTypeVar6', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar6.py'], configOptions); TestUtils.validateResults(analysisResults, 8); }); @@ -554,7 +718,7 @@ test('VariadicTypeVar6', () => { test('VariadicTypeVar7', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const 
analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar7.py'], configOptions); TestUtils.validateResults(analysisResults, 6); }); @@ -562,11 +726,59 @@ test('VariadicTypeVar7', () => { test('VariadicTypeVar8', () => { const configOptions = new ConfigOptions('.'); - configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.defaultPythonVersion = PythonVersion.V3_11; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar8.py'], configOptions); TestUtils.validateResults(analysisResults, 4); }); +test('VariadicTypeVar9', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar9.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('VariadicTypeVar10', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar10.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + +test('VariadicTypeVar11', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar11.py'], configOptions); + TestUtils.validateResults(analysisResults, 4); +}); + +test('VariadicTypeVar12', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar12.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('VariadicTypeVar13', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar13.py'], configOptions); + 
TestUtils.validateResults(analysisResults, 1); +}); + +test('VariadicTypeVar14', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['variadicTypeVar14.py'], configOptions); + TestUtils.validateResults(analysisResults, 6); +}); + test('Match1', () => { const configOptions = new ConfigOptions('.'); @@ -580,7 +792,7 @@ test('Match2', () => { configOptions.defaultPythonVersion = PythonVersion.V3_10; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['match2.py'], configOptions); - TestUtils.validateResults(analysisResults, 0); + TestUtils.validateResults(analysisResults, 2); }); test('Match3', () => { @@ -588,7 +800,7 @@ test('Match3', () => { configOptions.defaultPythonVersion = PythonVersion.V3_10; const analysisResults = TestUtils.typeAnalyzeSampleFiles(['match3.py'], configOptions); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 5); }); test('Match4', () => { @@ -623,6 +835,35 @@ test('Match7', () => { TestUtils.validateResults(analysisResults, 2); }); +test('Match8', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['match8.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Match9', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['match9.py'], configOptions); + TestUtils.validateResults(analysisResults, 0); +}); + +test('Match10', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + configOptions.diagnosticRuleSet.reportMatchNotExhaustive = 'none'; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['match10.py'], 
configOptions); + TestUtils.validateResults(analysisResults1, 0); + + configOptions.diagnosticRuleSet.reportMatchNotExhaustive = 'error'; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['match10.py'], configOptions); + TestUtils.validateResults(analysisResults2, 4); +}); + test('List1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['list1.py']); TestUtils.validateResults(analysisResults, 0); @@ -641,7 +882,7 @@ test('Comparison1', () => { test('EmptyContainers1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['emptyContainers1.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 3); }); test('InitSubclass1', () => { @@ -662,6 +903,12 @@ test('None1', () => { TestUtils.validateResults(analysisResults, 1); }); +test('None2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['none2.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + test('Constructor1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor1.py']); @@ -716,6 +963,37 @@ test('Constructor9', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Constructor10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor10.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor11', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor11.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('Constructor12', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['constructor12.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('InconsistentConstructor1', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.diagnosticRuleSet.reportInconsistentConstructor = 'none'; + let analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentConstructor1.py'], configOptions); + 
TestUtils.validateResults(analysisResults, 0); + + // Enable it as an error. + configOptions.diagnosticRuleSet.reportInconsistentConstructor = 'error'; + analysisResults = TestUtils.typeAnalyzeSampleFiles(['inconsistentConstructor1.py'], configOptions); + TestUtils.validateResults(analysisResults, 2); +}); + test('ClassGetItem1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classGetItem1.py']); @@ -830,6 +1108,12 @@ test('Decorator5', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Decorator6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['decorator6.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('DataclassTransform1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform1.py']); @@ -845,6 +1129,18 @@ test('DataclassTransform2', () => { test('DataclassTransform3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform3.py']); + TestUtils.validateResults(analysisResults, 4); +}); + +test('DataclassTransform4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform4.py']); + + TestUtils.validateResults(analysisResults, 1); +}); + +test('DataclassTransform5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassTransform5.py']); + TestUtils.validateResults(analysisResults, 1); }); diff --git a/packages/pyright-internal/src/tests/typeEvaluator4.test.ts b/packages/pyright-internal/src/tests/typeEvaluator4.test.ts index 27fc855b64a1..35a6066e6cd1 100644 --- a/packages/pyright-internal/src/tests/typeEvaluator4.test.ts +++ b/packages/pyright-internal/src/tests/typeEvaluator4.test.ts @@ -32,6 +32,15 @@ test('Required2', () => { TestUtils.validateResults(analysisResults, 7); }); +test('Required3', () => { + // Analyze with Python 3.10 settings. 
+ const configOptions = new ConfigOptions('.'); + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['required3.py'], configOptions); + + TestUtils.validateResults(analysisResults, 2); +}); + test('Metaclass1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass1.py']); TestUtils.validateResults(analysisResults, 0); @@ -67,6 +76,16 @@ test('Metaclass7', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Metaclass8', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass8.py']); + TestUtils.validateResults(analysisResults, 1); +}); + +test('Metaclass9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['metaclass9.py']); + TestUtils.validateResults(analysisResults, 6); +}); + test('AssignmentExpr1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr1.py']); TestUtils.validateResults(analysisResults, 5); @@ -74,12 +93,12 @@ test('AssignmentExpr1', () => { test('AssignmentExpr2', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr2.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 6); }); test('AssignmentExpr3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr3.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 4); }); test('AssignmentExpr4', () => { @@ -99,7 +118,7 @@ test('AssignmentExpr6', () => { test('AssignmentExpr7', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['assignmentExpr7.py']); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 2); }); test('AssignmentExpr8', () => { @@ -129,7 +148,7 @@ test('Import4', () => { test('Import6', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['import6.py']); - TestUtils.validateResults(analysisResults, 1); 
+ TestUtils.validateResults(analysisResults, 2); }); test('Import7', () => { @@ -243,12 +262,12 @@ test('Overload5', () => { configOptions.diagnosticRuleSet.reportOverlappingOverload = 'error'; analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload5.py'], configOptions); - TestUtils.validateResults(analysisResults, 10); + TestUtils.validateResults(analysisResults, 12); }); test('Overload6', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload6.py']); - TestUtils.validateResults(analysisResults, 1); + TestUtils.validateResults(analysisResults, 2); }); test('Overload7', () => { @@ -266,6 +285,11 @@ test('Overload9', () => { TestUtils.validateResults(analysisResults, 1); }); +test('Overload10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['overload10.py']); + TestUtils.validateResults(analysisResults, 1); +}); + test('Final1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final1.py']); TestUtils.validateResults(analysisResults, 1); @@ -278,7 +302,7 @@ test('Final2', () => { test('Final3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final3.py']); - TestUtils.validateResults(analysisResults, 15); + TestUtils.validateResults(analysisResults, 28); }); test('Final4', () => { @@ -286,6 +310,11 @@ test('Final4', () => { TestUtils.validateResults(analysisResults, 3); }); +test('Final5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['final5.py']); + TestUtils.validateResults(analysisResults, 0); +}); + test('InferredTypes1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['inferredTypes1.py']); TestUtils.validateResults(analysisResults, 0); @@ -395,12 +424,43 @@ test('MemberAccess13', () => { TestUtils.validateResults(analysisResults, 0); }); +test('MemberAccess14', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess14.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess15', () => { 
+ const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess15.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess16.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess17', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess17.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('MemberAccess18', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['memberAccess18.py']); + TestUtils.validateResults(analysisResults, 0); +}); + test('DataClass1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass1.py']); TestUtils.validateResults(analysisResults, 2); }); +test('DataClass2', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass2.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + test('DataClass3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass3.py']); @@ -428,7 +488,7 @@ test('DataClass6', () => { test('DataClass7', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass7.py']); - TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 4); }); test('DataClass8', () => { @@ -481,6 +541,38 @@ test('DataClass15', () => { TestUtils.validateResults(analysisResults, 3); }); +test('DataClass16', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass16.py']); + + TestUtils.validateResults(analysisResults, 0); +}); + +test('DataClass17', () => { + const configOptions = new ConfigOptions('.'); + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclass17.py'], configOptions); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('DataClass18', () => { + const analysisResults = 
TestUtils.typeAnalyzeSampleFiles(['dataclass18.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('DataClassPostInit1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['dataclassPostInit1.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('InitVar1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['initVar1.py']); + + TestUtils.validateResults(analysisResults, 2, 1); +}); + test('Callable1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable1.py']); @@ -505,19 +597,37 @@ test('Callable4', () => { TestUtils.validateResults(analysisResults, 0); }); +test('Callable5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable5.py']); + + TestUtils.validateResults(analysisResults, 2); +}); + +test('Callable6', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['callable6.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + test('ThreePartVersion1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['threePartVersion1.py']); TestUtils.validateResults(analysisResults, 0); }); +test('Generic1', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['generic1.py']); + + TestUtils.validateResults(analysisResults, 9); +}); + test('Unions1', () => { const configOptions = new ConfigOptions('.'); // Analyze with Python 3.9 settings. This will generate errors. configOptions.defaultPythonVersion = PythonVersion.V3_9; const analysisResults3_9 = TestUtils.typeAnalyzeSampleFiles(['unions1.py'], configOptions); - TestUtils.validateResults(analysisResults3_9, 9); + TestUtils.validateResults(analysisResults3_9, 7); // Analyze with Python 3.10 settings. 
configOptions.defaultPythonVersion = PythonVersion.V3_10; @@ -548,12 +658,24 @@ test('Unions3', () => { TestUtils.validateResults(analysisResults3_10, 0); }); +test('Unions4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unions4.py']); + + TestUtils.validateResults(analysisResults, 3); +}); + +test('Unions5', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['unions5.py']); + + TestUtils.validateResults(analysisResults, 6); +}); + test('ParamSpec1', () => { const configOptions = new ConfigOptions('.'); configOptions.defaultPythonVersion = PythonVersion.V3_10; const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec1.py'], configOptions); - TestUtils.validateResults(results, 6); + TestUtils.validateResults(results, 9); }); test('ParamSpec2', () => { @@ -581,7 +703,7 @@ test('ParamSpec4', () => { configOptions.defaultPythonVersion = PythonVersion.V3_10; const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec4.py'], configOptions); - TestUtils.validateResults(results, 5); + TestUtils.validateResults(results, 7); }); test('ParamSpec5', () => { @@ -696,6 +818,102 @@ test('ParamSpec18', () => { TestUtils.validateResults(results, 0); }); +test('ParamSpec19', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec19.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec20', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec20.py'], configOptions); + TestUtils.validateResults(results, 6); +}); + +test('ParamSpec21', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec21.py'], configOptions); + 
TestUtils.validateResults(results, 0); +}); + +test('ParamSpec22', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec22.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec23', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec23.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec24', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec24.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec25', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec25.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec26', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec26.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec27', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec27.py'], configOptions); + TestUtils.validateResults(results, 2); +}); + +test('ParamSpec28', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec28.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + +test('ParamSpec29', () => { + 
const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec29.py'], configOptions); + TestUtils.validateResults(results, 3); +}); + +test('ParamSpec30', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const results = TestUtils.typeAnalyzeSampleFiles(['paramSpec30.py'], configOptions); + TestUtils.validateResults(results, 0); +}); + test('ClassVar1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar1.py']); @@ -711,7 +929,13 @@ test('ClassVar2', () => { test('ClassVar3', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar3.py']); - TestUtils.validateResults(analysisResults, 7); + TestUtils.validateResults(analysisResults, 9); +}); + +test('ClassVar4', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['classVar4.py']); + + TestUtils.validateResults(analysisResults, 2); }); test('TypeVar1', () => { @@ -765,7 +989,7 @@ test('TypeVar8', () => { test('TypeVar9', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['typeVar9.py']); - TestUtils.validateResults(analysisResults, 5); + TestUtils.validateResults(analysisResults, 6); }); test('TypeVar10', () => { @@ -785,17 +1009,17 @@ test('Annotated1', () => { configOptions.defaultPythonVersion = PythonVersion.V3_8; const analysisResults38 = TestUtils.typeAnalyzeSampleFiles(['annotated1.py'], configOptions); - TestUtils.validateResults(analysisResults38, 1); + TestUtils.validateResults(analysisResults38, 2); configOptions.defaultPythonVersion = PythonVersion.V3_9; const analysisResults39 = TestUtils.typeAnalyzeSampleFiles(['annotated1.py'], configOptions); - TestUtils.validateResults(analysisResults39, 2); + TestUtils.validateResults(analysisResults39, 3); }); test('Circular1', () => { const analysisResults = TestUtils.typeAnalyzeSampleFiles(['circular1.py']); - 
TestUtils.validateResults(analysisResults, 2); + TestUtils.validateResults(analysisResults, 0); }); test('TryExcept1', () => { @@ -833,3 +1057,33 @@ test('TryExcept6', () => { TestUtils.validateResults(analysisResults, 1); }); + +test('TryExcept7', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_10; + const analysisResults1 = TestUtils.typeAnalyzeSampleFiles(['tryExcept7.py'], configOptions); + TestUtils.validateResults(analysisResults1, 2); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults2 = TestUtils.typeAnalyzeSampleFiles(['tryExcept7.py'], configOptions); + TestUtils.validateResults(analysisResults2, 0); +}); + +test('TryExcept8', () => { + const configOptions = new ConfigOptions('.'); + + configOptions.defaultPythonVersion = PythonVersion.V3_11; + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept8.py'], configOptions); + TestUtils.validateResults(analysisResults, 3); +}); + +test('TryExcept9', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept9.py']); + TestUtils.validateResults(analysisResults, 0); +}); + +test('TryExcept10', () => { + const analysisResults = TestUtils.typeAnalyzeSampleFiles(['tryExcept10.py']); + TestUtils.validateResults(analysisResults, 1); +}); diff --git a/packages/pyright-internal/src/tests/updateSymbolReference.test.ts b/packages/pyright-internal/src/tests/updateSymbolReference.test.ts new file mode 100644 index 000000000000..4d4b90716f4e --- /dev/null +++ b/packages/pyright-internal/src/tests/updateSymbolReference.test.ts @@ -0,0 +1,1107 @@ +/* + * updateSymbolReference.test.ts + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT license. 
+ * + * Tests Program.moveSymbol + */ + +import { parseAndGetTestState } from './harness/fourslash/testState'; +import { testMoveSymbolAtPosition } from './renameModuleTestUtils'; + +test('move symbol to another file - simple from import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":"moved"|}test|] import foo + `; + + testFromCode(code); +}); + +test('move symbol to another file - nested file', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":"nested.moved"|}test|] import foo + `; + + testFromCode(code); +}); + +test('move symbol to another file - parent file', () => { + const code = ` +// @filename: nested/test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":"moved"|}nested.test|] import foo + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// +//// def stay(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"from moved import foo!n!"|}|]from test import [|{|"r":""|}foo, |]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import with submodules', () => { + const code = ` +// @filename: nested/__init__.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/test.py +//// # empty + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"from moved import foo!n!"|}|]from nested import [|{|"r":""|}foo, |]test + `; + + testFromCode(code); +}); + +test('move symbol to another file - no merge with existing imports', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + 
+// @filename: moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from [|{|"r":"moved"|}test|] import foo +//// from moved import stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - merge with existing imports', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from test import bar[|{|"r":""|}, foo|] +//// from moved import [|{|"r":"foo, "|}|]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import - nested folder', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// +//// def stay(): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"from nested.moved import foo!n!"|}|]from test import [|{|"r":""|}foo, |]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import with submodules - parent folder', () => { + const code = ` +// @filename: nested/__init__.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/test.py +//// # empty + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"from moved import foo!n!"|}|]from nested import [|{|"r":""|}foo, |]test + `; + + testFromCode(code); +}); + +test('move symbol to another file - no merge with existing imports - nested folder', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from [|{|"r":"nested.moved"|}test|] import foo +//// from nested.moved import stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - merge with existing imports - nested folder', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// 
@filename: nested/moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from test import bar[|{|"r":""|}, foo|] +//// from nested.moved import [|{|"r":"foo, "|}|]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import - parent folder', () => { + const code = ` +// @filename: nested/test.py +//// def [|/*marker*/foo|](): pass +//// +//// def stay(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"from moved import foo!n!"|}|]from nested.test import [|{|"r":""|}foo, |]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import with submodules - sibling folder', () => { + const code = ` +// @filename: nested/__init__.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/test.py +//// # empty + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from nested import [|{|"r":""|}foo, |]test[|{|"r":"!n!from nested.moved import foo"|}|] + `; + + testFromCode(code); +}); + +test('move symbol to another file - no merge with existing imports - parent folder', () => { + const code = ` +// @filename: nested/test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from [|{|"r":"moved"|}nested.test|] import foo +//// from moved import stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - merge with existing imports - parent folder', () => { + const code = ` +// @filename: nested/test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from nested.test import bar[|{|"r":""|}, foo|] +//// from moved import [|{|"r":"foo, "|}|]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - simple from import - relative path', () => { + const code = ` +// @filename: test.py +//// 
def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":".moved"|}.test|] import foo + `; + + testFromCode(code); +}); + +test('move symbol to another file - nested file - relative path', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: nested/used.py +//// from [|{|"r":".moved"|}..test|] import foo + `; + + testFromCode(code); +}); + +test('move symbol to another file - parent file - relative path', () => { + const code = ` +// @filename: nested/test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":".moved"|}.nested.test|] import foo + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import - relative path', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// +//// def stay(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: nested/used.py +//// [|{|"r":"from ..moved import foo!n!"|}|]from ..test import [|{|"r":""|}foo, |]stay + `; + + testFromCode(code); +}); + +test('move symbol to another file - multiple import with submodules - relative path', () => { + const code = ` +// @filename: nested/__init__.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/test.py +//// # empty + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"from .moved import foo!n!"|}|]from .nested import [|{|"r":""|}foo, |]test + `; + + testFromCode(code); +}); + +test('move symbol to another file - no merge with existing imports - relative path', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from [|{|"r":".moved"|}.test|] import foo +//// from moved import stay + `; + + 
testFromCode(code); +}); + +test('move symbol to another file - merge with existing imports - relative path', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] +//// def stay(): pass + +// @filename: used.py +//// from .test import bar[|{|"r":""|}, foo|] +//// from .moved import [|{|"r":"foo, "|}|]stay + `; + + testFromCode(code); +}); + +test('member off import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import [|{|"r":"moved"|}test|] +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off import with existing import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":""|}import test +//// |]import moved +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off import with existing import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":""|}import test +//// |]import moved as m +//// [|{|"r":"m"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off import with existing import - multiple imports', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved[|{|"r":""|}, test|] +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off import with existing import - multiple imports with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved as m[|{|"r":""|}, 
test|] +//// [|{|"r":"m"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off from import with existing import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":""|}from . import test +//// |]import moved +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off from import with existing import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":""|}from . import test +//// |]import moved as m +//// [|{|"r":"m"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off from import with existing from import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":""|}from . import test +//// |]from . import moved +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off from import with existing from import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":""|}from . import test +//// |]from . import moved as m +//// [|{|"r":"m"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off from import with existing import - multiple imports', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . 
import moved[|{|"r":""|}, test|] +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off from import with existing import - multiple imports with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . import moved as m[|{|"r":""|}, test|] +//// [|{|"r":"m"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off submodule', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . import [|{|"r":"moved"|}test|] +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off import - dotted name', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import [|{|"r":"nested.moved"|}test|] +//// [|{|"r":"nested.moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off submodule - dotted name', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":".nested"|}.|] import [|{|"r":"moved"|}test|] +//// [|{|"r":"moved"|}test|].foo() + `; + + testFromCode(code); +}); + +test('member off import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import [|{|"r":"moved"|}test|] as t +//// t.foo() + `; + + testFromCode(code); +}); + +test('member off submodule with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . 
import [|{|"r":"moved"|}test|] as test +//// test.foo() + `; + + testFromCode(code); +}); + +test('member off import with alias - dotted name', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: nested/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import [|{|"r":"nested.moved"|}test|] as t +//// t.foo() + `; + + testFromCode(code); +}); + +test('member off submodule with alias - dotted name', () => { + const code = ` +// @filename: nested/test.py +//// def [|/*marker*/foo|](): pass + +// @filename: sub/moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":"sub"|}nested|] import [|{|"r":"moved"|}test|] as test +//// test.foo() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"import moved!n!"|}|]import test +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols - existing import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved +//// import test +//// +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols - existing import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved as m +//// import test +//// +//// [|{|"r":"m"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols with alias - existing import', () => { + const code = ` +// @filename: test.py +//// def 
[|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved +//// import test as t +//// +//// [|{|"r":"moved"|}t|].foo() +//// t.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols with alias - new import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"import moved!n!"|}|]import test as t +//// +//// [|{|"r":"moved"|}t|].foo() +//// t.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols with alias - existing import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved as m +//// import test as t +//// +//// [|{|"r":"m"|}t|].foo() +//// t.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols - existing from import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . import moved +//// import test +//// +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols - existing from import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . 
import moved as m +//// import test +//// +//// [|{|"r":"m"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols - existing from import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . import moved +//// import test +//// +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - multiple symbols - existing from import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . import moved as m +//// import test +//// +//// [|{|"r":"m"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"import moved!n!"|}|]from . import test +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols - existing import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved +//// from . import test +//// +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols - existing import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved as m +//// from . 
import test +//// +//// [|{|"r":"m"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols with alias - existing import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved +//// from . import test as t +//// +//// [|{|"r":"moved"|}t|].foo() +//// t.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols with alias - new import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// [|{|"r":"import moved!n!"|}|]from . import test as t +//// +//// [|{|"r":"moved"|}t|].foo() +//// t.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols with alias - existing import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// import moved as m +//// from . import test as t +//// +//// [|{|"r":"m"|}t|].foo() +//// t.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols - existing from import', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . import moved +//// from . import test +//// +//// [|{|"r":"moved"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off from import - multiple symbols - existing from import with alias', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass +//// def bar(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from . 
import moved as m +//// from . import test +//// +//// [|{|"r":"m"|}test|].foo() +//// test.bar() + `; + + testFromCode(code); +}); + +test('member off import - error case that we dont touch - function return module', () => { + // We could put import in test so test module still has symbol "foo" but + // for now, we won't handle such corner case. + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: test2.py +//// def foo(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from test +//// from test2 +//// def getTestModule(a): +//// return test if a > 0 else test2 +//// +//// getTestModule(1).foo() + `; + + testFromCode(code); +}); + +test('member off import - error case that we dont touch - field return module', () => { + // We could put import in test so test module still has symbol "foo" but + // for now, we won't handle such corner case. + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): pass + +// @filename: test2.py +//// def foo(): pass + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from test +//// from test2 +//// module = test if a > 0 else test2 +//// +//// module.foo() + `; + + testFromCode(code); +}); + +test('simple symbol reference', () => { + const code = ` +// @filename: test.py +//// def [|/*marker*/foo|](): +//// return 1 + +// @filename: moved.py +//// [|/*dest*/|] + +// @filename: used.py +//// from [|{|"r":"moved"|}test|] import foo +//// +//// foo() +//// b = foo().real + `; + + testFromCode(code); +}); + +function testFromCode(code: string) { + const state = parseAndGetTestState(code).state; + + testMoveSymbolAtPosition( + state, + state.getMarkerByName('marker').fileName, + state.getMarkerByName('dest').fileName, + state.getPositionRange('marker').start + ); +} diff --git a/packages/pyright-internal/typeshed-fallback/README.md b/packages/pyright-internal/typeshed-fallback/README.md index e18354d9de5d..afd140b10831 100644 
--- a/packages/pyright-internal/typeshed-fallback/README.md +++ b/packages/pyright-internal/typeshed-fallback/README.md @@ -31,12 +31,11 @@ you can install the type stubs using $ pip install types-six types-requests These PyPI packages follow [PEP 561](http://www.python.org/dev/peps/pep-0561/) -and are automatically generated by typeshed internal machinery. Also starting -from version 0.900 mypy will provide an option to automatically install missing -type stub packages (if found on PyPI). +and are automatically released (multiple times a day, when needed) by +[typeshed internal machinery](https://github.com/typeshed-internal/stub_uploader). -PyCharm, pytype etc. work in a similar way, for more details see documentation -for the type-checking tool you are using. +Type checkers should be able to use these stub packages when installed. For more +details, see the documentation for your type checker. ### The `_typeshed` package diff --git a/packages/pyright-internal/typeshed-fallback/commit.txt b/packages/pyright-internal/typeshed-fallback/commit.txt index b4801dfcff49..d2b141e3bd0a 100644 --- a/packages/pyright-internal/typeshed-fallback/commit.txt +++ b/packages/pyright-internal/typeshed-fallback/commit.txt @@ -1,2 +1 @@ -28eb7c8b4db7858cec6cb359c88712fc21400c2d - +ee8aa1e0120fcc0bd88421e2a5d39fad4243e473 diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/VERSIONS b/packages/pyright-internal/typeshed-fallback/stdlib/VERSIONS index 9b9271117fa7..aa12ba88fb13 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/VERSIONS +++ b/packages/pyright-internal/typeshed-fallback/stdlib/VERSIONS @@ -29,8 +29,8 @@ _compression: 3.6- _csv: 2.7- _curses: 2.7- _decimal: 3.6- -_dummy_thread: 3.6- -_dummy_threading: 2.7- +_dummy_thread: 3.6-3.8 +_dummy_threading: 2.7-3.8 _heapq: 2.7- _imp: 3.6- _json: 2.7- @@ -62,6 +62,15 @@ array: 2.7- ast: 2.7- asynchat: 2.7- asyncio: 3.4- +asyncio.mixins: 3.10- +asyncio.compat: 3.4-3.6 +asyncio.exceptions: 3.8- 
+asyncio.format_helpers: 3.7- +asyncio.runners: 3.7- +asyncio.staggered: 3.8- +asyncio.taskgroups: 3.11- +asyncio.threads: 3.9- +asyncio.trsock: 3.8- asyncore: 2.7- atexit: 2.7- audioop: 2.7- @@ -103,8 +112,10 @@ decimal: 2.7- difflib: 2.7- dis: 2.7- distutils: 2.7- +distutils.command.bdist_msi: 2.7-3.10 +distutils.command.bdist_wininst: 2.7-3.9 doctest: 2.7- -dummy_threading: 2.7- +dummy_threading: 2.7-3.8 email: 2.7- encodings: 2.7- ensurepip: 2.7- @@ -138,6 +149,7 @@ imghdr: 2.7- imp: 2.7- importlib: 2.7- importlib.metadata: 3.8- +importlib.metadata._meta: 3.10- importlib.resources: 3.7- inspect: 2.7- io: 2.7- @@ -162,6 +174,7 @@ modulefinder: 2.7- msilib: 2.7- msvcrt: 2.7- multiprocessing: 2.7- +multiprocessing.shared_memory: 3.8- netrc: 2.7- nis: 2.7- nntplib: 2.7- @@ -259,6 +272,8 @@ typing: 3.5- typing_extensions: 2.7- unicodedata: 2.7- unittest: 2.7- +unittest._log: 3.9- +unittest.async_case: 3.8- urllib: 2.7- uu: 2.7- uuid: 2.7- diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_ast.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_ast.pyi index 08725cfc0b77..eea49588cb95 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_ast.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_ast.pyi @@ -1,18 +1,17 @@ import sys -import typing from typing import Any, ClassVar from typing_extensions import Literal -PyCF_ONLY_AST: int +PyCF_ONLY_AST: Literal[1024] if sys.version_info >= (3, 8): - PyCF_TYPE_COMMENTS: int - PyCF_ALLOW_TOP_LEVEL_AWAIT: int + PyCF_TYPE_COMMENTS: Literal[4096] + PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] _identifier = str class AST: - _attributes: ClassVar[typing.Tuple[str, ...]] - _fields: ClassVar[typing.Tuple[str, ...]] + _attributes: ClassVar[tuple[str, ...]] + _fields: ClassVar[tuple[str, ...]] def __init__(self, *args: Any, **kwargs: Any) -> None: ... # TODO: Not all nodes have all of the following attributes lineno: int @@ -26,8 +25,10 @@ class mod(AST): ... 
if sys.version_info >= (3, 8): class type_ignore(AST): ... + class TypeIgnore(type_ignore): tag: str + class FunctionType(mod): argtypes: list[expr] returns: expr @@ -229,12 +230,16 @@ class JoinedStr(expr): if sys.version_info < (3, 8): class Num(expr): # Deprecated in 3.8; use Constant n: complex + class Str(expr): # Deprecated in 3.8; use Constant s: str + class Bytes(expr): # Deprecated in 3.8; use Constant s: bytes + class NameConstant(expr): # Deprecated in 3.8; use Constant value: Any + class Ellipsis(expr): ... # Deprecated in 3.8; use Constant class Constant(expr): @@ -268,6 +273,7 @@ class Slice(_SliceT): if sys.version_info < (3, 9): class ExtSlice(slice): dims: list[slice] + class Index(slice): value: expr @@ -298,6 +304,7 @@ if sys.version_info < (3, 9): class AugLoad(expr_context): ... class AugStore(expr_context): ... class Param(expr_context): ... + class Suite(mod): body: list[stmt] @@ -381,32 +388,42 @@ if sys.version_info >= (3, 10): class Match(stmt): subject: expr cases: list[match_case] + class pattern(AST): ... 
# Without the alias, Pyright complains variables named pattern are recursively defined _pattern = pattern + class match_case(AST): pattern: _pattern guard: expr | None body: list[stmt] + class MatchValue(pattern): value: expr + class MatchSingleton(pattern): value: Literal[True, False, None] + class MatchSequence(pattern): patterns: list[pattern] + class MatchStar(pattern): name: _identifier | None + class MatchMapping(pattern): keys: list[expr] patterns: list[pattern] rest: _identifier | None + class MatchClass(pattern): cls: expr patterns: list[pattern] kwd_attrs: list[_identifier] kwd_patterns: list[pattern] + class MatchAs(pattern): pattern: _pattern | None name: _identifier | None + class MatchOr(pattern): patterns: list[pattern] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_bisect.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_bisect.pyi index 6da6e7f58823..5608094ccbd6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_bisect.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_bisect.pyi @@ -1,25 +1,73 @@ import sys -from _typeshed import SupportsLessThan -from typing import Callable, MutableSequence, Sequence, TypeVar +from _typeshed import SupportsRichComparisonT +from typing import Callable, MutableSequence, Sequence, TypeVar, overload _T = TypeVar("_T") if sys.version_info >= (3, 10): + @overload def bisect_left( - a: Sequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsLessThan] | None = ... + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ..., *, key: None = ... ) -> int: ... + @overload + def bisect_left( + a: Sequence[_T], + x: SupportsRichComparisonT, + lo: int = ..., + hi: int | None = ..., + *, + key: Callable[[_T], SupportsRichComparisonT] = ..., + ) -> int: ... 
+ @overload + def bisect_right( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ..., *, key: None = ... + ) -> int: ... + @overload def bisect_right( - a: Sequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsLessThan] | None = ... + a: Sequence[_T], + x: SupportsRichComparisonT, + lo: int = ..., + hi: int | None = ..., + *, + key: Callable[[_T], SupportsRichComparisonT] = ..., ) -> int: ... + @overload def insort_left( - a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsLessThan] | None = ... + a: MutableSequence[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = ..., + hi: int | None = ..., + *, + key: None = ..., + ) -> None: ... + @overload + def insort_left( + a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsRichComparisonT] = ... + ) -> None: ... + @overload + def insort_right( + a: MutableSequence[SupportsRichComparisonT], + x: SupportsRichComparisonT, + lo: int = ..., + hi: int | None = ..., + *, + key: None = ..., ) -> None: ... + @overload def insort_right( - a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsLessThan] | None = ... + a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ..., *, key: Callable[[_T], SupportsRichComparisonT] = ... ) -> None: ... else: - def bisect_left(a: Sequence[_T], x: _T, lo: int = ..., hi: int | None = ...) -> int: ... - def bisect_right(a: Sequence[_T], x: _T, lo: int = ..., hi: int | None = ...) -> int: ... - def insort_left(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ...) -> None: ... - def insort_right(a: MutableSequence[_T], x: _T, lo: int = ..., hi: int | None = ...) -> None: ... + def bisect_left( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + ) -> int: ... 
+ def bisect_right( + a: Sequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + ) -> int: ... + def insort_left( + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + ) -> None: ... + def insort_right( + a: MutableSequence[SupportsRichComparisonT], x: SupportsRichComparisonT, lo: int = ..., hi: int | None = ... + ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_codecs.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_codecs.pyi index aa30309cb53c..470722a293a3 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_codecs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_codecs.pyi @@ -1,13 +1,13 @@ import codecs import sys -from typing import Any, Callable, Dict, Tuple, Union +from typing import Any, Callable, Union # This type is not exposed; it is defined in unicodeobject.c class _EncodingMap: def size(self) -> int: ... -_MapT = Union[Dict[int, int], _EncodingMap] -_Handler = Callable[[Exception], Tuple[str, int]] +_MapT = Union[dict[int, int], _EncodingMap] +_Handler = Callable[[Exception], tuple[str, int]] def register(__search_function: Callable[[str], Any]) -> None: ... def register_error(__errors: str, __handler: _Handler) -> None: ... @@ -16,51 +16,63 @@ def lookup_error(__name: str) -> _Handler: ... def decode(obj: Any, encoding: str = ..., errors: str | None = ...) -> Any: ... def encode(obj: Any, encoding: str = ..., errors: str | None = ...) -> Any: ... def charmap_build(__map: str) -> _MapT: ... -def ascii_decode(__data: bytes, __errors: str | None = ...) -> Tuple[str, int]: ... -def ascii_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def charmap_decode(__data: bytes, __errors: str | None = ..., __mapping: _MapT | None = ...) -> Tuple[str, int]: ... -def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _MapT | None = ...) 
-> Tuple[bytes, int]: ... -def escape_decode(__data: str | bytes, __errors: str | None = ...) -> Tuple[str, int]: ... -def escape_encode(__data: bytes, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def latin_1_decode(__data: bytes, __errors: str | None = ...) -> Tuple[str, int]: ... -def latin_1_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> Tuple[str, int]: ... -def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def readbuffer_encode(__data: str | bytes, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> Tuple[str, int]: ... -def unicode_escape_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... +def ascii_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def charmap_decode(__data: bytes, __errors: str | None = ..., __mapping: _MapT | None = ...) -> tuple[str, int]: ... +def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _MapT | None = ...) -> tuple[bytes, int]: ... +def escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... +def latin_1_decode(__data: bytes, __errors: str | None = ...) -> tuple[str, int]: ... +def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + +if sys.version_info >= (3, 9): + def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + +else: + def raw_unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + +def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) 
-> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... + +if sys.version_info >= (3, 9): + def unicode_escape_decode(__data: str | bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... + +else: + def unicode_escape_decode(__data: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + +def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.version_info < (3, 8): - def unicode_internal_decode(__obj: str | bytes, __errors: str | None = ...) -> Tuple[str, int]: ... - def unicode_internal_encode(__obj: str | bytes, __errors: str | None = ...) -> Tuple[bytes, int]: ... + def unicode_internal_decode(__obj: str | bytes, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... -def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_16_be_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def utf_16_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> Tuple[bytes, int]: ... +def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_16_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... def utf_16_ex_decode( __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... -) -> Tuple[str, int, int]: ... -def utf_16_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... 
-def utf_16_le_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def utf_32_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_32_be_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def utf_32_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> Tuple[bytes, int]: ... +) -> tuple[str, int, int]: ... +def utf_16_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_32_be_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_32_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... def utf_32_ex_decode( __data: bytes, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... -) -> Tuple[str, int, int]: ... -def utf_32_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_32_le_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def utf_7_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_7_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def utf_8_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... -def utf_8_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... +) -> tuple[str, int, int]: ... +def utf_32_le_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... 
+def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_7_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_8_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... if sys.platform == "win32": - def mbcs_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... - def mbcs_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... - def code_page_decode(__codepage: int, __data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... - def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... - def oem_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> Tuple[str, int]: ... - def oem_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... + def mbcs_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def mbcs_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def code_page_decode(__codepage: int, __data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def oem_decode(__data: bytes, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_collections_abc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_collections_abc.pyi index 27d5234432f3..bd8d35641b37 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_collections_abc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_collections_abc.pyi @@ -1,3 +1,5 @@ +import sys +from types import MappingProxyType from typing import ( AbstractSet as Set, AsyncGenerator as AsyncGenerator, @@ -10,6 +12,7 @@ from typing import ( Container as Container, Coroutine as Coroutine, Generator as Generator, + Generic, Hashable as Hashable, ItemsView as ItemsView, Iterable as Iterable, @@ -23,8 +26,10 @@ from typing import ( Reversible as Reversible, Sequence as Sequence, Sized as Sized, + TypeVar, ValuesView as ValuesView, ) +from typing_extensions import final __all__ = [ "Awaitable", @@ -53,3 +58,24 @@ __all__ = [ "MutableSequence", "ByteString", ] + +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. +_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. + +@final +class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_compat_pickle.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_compat_pickle.pyi index ba6c88a03035..50fb22442cc9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_compat_pickle.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_compat_pickle.pyi @@ -1,10 +1,8 @@ -from typing import Tuple - IMPORT_MAPPING: dict[str, str] -NAME_MAPPING: dict[Tuple[str, str], Tuple[str, str]] -PYTHON2_EXCEPTIONS: Tuple[str, ...] -MULTIPROCESSING_EXCEPTIONS: Tuple[str, ...] +NAME_MAPPING: dict[tuple[str, str], tuple[str, str]] +PYTHON2_EXCEPTIONS: tuple[str, ...] +MULTIPROCESSING_EXCEPTIONS: tuple[str, ...] REVERSE_IMPORT_MAPPING: dict[str, str] -REVERSE_NAME_MAPPING: dict[Tuple[str, str], Tuple[str, str]] -PYTHON3_OSERROR_EXCEPTIONS: Tuple[str, ...] -PYTHON3_IMPORTERROR_EXCEPTIONS: Tuple[str, ...] +REVERSE_NAME_MAPPING: dict[tuple[str, str], tuple[str, str]] +PYTHON3_OSERROR_EXCEPTIONS: tuple[str, ...] +PYTHON3_IMPORTERROR_EXCEPTIONS: tuple[str, ...] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi index 8f81847ff492..e71f7d14bd2b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_compression.pyi @@ -1,8 +1,8 @@ from _typeshed import WriteableBuffer -from io import BufferedIOBase, RawIOBase -from typing import Any, Callable, Protocol, Tuple, Type +from io import DEFAULT_BUFFER_SIZE, BufferedIOBase, RawIOBase +from typing import Any, Callable, Protocol -BUFFER_SIZE: Any +BUFFER_SIZE = DEFAULT_BUFFER_SIZE class _Reader(Protocol): def read(self, __n: int) -> bytes: ... @@ -16,7 +16,7 @@ class DecompressReader(RawIOBase): self, fp: _Reader, decomp_factory: Callable[..., object], - trailing_error: Type[Exception] | Tuple[Type[Exception], ...] 
= ..., + trailing_error: type[Exception] | tuple[type[Exception], ...] = ..., **decomp_args: Any, ) -> None: ... def readable(self) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_csv.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_csv.pyi index 1dc43780f687..161a89778de8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_csv.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_csv.pyi @@ -1,9 +1,12 @@ -from typing import Any, Iterable, Iterator, List, Protocol, Type, Union +from typing import Any, Iterable, Iterator, Protocol, Union +from typing_extensions import Literal -QUOTE_ALL: int -QUOTE_MINIMAL: int -QUOTE_NONE: int -QUOTE_NONNUMERIC: int +__version__: str + +QUOTE_ALL: Literal[1] +QUOTE_MINIMAL: Literal[0] +QUOTE_NONE: Literal[3] +QUOTE_NONNUMERIC: Literal[2] class Error(Exception): ... @@ -18,9 +21,9 @@ class Dialect: strict: int def __init__(self) -> None: ... -_DialectLike = Union[str, Dialect, Type[Dialect]] +_DialectLike = Union[str, Dialect, type[Dialect]] -class _reader(Iterator[List[str]]): +class _reader(Iterator[list[str]]): dialect: Dialect line_num: int def __next__(self) -> list[str]: ... @@ -31,7 +34,7 @@ class _writer: def writerows(self, rows: Iterable[Iterable[Any]]) -> None: ... class _Writer(Protocol): - def write(self, s: str) -> Any: ... + def write(self, __s: str) -> object: ... def writer(csvfile: _Writer, dialect: _DialectLike = ..., **fmtparams: Any) -> _writer: ... def reader(csvfile: Iterable[str], dialect: _DialectLike = ..., **fmtparams: Any) -> _reader: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_curses.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_curses.pyi index e4fc2a8f68f0..7022097628fc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_curses.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_curses.pyi @@ -1,548 +1,555 @@ import sys -from typing import IO, Any, BinaryIO, NamedTuple, Tuple, Union, overload +from _typeshed import SupportsRead +from typing import IO, Any, NamedTuple, Union, overload +from typing_extensions import final -_chtype = Union[str, bytes, int] +if sys.platform != "win32": + _chtype = Union[str, bytes, int] -# ACS codes are only initialized after initscr is called -ACS_BBSS: int -ACS_BLOCK: int -ACS_BOARD: int -ACS_BSBS: int -ACS_BSSB: int -ACS_BSSS: int -ACS_BTEE: int -ACS_BULLET: int -ACS_CKBOARD: int -ACS_DARROW: int -ACS_DEGREE: int -ACS_DIAMOND: int -ACS_GEQUAL: int -ACS_HLINE: int -ACS_LANTERN: int -ACS_LARROW: int -ACS_LEQUAL: int -ACS_LLCORNER: int -ACS_LRCORNER: int -ACS_LTEE: int -ACS_NEQUAL: int -ACS_PI: int -ACS_PLMINUS: int -ACS_PLUS: int -ACS_RARROW: int -ACS_RTEE: int -ACS_S1: int -ACS_S3: int -ACS_S7: int -ACS_S9: int -ACS_SBBS: int -ACS_SBSB: int -ACS_SBSS: int -ACS_SSBB: int -ACS_SSBS: int -ACS_SSSB: int -ACS_SSSS: int -ACS_STERLING: int -ACS_TTEE: int -ACS_UARROW: int -ACS_ULCORNER: int -ACS_URCORNER: int -ACS_VLINE: int -ALL_MOUSE_EVENTS: int -A_ALTCHARSET: int -A_ATTRIBUTES: int -A_BLINK: int -A_BOLD: int -A_CHARTEXT: int -A_COLOR: int -A_DIM: int -A_HORIZONTAL: int -A_INVIS: int -if sys.version_info >= (3, 7): - A_ITALIC: int -A_LEFT: int -A_LOW: int -A_NORMAL: int -A_PROTECT: int -A_REVERSE: int -A_RIGHT: int -A_STANDOUT: int -A_TOP: int -A_UNDERLINE: int -A_VERTICAL: int -BUTTON1_CLICKED: int -BUTTON1_DOUBLE_CLICKED: int -BUTTON1_PRESSED: int -BUTTON1_RELEASED: int -BUTTON1_TRIPLE_CLICKED: int -BUTTON2_CLICKED: int -BUTTON2_DOUBLE_CLICKED: int -BUTTON2_PRESSED: int -BUTTON2_RELEASED: int 
-BUTTON2_TRIPLE_CLICKED: int -BUTTON3_CLICKED: int -BUTTON3_DOUBLE_CLICKED: int -BUTTON3_PRESSED: int -BUTTON3_RELEASED: int -BUTTON3_TRIPLE_CLICKED: int -BUTTON4_CLICKED: int -BUTTON4_DOUBLE_CLICKED: int -BUTTON4_PRESSED: int -BUTTON4_RELEASED: int -BUTTON4_TRIPLE_CLICKED: int -# Darwin ncurses doesn't provide BUTTON5_* constants -if sys.version_info >= (3, 10) and sys.platform != "darwin": - BUTTON5_PRESSED: int - BUTTON5_RELEASED: int - BUTTON5_CLICKED: int - BUTTON5_DOUBLE_CLICKED: int - BUTTON5_TRIPLE_CLICKED: int -BUTTON_ALT: int -BUTTON_CTRL: int -BUTTON_SHIFT: int -COLOR_BLACK: int -COLOR_BLUE: int -COLOR_CYAN: int -COLOR_GREEN: int -COLOR_MAGENTA: int -COLOR_RED: int -COLOR_WHITE: int -COLOR_YELLOW: int -ERR: int -KEY_A1: int -KEY_A3: int -KEY_B2: int -KEY_BACKSPACE: int -KEY_BEG: int -KEY_BREAK: int -KEY_BTAB: int -KEY_C1: int -KEY_C3: int -KEY_CANCEL: int -KEY_CATAB: int -KEY_CLEAR: int -KEY_CLOSE: int -KEY_COMMAND: int -KEY_COPY: int -KEY_CREATE: int -KEY_CTAB: int -KEY_DC: int -KEY_DL: int -KEY_DOWN: int -KEY_EIC: int -KEY_END: int -KEY_ENTER: int -KEY_EOL: int -KEY_EOS: int -KEY_EXIT: int -KEY_F0: int -KEY_F1: int -KEY_F10: int -KEY_F11: int -KEY_F12: int -KEY_F13: int -KEY_F14: int -KEY_F15: int -KEY_F16: int -KEY_F17: int -KEY_F18: int -KEY_F19: int -KEY_F2: int -KEY_F20: int -KEY_F21: int -KEY_F22: int -KEY_F23: int -KEY_F24: int -KEY_F25: int -KEY_F26: int -KEY_F27: int -KEY_F28: int -KEY_F29: int -KEY_F3: int -KEY_F30: int -KEY_F31: int -KEY_F32: int -KEY_F33: int -KEY_F34: int -KEY_F35: int -KEY_F36: int -KEY_F37: int -KEY_F38: int -KEY_F39: int -KEY_F4: int -KEY_F40: int -KEY_F41: int -KEY_F42: int -KEY_F43: int -KEY_F44: int -KEY_F45: int -KEY_F46: int -KEY_F47: int -KEY_F48: int -KEY_F49: int -KEY_F5: int -KEY_F50: int -KEY_F51: int -KEY_F52: int -KEY_F53: int -KEY_F54: int -KEY_F55: int -KEY_F56: int -KEY_F57: int -KEY_F58: int -KEY_F59: int -KEY_F6: int -KEY_F60: int -KEY_F61: int -KEY_F62: int -KEY_F63: int -KEY_F7: int -KEY_F8: int 
-KEY_F9: int -KEY_FIND: int -KEY_HELP: int -KEY_HOME: int -KEY_IC: int -KEY_IL: int -KEY_LEFT: int -KEY_LL: int -KEY_MARK: int -KEY_MAX: int -KEY_MESSAGE: int -KEY_MIN: int -KEY_MOUSE: int -KEY_MOVE: int -KEY_NEXT: int -KEY_NPAGE: int -KEY_OPEN: int -KEY_OPTIONS: int -KEY_PPAGE: int -KEY_PREVIOUS: int -KEY_PRINT: int -KEY_REDO: int -KEY_REFERENCE: int -KEY_REFRESH: int -KEY_REPLACE: int -KEY_RESET: int -KEY_RESIZE: int -KEY_RESTART: int -KEY_RESUME: int -KEY_RIGHT: int -KEY_SAVE: int -KEY_SBEG: int -KEY_SCANCEL: int -KEY_SCOMMAND: int -KEY_SCOPY: int -KEY_SCREATE: int -KEY_SDC: int -KEY_SDL: int -KEY_SELECT: int -KEY_SEND: int -KEY_SEOL: int -KEY_SEXIT: int -KEY_SF: int -KEY_SFIND: int -KEY_SHELP: int -KEY_SHOME: int -KEY_SIC: int -KEY_SLEFT: int -KEY_SMESSAGE: int -KEY_SMOVE: int -KEY_SNEXT: int -KEY_SOPTIONS: int -KEY_SPREVIOUS: int -KEY_SPRINT: int -KEY_SR: int -KEY_SREDO: int -KEY_SREPLACE: int -KEY_SRESET: int -KEY_SRIGHT: int -KEY_SRSUME: int -KEY_SSAVE: int -KEY_SSUSPEND: int -KEY_STAB: int -KEY_SUNDO: int -KEY_SUSPEND: int -KEY_UNDO: int -KEY_UP: int -OK: int -REPORT_MOUSE_POSITION: int -_C_API: Any -version: bytes + # ACS codes are only initialized after initscr is called + ACS_BBSS: int + ACS_BLOCK: int + ACS_BOARD: int + ACS_BSBS: int + ACS_BSSB: int + ACS_BSSS: int + ACS_BTEE: int + ACS_BULLET: int + ACS_CKBOARD: int + ACS_DARROW: int + ACS_DEGREE: int + ACS_DIAMOND: int + ACS_GEQUAL: int + ACS_HLINE: int + ACS_LANTERN: int + ACS_LARROW: int + ACS_LEQUAL: int + ACS_LLCORNER: int + ACS_LRCORNER: int + ACS_LTEE: int + ACS_NEQUAL: int + ACS_PI: int + ACS_PLMINUS: int + ACS_PLUS: int + ACS_RARROW: int + ACS_RTEE: int + ACS_S1: int + ACS_S3: int + ACS_S7: int + ACS_S9: int + ACS_SBBS: int + ACS_SBSB: int + ACS_SBSS: int + ACS_SSBB: int + ACS_SSBS: int + ACS_SSSB: int + ACS_SSSS: int + ACS_STERLING: int + ACS_TTEE: int + ACS_UARROW: int + ACS_ULCORNER: int + ACS_URCORNER: int + ACS_VLINE: int + ALL_MOUSE_EVENTS: int + A_ALTCHARSET: int + A_ATTRIBUTES: int + 
A_BLINK: int + A_BOLD: int + A_CHARTEXT: int + A_COLOR: int + A_DIM: int + A_HORIZONTAL: int + A_INVIS: int + if sys.version_info >= (3, 7): + A_ITALIC: int + A_LEFT: int + A_LOW: int + A_NORMAL: int + A_PROTECT: int + A_REVERSE: int + A_RIGHT: int + A_STANDOUT: int + A_TOP: int + A_UNDERLINE: int + A_VERTICAL: int + BUTTON1_CLICKED: int + BUTTON1_DOUBLE_CLICKED: int + BUTTON1_PRESSED: int + BUTTON1_RELEASED: int + BUTTON1_TRIPLE_CLICKED: int + BUTTON2_CLICKED: int + BUTTON2_DOUBLE_CLICKED: int + BUTTON2_PRESSED: int + BUTTON2_RELEASED: int + BUTTON2_TRIPLE_CLICKED: int + BUTTON3_CLICKED: int + BUTTON3_DOUBLE_CLICKED: int + BUTTON3_PRESSED: int + BUTTON3_RELEASED: int + BUTTON3_TRIPLE_CLICKED: int + BUTTON4_CLICKED: int + BUTTON4_DOUBLE_CLICKED: int + BUTTON4_PRESSED: int + BUTTON4_RELEASED: int + BUTTON4_TRIPLE_CLICKED: int + # Darwin ncurses doesn't provide BUTTON5_* constants + if sys.version_info >= (3, 10) and sys.platform != "darwin": + BUTTON5_PRESSED: int + BUTTON5_RELEASED: int + BUTTON5_CLICKED: int + BUTTON5_DOUBLE_CLICKED: int + BUTTON5_TRIPLE_CLICKED: int + BUTTON_ALT: int + BUTTON_CTRL: int + BUTTON_SHIFT: int + COLOR_BLACK: int + COLOR_BLUE: int + COLOR_CYAN: int + COLOR_GREEN: int + COLOR_MAGENTA: int + COLOR_RED: int + COLOR_WHITE: int + COLOR_YELLOW: int + ERR: int + KEY_A1: int + KEY_A3: int + KEY_B2: int + KEY_BACKSPACE: int + KEY_BEG: int + KEY_BREAK: int + KEY_BTAB: int + KEY_C1: int + KEY_C3: int + KEY_CANCEL: int + KEY_CATAB: int + KEY_CLEAR: int + KEY_CLOSE: int + KEY_COMMAND: int + KEY_COPY: int + KEY_CREATE: int + KEY_CTAB: int + KEY_DC: int + KEY_DL: int + KEY_DOWN: int + KEY_EIC: int + KEY_END: int + KEY_ENTER: int + KEY_EOL: int + KEY_EOS: int + KEY_EXIT: int + KEY_F0: int + KEY_F1: int + KEY_F10: int + KEY_F11: int + KEY_F12: int + KEY_F13: int + KEY_F14: int + KEY_F15: int + KEY_F16: int + KEY_F17: int + KEY_F18: int + KEY_F19: int + KEY_F2: int + KEY_F20: int + KEY_F21: int + KEY_F22: int + KEY_F23: int + KEY_F24: int + KEY_F25: int 
+ KEY_F26: int + KEY_F27: int + KEY_F28: int + KEY_F29: int + KEY_F3: int + KEY_F30: int + KEY_F31: int + KEY_F32: int + KEY_F33: int + KEY_F34: int + KEY_F35: int + KEY_F36: int + KEY_F37: int + KEY_F38: int + KEY_F39: int + KEY_F4: int + KEY_F40: int + KEY_F41: int + KEY_F42: int + KEY_F43: int + KEY_F44: int + KEY_F45: int + KEY_F46: int + KEY_F47: int + KEY_F48: int + KEY_F49: int + KEY_F5: int + KEY_F50: int + KEY_F51: int + KEY_F52: int + KEY_F53: int + KEY_F54: int + KEY_F55: int + KEY_F56: int + KEY_F57: int + KEY_F58: int + KEY_F59: int + KEY_F6: int + KEY_F60: int + KEY_F61: int + KEY_F62: int + KEY_F63: int + KEY_F7: int + KEY_F8: int + KEY_F9: int + KEY_FIND: int + KEY_HELP: int + KEY_HOME: int + KEY_IC: int + KEY_IL: int + KEY_LEFT: int + KEY_LL: int + KEY_MARK: int + KEY_MAX: int + KEY_MESSAGE: int + KEY_MIN: int + KEY_MOUSE: int + KEY_MOVE: int + KEY_NEXT: int + KEY_NPAGE: int + KEY_OPEN: int + KEY_OPTIONS: int + KEY_PPAGE: int + KEY_PREVIOUS: int + KEY_PRINT: int + KEY_REDO: int + KEY_REFERENCE: int + KEY_REFRESH: int + KEY_REPLACE: int + KEY_RESET: int + KEY_RESIZE: int + KEY_RESTART: int + KEY_RESUME: int + KEY_RIGHT: int + KEY_SAVE: int + KEY_SBEG: int + KEY_SCANCEL: int + KEY_SCOMMAND: int + KEY_SCOPY: int + KEY_SCREATE: int + KEY_SDC: int + KEY_SDL: int + KEY_SELECT: int + KEY_SEND: int + KEY_SEOL: int + KEY_SEXIT: int + KEY_SF: int + KEY_SFIND: int + KEY_SHELP: int + KEY_SHOME: int + KEY_SIC: int + KEY_SLEFT: int + KEY_SMESSAGE: int + KEY_SMOVE: int + KEY_SNEXT: int + KEY_SOPTIONS: int + KEY_SPREVIOUS: int + KEY_SPRINT: int + KEY_SR: int + KEY_SREDO: int + KEY_SREPLACE: int + KEY_SRESET: int + KEY_SRIGHT: int + KEY_SRSUME: int + KEY_SSAVE: int + KEY_SSUSPEND: int + KEY_STAB: int + KEY_SUNDO: int + KEY_SUSPEND: int + KEY_UNDO: int + KEY_UP: int + OK: int + REPORT_MOUSE_POSITION: int + _C_API: Any + version: bytes + def baudrate() -> int: ... + def beep() -> None: ... + def can_change_color() -> bool: ... + def cbreak(__flag: bool = ...) 
-> None: ... + def color_content(__color_number: int) -> tuple[int, int, int]: ... + # Changed in Python 3.8.8 and 3.9.2 + if sys.version_info >= (3, 8): + def color_pair(pair_number: int) -> int: ... + else: + def color_pair(__color_number: int) -> int: ... -def baudrate() -> int: ... -def beep() -> None: ... -def can_change_color() -> bool: ... -def cbreak(__flag: bool = ...) -> None: ... -def color_content(__color_number: int) -> Tuple[int, int, int]: ... + def curs_set(__visibility: int) -> int: ... + def def_prog_mode() -> None: ... + def def_shell_mode() -> None: ... + def delay_output(__ms: int) -> None: ... + def doupdate() -> None: ... + def echo(__flag: bool = ...) -> None: ... + def endwin() -> None: ... + def erasechar() -> bytes: ... + def filter() -> None: ... + def flash() -> None: ... + def flushinp() -> None: ... + if sys.version_info >= (3, 9): + def get_escdelay() -> int: ... + def get_tabsize() -> int: ... -# Changed in Python 3.8.8 and 3.9.2 -if sys.version_info >= (3, 8): - def color_pair(pair_number: int) -> int: ... + def getmouse() -> tuple[int, int, int, int, int]: ... + def getsyx() -> tuple[int, int]: ... + def getwin(__file: SupportsRead[bytes]) -> _CursesWindow: ... + def halfdelay(__tenths: int) -> None: ... + def has_colors() -> bool: ... + if sys.version_info >= (3, 10): + def has_extended_color_support() -> bool: ... -else: - def color_pair(__color_number: int) -> int: ... + def has_ic() -> bool: ... + def has_il() -> bool: ... + def has_key(__key: int) -> bool: ... + def init_color(__color_number: int, __r: int, __g: int, __b: int) -> None: ... + def init_pair(__pair_number: int, __fg: int, __bg: int) -> None: ... + def initscr() -> _CursesWindow: ... + def intrflush(__flag: bool) -> None: ... + def is_term_resized(__nlines: int, __ncols: int) -> bool: ... + def isendwin() -> bool: ... + def keyname(__key: int) -> bytes: ... + def killchar() -> bytes: ... + def longname() -> bytes: ... + def meta(__yes: bool) -> None: ... 
+ def mouseinterval(__interval: int) -> None: ... + def mousemask(__newmask: int) -> tuple[int, int]: ... + def napms(__ms: int) -> int: ... + def newpad(__nlines: int, __ncols: int) -> _CursesWindow: ... + def newwin(__nlines: int, __ncols: int, __begin_y: int = ..., __begin_x: int = ...) -> _CursesWindow: ... + def nl(__flag: bool = ...) -> None: ... + def nocbreak() -> None: ... + def noecho() -> None: ... + def nonl() -> None: ... + def noqiflush() -> None: ... + def noraw() -> None: ... + def pair_content(__pair_number: int) -> tuple[int, int]: ... + def pair_number(__attr: int) -> int: ... + def putp(__string: bytes) -> None: ... + def qiflush(__flag: bool = ...) -> None: ... + def raw(__flag: bool = ...) -> None: ... + def reset_prog_mode() -> None: ... + def reset_shell_mode() -> None: ... + def resetty() -> None: ... + def resize_term(__nlines: int, __ncols: int) -> None: ... + def resizeterm(__nlines: int, __ncols: int) -> None: ... + def savetty() -> None: ... + if sys.version_info >= (3, 9): + def set_escdelay(__ms: int) -> None: ... + def set_tabsize(__size: int) -> None: ... -def curs_set(__visibility: int) -> int: ... -def def_prog_mode() -> None: ... -def def_shell_mode() -> None: ... -def delay_output(__ms: int) -> None: ... -def doupdate() -> None: ... -def echo(__flag: bool = ...) -> None: ... -def endwin() -> None: ... -def erasechar() -> bytes: ... -def filter() -> None: ... -def flash() -> None: ... -def flushinp() -> None: ... -def getmouse() -> Tuple[int, int, int, int, int]: ... -def getsyx() -> Tuple[int, int]: ... -def getwin(__file: BinaryIO) -> _CursesWindow: ... -def halfdelay(__tenths: int) -> None: ... -def has_colors() -> bool: ... + def setsyx(__y: int, __x: int) -> None: ... + def setupterm(term: str | None = ..., fd: int = ...) -> None: ... + def start_color() -> None: ... + def termattrs() -> int: ... + def termname() -> bytes: ... + def tigetflag(__capname: str) -> int: ... + def tigetnum(__capname: str) -> int: ... 
+ def tigetstr(__capname: str) -> bytes | None: ... + def tparm( + __str: bytes, + __i1: int = ..., + __i2: int = ..., + __i3: int = ..., + __i4: int = ..., + __i5: int = ..., + __i6: int = ..., + __i7: int = ..., + __i8: int = ..., + __i9: int = ..., + ) -> bytes: ... + def typeahead(__fd: int) -> None: ... + def unctrl(__ch: _chtype) -> bytes: ... + def unget_wch(__ch: int | str) -> None: ... + def ungetch(__ch: _chtype) -> None: ... + def ungetmouse(__id: int, __x: int, __y: int, __z: int, __bstate: int) -> None: ... + def update_lines_cols() -> None: ... + def use_default_colors() -> None: ... + def use_env(__flag: bool) -> None: ... -if sys.version_info >= (3, 10): - def has_extended_color_support() -> bool: ... + class error(Exception): ... -def has_ic() -> bool: ... -def has_il() -> bool: ... -def has_key(__key: int) -> bool: ... -def init_color(__color_number: int, __r: int, __g: int, __b: int) -> None: ... -def init_pair(__pair_number: int, __fg: int, __bg: int) -> None: ... -def initscr() -> _CursesWindow: ... -def intrflush(__flag: bool) -> None: ... -def is_term_resized(__nlines: int, __ncols: int) -> bool: ... -def isendwin() -> bool: ... -def keyname(__key: int) -> bytes: ... -def killchar() -> bytes: ... -def longname() -> bytes: ... -def meta(__yes: bool) -> None: ... -def mouseinterval(__interval: int) -> None: ... -def mousemask(__newmask: int) -> Tuple[int, int]: ... -def napms(__ms: int) -> int: ... -def newpad(__nlines: int, __ncols: int) -> _CursesWindow: ... -def newwin(__nlines: int, __ncols: int, __begin_y: int = ..., __begin_x: int = ...) -> _CursesWindow: ... -def nl(__flag: bool = ...) -> None: ... -def nocbreak() -> None: ... -def noecho() -> None: ... -def nonl() -> None: ... -def noqiflush() -> None: ... -def noraw() -> None: ... -def pair_content(__pair_number: int) -> Tuple[int, int]: ... -def pair_number(__attr: int) -> int: ... -def putp(__string: bytes) -> None: ... -def qiflush(__flag: bool = ...) -> None: ... 
-def raw(__flag: bool = ...) -> None: ... -def reset_prog_mode() -> None: ... -def reset_shell_mode() -> None: ... -def resetty() -> None: ... -def resize_term(__nlines: int, __ncols: int) -> None: ... -def resizeterm(__nlines: int, __ncols: int) -> None: ... -def savetty() -> None: ... -def setsyx(__y: int, __x: int) -> None: ... -def setupterm(term: str | None = ..., fd: int = ...) -> None: ... -def start_color() -> None: ... -def termattrs() -> int: ... -def termname() -> bytes: ... -def tigetflag(__capname: str) -> int: ... -def tigetnum(__capname: str) -> int: ... -def tigetstr(__capname: str) -> bytes: ... -def tparm( - __str: bytes, - __i1: int = ..., - __i2: int = ..., - __i3: int = ..., - __i4: int = ..., - __i5: int = ..., - __i6: int = ..., - __i7: int = ..., - __i8: int = ..., - __i9: int = ..., -) -> bytes: ... -def typeahead(__fd: int) -> None: ... -def unctrl(__ch: _chtype) -> bytes: ... -def unget_wch(__ch: int | str) -> None: ... -def ungetch(__ch: _chtype) -> None: ... -def ungetmouse(__id: int, __x: int, __y: int, __z: int, __bstate: int) -> None: ... -def update_lines_cols() -> int: ... -def use_default_colors() -> None: ... -def use_env(__flag: bool) -> None: ... - -class error(Exception): ... - -class _CursesWindow: - encoding: str - @overload - def addch(self, ch: _chtype, attr: int = ...) -> None: ... - @overload - def addch(self, y: int, x: int, ch: _chtype, attr: int = ...) -> None: ... - @overload - def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... - @overload - def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... - @overload - def addstr(self, str: str, attr: int = ...) -> None: ... - @overload - def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... - def attroff(self, __attr: int) -> None: ... - def attron(self, __attr: int) -> None: ... - def attrset(self, __attr: int) -> None: ... - def bkgd(self, __ch: _chtype, __attr: int = ...) -> None: ... 
- def bkgdset(self, __ch: _chtype, __attr: int = ...) -> None: ... - def border( - self, - ls: _chtype = ..., - rs: _chtype = ..., - ts: _chtype = ..., - bs: _chtype = ..., - tl: _chtype = ..., - tr: _chtype = ..., - bl: _chtype = ..., - br: _chtype = ..., - ) -> None: ... - @overload - def box(self) -> None: ... - @overload - def box(self, vertch: _chtype = ..., horch: _chtype = ...) -> None: ... - @overload - def chgat(self, attr: int) -> None: ... - @overload - def chgat(self, num: int, attr: int) -> None: ... - @overload - def chgat(self, y: int, x: int, attr: int) -> None: ... - @overload - def chgat(self, y: int, x: int, num: int, attr: int) -> None: ... - def clear(self) -> None: ... - def clearok(self, yes: int) -> None: ... - def clrtobot(self) -> None: ... - def clrtoeol(self) -> None: ... - def cursyncup(self) -> None: ... - @overload - def delch(self) -> None: ... - @overload - def delch(self, y: int, x: int) -> None: ... - def deleteln(self) -> None: ... - @overload - def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... - @overload - def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... - def echochar(self, __ch: _chtype, __attr: int = ...) -> None: ... - def enclose(self, __y: int, __x: int) -> bool: ... - def erase(self) -> None: ... - def getbegyx(self) -> Tuple[int, int]: ... - def getbkgd(self) -> Tuple[int, int]: ... - @overload - def getch(self) -> int: ... - @overload - def getch(self, y: int, x: int) -> int: ... - @overload - def get_wch(self) -> int | str: ... - @overload - def get_wch(self, y: int, x: int) -> int | str: ... - @overload - def getkey(self) -> str: ... - @overload - def getkey(self, y: int, x: int) -> str: ... - def getmaxyx(self) -> Tuple[int, int]: ... - def getparyx(self) -> Tuple[int, int]: ... - @overload - def getstr(self) -> _chtype: ... - @overload - def getstr(self, n: int) -> _chtype: ... - @overload - def getstr(self, y: int, x: int) -> _chtype: ... 
- @overload - def getstr(self, y: int, x: int, n: int) -> _chtype: ... - def getyx(self) -> Tuple[int, int]: ... - @overload - def hline(self, ch: _chtype, n: int) -> None: ... - @overload - def hline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... - def idcok(self, flag: bool) -> None: ... - def idlok(self, yes: bool) -> None: ... - def immedok(self, flag: bool) -> None: ... - @overload - def inch(self) -> _chtype: ... - @overload - def inch(self, y: int, x: int) -> _chtype: ... - @overload - def insch(self, ch: _chtype, attr: int = ...) -> None: ... - @overload - def insch(self, y: int, x: int, ch: _chtype, attr: int = ...) -> None: ... - def insdelln(self, nlines: int) -> None: ... - def insertln(self) -> None: ... - @overload - def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... - @overload - def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... - @overload - def insstr(self, str: str, attr: int = ...) -> None: ... - @overload - def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... - @overload - def instr(self, n: int = ...) -> _chtype: ... - @overload - def instr(self, y: int, x: int, n: int = ...) -> _chtype: ... - def is_linetouched(self, __line: int) -> bool: ... - def is_wintouched(self) -> bool: ... - def keypad(self, yes: bool) -> None: ... - def leaveok(self, yes: bool) -> None: ... - def move(self, new_y: int, new_x: int) -> None: ... - def mvderwin(self, y: int, x: int) -> None: ... - def mvwin(self, new_y: int, new_x: int) -> None: ... - def nodelay(self, yes: bool) -> None: ... - def notimeout(self, yes: bool) -> None: ... - @overload - def noutrefresh(self) -> None: ... - @overload - def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... - @overload - def overlay(self, destwin: _CursesWindow) -> None: ... 
- @overload - def overlay( - self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int - ) -> None: ... - @overload - def overwrite(self, destwin: _CursesWindow) -> None: ... - @overload - def overwrite( - self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int - ) -> None: ... - def putwin(self, __file: IO[Any]) -> None: ... - def redrawln(self, __beg: int, __num: int) -> None: ... - def redrawwin(self) -> None: ... - @overload - def refresh(self) -> None: ... - @overload - def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... - def resize(self, nlines: int, ncols: int) -> None: ... - def scroll(self, lines: int = ...) -> None: ... - def scrollok(self, flag: bool) -> None: ... - def setscrreg(self, __top: int, __bottom: int) -> None: ... - def standend(self) -> None: ... - def standout(self) -> None: ... - @overload - def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ... - @overload - def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... - @overload - def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... - @overload - def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... - def syncdown(self) -> None: ... - def syncok(self, flag: bool) -> None: ... - def syncup(self) -> None: ... - def timeout(self, delay: int) -> None: ... - def touchline(self, start: int, count: int, changed: bool = ...) -> None: ... - def touchwin(self) -> None: ... - def untouchwin(self) -> None: ... - @overload - def vline(self, ch: _chtype, n: int) -> None: ... - @overload - def vline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... 
- -if sys.version_info >= (3, 8): - class _ncurses_version(NamedTuple): - major: int - minor: int - patch: int - ncurses_version: _ncurses_version - window = _CursesWindow # undocumented + @final + class _CursesWindow: + encoding: str + @overload + def addch(self, ch: _chtype, attr: int = ...) -> None: ... + @overload + def addch(self, y: int, x: int, ch: _chtype, attr: int = ...) -> None: ... + @overload + def addnstr(self, str: str, n: int, attr: int = ...) -> None: ... + @overload + def addnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + @overload + def addstr(self, str: str, attr: int = ...) -> None: ... + @overload + def addstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... + def attroff(self, __attr: int) -> None: ... + def attron(self, __attr: int) -> None: ... + def attrset(self, __attr: int) -> None: ... + def bkgd(self, __ch: _chtype, __attr: int = ...) -> None: ... + def bkgdset(self, __ch: _chtype, __attr: int = ...) -> None: ... + def border( + self, + ls: _chtype = ..., + rs: _chtype = ..., + ts: _chtype = ..., + bs: _chtype = ..., + tl: _chtype = ..., + tr: _chtype = ..., + bl: _chtype = ..., + br: _chtype = ..., + ) -> None: ... + @overload + def box(self) -> None: ... + @overload + def box(self, vertch: _chtype = ..., horch: _chtype = ...) -> None: ... + @overload + def chgat(self, attr: int) -> None: ... + @overload + def chgat(self, num: int, attr: int) -> None: ... + @overload + def chgat(self, y: int, x: int, attr: int) -> None: ... + @overload + def chgat(self, y: int, x: int, num: int, attr: int) -> None: ... + def clear(self) -> None: ... + def clearok(self, yes: int) -> None: ... + def clrtobot(self) -> None: ... + def clrtoeol(self) -> None: ... + def cursyncup(self) -> None: ... + @overload + def delch(self) -> None: ... + @overload + def delch(self, y: int, x: int) -> None: ... + def deleteln(self) -> None: ... + @overload + def derwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... 
+ @overload + def derwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + def echochar(self, __ch: _chtype, __attr: int = ...) -> None: ... + def enclose(self, __y: int, __x: int) -> bool: ... + def erase(self) -> None: ... + def getbegyx(self) -> tuple[int, int]: ... + def getbkgd(self) -> tuple[int, int]: ... + @overload + def getch(self) -> int: ... + @overload + def getch(self, y: int, x: int) -> int: ... + @overload + def get_wch(self) -> int | str: ... + @overload + def get_wch(self, y: int, x: int) -> int | str: ... + @overload + def getkey(self) -> str: ... + @overload + def getkey(self, y: int, x: int) -> str: ... + def getmaxyx(self) -> tuple[int, int]: ... + def getparyx(self) -> tuple[int, int]: ... + @overload + def getstr(self) -> bytes: ... + @overload + def getstr(self, n: int) -> bytes: ... + @overload + def getstr(self, y: int, x: int) -> bytes: ... + @overload + def getstr(self, y: int, x: int, n: int) -> bytes: ... + def getyx(self) -> tuple[int, int]: ... + @overload + def hline(self, ch: _chtype, n: int) -> None: ... + @overload + def hline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... + def idcok(self, flag: bool) -> None: ... + def idlok(self, yes: bool) -> None: ... + def immedok(self, flag: bool) -> None: ... + @overload + def inch(self) -> int: ... + @overload + def inch(self, y: int, x: int) -> int: ... + @overload + def insch(self, ch: _chtype, attr: int = ...) -> None: ... + @overload + def insch(self, y: int, x: int, ch: _chtype, attr: int = ...) -> None: ... + def insdelln(self, nlines: int) -> None: ... + def insertln(self) -> None: ... + @overload + def insnstr(self, str: str, n: int, attr: int = ...) -> None: ... + @overload + def insnstr(self, y: int, x: int, str: str, n: int, attr: int = ...) -> None: ... + @overload + def insstr(self, str: str, attr: int = ...) -> None: ... + @overload + def insstr(self, y: int, x: int, str: str, attr: int = ...) -> None: ... 
+ @overload + def instr(self, n: int = ...) -> bytes: ... + @overload + def instr(self, y: int, x: int, n: int = ...) -> bytes: ... + def is_linetouched(self, __line: int) -> bool: ... + def is_wintouched(self) -> bool: ... + def keypad(self, yes: bool) -> None: ... + def leaveok(self, yes: bool) -> None: ... + def move(self, new_y: int, new_x: int) -> None: ... + def mvderwin(self, y: int, x: int) -> None: ... + def mvwin(self, new_y: int, new_x: int) -> None: ... + def nodelay(self, yes: bool) -> None: ... + def notimeout(self, yes: bool) -> None: ... + @overload + def noutrefresh(self) -> None: ... + @overload + def noutrefresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + @overload + def overlay(self, destwin: _CursesWindow) -> None: ... + @overload + def overlay( + self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: ... + @overload + def overwrite(self, destwin: _CursesWindow) -> None: ... + @overload + def overwrite( + self, destwin: _CursesWindow, sminrow: int, smincol: int, dminrow: int, dmincol: int, dmaxrow: int, dmaxcol: int + ) -> None: ... + def putwin(self, __file: IO[Any]) -> None: ... + def redrawln(self, __beg: int, __num: int) -> None: ... + def redrawwin(self) -> None: ... + @overload + def refresh(self) -> None: ... + @overload + def refresh(self, pminrow: int, pmincol: int, sminrow: int, smincol: int, smaxrow: int, smaxcol: int) -> None: ... + def resize(self, nlines: int, ncols: int) -> None: ... + def scroll(self, lines: int = ...) -> None: ... + def scrollok(self, flag: bool) -> None: ... + def setscrreg(self, __top: int, __bottom: int) -> None: ... + def standend(self) -> None: ... + def standout(self) -> None: ... + @overload + def subpad(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + @overload + def subpad(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... 
+ @overload + def subwin(self, begin_y: int, begin_x: int) -> _CursesWindow: ... + @overload + def subwin(self, nlines: int, ncols: int, begin_y: int, begin_x: int) -> _CursesWindow: ... + def syncdown(self) -> None: ... + def syncok(self, flag: bool) -> None: ... + def syncup(self) -> None: ... + def timeout(self, delay: int) -> None: ... + def touchline(self, start: int, count: int, changed: bool = ...) -> None: ... + def touchwin(self) -> None: ... + def untouchwin(self) -> None: ... + @overload + def vline(self, ch: _chtype, n: int) -> None: ... + @overload + def vline(self, y: int, x: int, ch: _chtype, n: int) -> None: ... + if sys.version_info >= (3, 8): + class _ncurses_version(NamedTuple): + major: int + minor: int + patch: int + ncurses_version: _ncurses_version + window = _CursesWindow # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_decimal.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_decimal.pyi index e58805855b13..de49a787283d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_decimal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_decimal.pyi @@ -1 +1,3 @@ from decimal import * + +__libmpdec_version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_thread.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_thread.pyi index 886d9d739780..6e936726a48f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_thread.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_thread.pyi @@ -1,15 +1,15 @@ -from typing import Any, Callable, NoReturn, Tuple +from typing import Any, Callable, NoReturn TIMEOUT_MAX: int error = RuntimeError -def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: dict[str, Any] = ...) -> None: ... +def start_new_thread(function: Callable[..., Any], args: tuple[Any, ...], kwargs: dict[str, Any] = ...) -> None: ... def exit() -> NoReturn: ... def get_ident() -> int: ... 
def allocate_lock() -> LockType: ... def stack_size(size: int | None = ...) -> int: ... -class LockType(object): +class LockType: locked_status: bool def __init__(self) -> None: ... def acquire(self, waitflag: bool | None = ..., timeout: int = ...) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_threading.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_threading.pyi index 64998d86bf9f..f63bec448f90 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_threading.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_dummy_threading.pyi @@ -1,6 +1,6 @@ import sys from types import FrameType, TracebackType -from typing import Any, Callable, Iterable, Mapping, Optional, Type, TypeVar +from typing import Any, Callable, Iterable, Mapping, Optional, TypeVar # TODO recursive type _TF = Callable[[FrameType, str, Any], Optional[Callable[..., Any]]] @@ -8,7 +8,56 @@ _TF = Callable[[FrameType, str, Any], Optional[Callable[..., Any]]] _PF = Callable[[FrameType, str, Any], None] _T = TypeVar("_T") -__all__: list[str] +if sys.version_info >= (3, 8): + __all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", + "excepthook", + "ExceptHookArgs", + ] +else: + __all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", + ] def active_count() -> int: ... def current_thread() -> Thread: ... @@ -16,10 +65,6 @@ def currentThread() -> Thread: ... def get_ident() -> int: ... 
def enumerate() -> list[Thread]: ... def main_thread() -> Thread: ... - -if sys.version_info >= (3, 8): - from _thread import get_native_id as get_native_id - def settrace(func: _TF) -> None: ... def setprofile(func: _PF | None) -> None: ... def stack_size(size: int = ...) -> int: ... @@ -28,7 +73,7 @@ TIMEOUT_MAX: float class ThreadError(Exception): ... -class local(object): +class local: def __getattribute__(self, name: str) -> Any: ... def __setattr__(self, name: str, value: Any) -> None: ... def __delattr__(self, name: str) -> None: ... @@ -55,9 +100,11 @@ class Thread: if sys.version_info >= (3, 8): @property def native_id(self) -> int | None: ... # only available on some platforms + def is_alive(self) -> bool: ... if sys.version_info < (3, 9): def isAlive(self) -> bool: ... + def isDaemon(self) -> bool: ... def setDaemon(self, daemonic: bool) -> None: ... @@ -67,7 +114,7 @@ class Lock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... @@ -77,7 +124,7 @@ class _RLock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... @@ -88,7 +135,7 @@ class Condition: def __init__(self, lock: Lock | _RLock | None = ...) -> None: ... def __enter__(self) -> bool: ... 
def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... @@ -101,7 +148,7 @@ class Condition: class Semaphore: def __init__(self, value: int = ...) -> None: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... def __enter__(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_heapq.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_heapq.pyi index 87e0fe0fa777..90dc28deb71f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_heapq.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_heapq.pyi @@ -2,6 +2,8 @@ from typing import Any, TypeVar _T = TypeVar("_T") +__about__: str + def heapify(__heap: list[Any]) -> None: ... def heappop(__heap: list[_T]) -> _T: ... def heappush(__heap: list[_T], __item: _T) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_imp.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_imp.pyi index b61c9f29b96d..23272591df4c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_imp.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_imp.pyi @@ -1,7 +1,11 @@ +import sys import types from importlib.machinery import ModuleSpec from typing import Any +if sys.version_info >= (3, 7): + check_hash_based_pycs: str + def create_builtin(__spec: ModuleSpec) -> types.ModuleType: ... 
def create_dynamic(__spec: ModuleSpec, __file: Any = ...) -> None: ... def acquire_lock() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_json.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_json.pyi index f807a85b93f4..36b0ed679502 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_json.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_json.pyi @@ -1,5 +1,7 @@ -from typing import Any, Callable, Tuple +from typing import Any, Callable +from typing_extensions import final +@final class make_encoder: sort_keys: Any skipkeys: Any @@ -23,6 +25,7 @@ class make_encoder: ) -> None: ... def __call__(self, obj: object, _current_indent_level: int) -> Any: ... +@final class make_scanner: object_hook: Any object_pairs_hook: Any @@ -32,7 +35,7 @@ class make_scanner: strict: bool # TODO: 'context' needs the attrs above (ducktype), but not __call__. def __init__(self, context: make_scanner) -> None: ... - def __call__(self, string: str, index: int) -> Tuple[Any, int]: ... + def __call__(self, string: str, index: int) -> tuple[Any, int]: ... def encode_basestring_ascii(s: str) -> str: ... -def scanstring(string: str, end: int, strict: bool = ...) -> Tuple[str, int]: ... +def scanstring(string: str, end: int, strict: bool = ...) -> tuple[str, int]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_markupbase.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_markupbase.pyi index d8bc79f34e8c..2c497f65bb43 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_markupbase.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_markupbase.pyi @@ -1,8 +1,17 @@ -from typing import Tuple +import sys +from typing import Any class ParserBase: def __init__(self) -> None: ... - def error(self, message: str) -> None: ... def reset(self) -> None: ... - def getpos(self) -> Tuple[int, int]: ... + def getpos(self) -> tuple[int, int]: ... def unknown_decl(self, data: str) -> None: ... 
+ def parse_comment(self, i: int, report: int = ...) -> int: ... # undocumented + def parse_declaration(self, i: int) -> int: ... # undocumented + def parse_marked_section(self, i: int, report: int = ...) -> int: ... # undocumented + def updatepos(self, i: int, j: int) -> int: ... # undocumented + if sys.version_info < (3, 10): + # Removed from ParserBase: https://bugs.python.org/issue31844 + def error(self, message: str) -> Any: ... # undocumented + lineno: int # undocumented + offset: int # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_msi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_msi.pyi index 754febe68da9..b7e852f38ae9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_msi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_msi.pyi @@ -10,8 +10,8 @@ if sys.platform == "win32": def Modify(self, mode: int, record: _Record) -> None: ... def Close(self) -> None: ... # Don't exist at runtime - __new__: None # type: ignore - __init__: None # type: ignore + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] # Actual typename Summary, not exposed by the implementation class _Summary: def GetProperty(self, propid: int) -> str | bytes | None: ... @@ -19,8 +19,8 @@ if sys.platform == "win32": def SetProperty(self, propid: int, value: str | bytes) -> None: ... def Persist(self) -> None: ... # Don't exist at runtime - __new__: None # type: ignore - __init__: None # type: ignore + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] # Actual typename Database, not exposed by the implementation class _Database: def OpenView(self, sql: str) -> _View: ... @@ -28,8 +28,8 @@ if sys.platform == "win32": def GetSummaryInformation(self, updateCount: int) -> _Summary: ... def Close(self) -> None: ... 
# Don't exist at runtime - __new__: None # type: ignore - __init__: None # type: ignore + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] # Actual typename Record, not exposed by the implementation class _Record: def GetFieldCount(self) -> int: ... @@ -40,8 +40,8 @@ if sys.platform == "win32": def SetInteger(self, field: int, int: int) -> None: ... def ClearData(self) -> None: ... # Don't exist at runtime - __new__: None # type: ignore - __init__: None # type: ignore + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] def UuidCreate() -> str: ... def FCICreate(cabname: str, files: list[str]) -> None: ... def OpenDatabase(name: str, flags: int) -> _Database: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_operator.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_operator.pyi index bea438861886..375d8e4ddfbf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_operator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_operator.pyi @@ -1,60 +1,158 @@ -# In reality the import is the other way around, but this way we can keep the operator stub in 2and3 -from operator import ( - abs as abs, - add as add, - and_ as and_, - attrgetter as attrgetter, - concat as concat, - contains as contains, - countOf as countOf, - delitem as delitem, - eq as eq, - floordiv as floordiv, - ge as ge, - getitem as getitem, - gt as gt, - iadd as iadd, - iand as iand, - iconcat as iconcat, - ifloordiv as ifloordiv, - ilshift as ilshift, - imatmul as imatmul, - imod as imod, - imul as imul, - index as index, - indexOf as indexOf, - inv as inv, - invert as invert, - ior as ior, - ipow as ipow, - irshift as irshift, - is_ as is_, - is_not as is_not, - isub as isub, - itemgetter as itemgetter, - itruediv as itruediv, - ixor as ixor, - le as le, - length_hint as length_hint, - lshift as lshift, - lt as lt, - matmul as matmul, - methodcaller as methodcaller, - mod as mod, - 
mul as mul, - ne as ne, - neg as neg, - not_ as not_, - or_ as or_, - pos as pos, - pow as pow, - rshift as rshift, - setitem as setitem, - sub as sub, - truediv as truediv, - truth as truth, - xor as xor, +import sys +from typing import ( + Any, + AnyStr, + Callable, + Container, + Generic, + Iterable, + Mapping, + MutableMapping, + MutableSequence, + Protocol, + Sequence, + SupportsAbs, + TypeVar, + overload, ) -from typing import AnyStr +from typing_extensions import ParamSpec, SupportsIndex, final + +_R = TypeVar("_R") +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_K = TypeVar("_K") +_V = TypeVar("_V") +_P = ParamSpec("_P") + +# The following protocols return "Any" instead of bool, since the comparison +# operators can be overloaded to return an arbitrary object. For example, +# the numpy.array comparison dunders return another numpy.array. + +class _SupportsDunderLT(Protocol): + def __lt__(self, __other: Any) -> Any: ... + +class _SupportsDunderGT(Protocol): + def __gt__(self, __other: Any) -> Any: ... + +class _SupportsDunderLE(Protocol): + def __le__(self, __other: Any) -> Any: ... + +class _SupportsDunderGE(Protocol): + def __ge__(self, __other: Any) -> Any: ... + +_SupportsComparison = _SupportsDunderLE | _SupportsDunderGE | _SupportsDunderGT | _SupportsDunderLT + +class _SupportsInversion(Protocol[_T_co]): + def __invert__(self) -> _T_co: ... + +class _SupportsNeg(Protocol[_T_co]): + def __neg__(self) -> _T_co: ... + +class _SupportsPos(Protocol[_T_co]): + def __pos__(self) -> _T_co: ... + +# All four comparison functions must have the same signature, or we get false-positive errors +def lt(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def le(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def eq(__a: object, __b: object) -> Any: ... +def ne(__a: object, __b: object) -> Any: ... +def ge(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... 
+def gt(__a: _SupportsComparison, __b: _SupportsComparison) -> Any: ... +def not_(__a: object) -> bool: ... +def truth(__a: object) -> bool: ... +def is_(__a: object, __b: object) -> bool: ... +def is_not(__a: object, __b: object) -> bool: ... +def abs(__a: SupportsAbs[_T]) -> _T: ... +def add(__a: Any, __b: Any) -> Any: ... +def and_(__a: Any, __b: Any) -> Any: ... +def floordiv(__a: Any, __b: Any) -> Any: ... +def index(__a: SupportsIndex) -> int: ... +def inv(__a: _SupportsInversion[_T_co]) -> _T_co: ... +def invert(__a: _SupportsInversion[_T_co]) -> _T_co: ... +def lshift(__a: Any, __b: Any) -> Any: ... +def mod(__a: Any, __b: Any) -> Any: ... +def mul(__a: Any, __b: Any) -> Any: ... +def matmul(__a: Any, __b: Any) -> Any: ... +def neg(__a: _SupportsNeg[_T_co]) -> _T_co: ... +def or_(__a: Any, __b: Any) -> Any: ... +def pos(__a: _SupportsPos[_T_co]) -> _T_co: ... +def pow(__a: Any, __b: Any) -> Any: ... +def rshift(__a: Any, __b: Any) -> Any: ... +def sub(__a: Any, __b: Any) -> Any: ... +def truediv(__a: Any, __b: Any) -> Any: ... +def xor(__a: Any, __b: Any) -> Any: ... +def concat(__a: Sequence[_T], __b: Sequence[_T]) -> Sequence[_T]: ... +def contains(__a: Container[object], __b: object) -> bool: ... +def countOf(__a: Iterable[object], __b: object) -> int: ... +@overload +def delitem(__a: MutableSequence[Any], __b: SupportsIndex) -> None: ... +@overload +def delitem(__a: MutableSequence[Any], __b: slice) -> None: ... +@overload +def delitem(__a: MutableMapping[_K, Any], __b: _K) -> None: ... +@overload +def getitem(__a: Sequence[_T], __b: SupportsIndex) -> _T: ... +@overload +def getitem(__a: Sequence[_T], __b: slice) -> Sequence[_T]: ... +@overload +def getitem(__a: Mapping[_K, _V], __b: _K) -> _V: ... +def indexOf(__a: Iterable[_T], __b: _T) -> int: ... +@overload +def setitem(__a: MutableSequence[_T], __b: SupportsIndex, __c: _T) -> None: ... +@overload +def setitem(__a: MutableSequence[_T], __b: slice, __c: Sequence[_T]) -> None: ... 
+@overload +def setitem(__a: MutableMapping[_K, _V], __b: _K, __c: _V) -> None: ... +def length_hint(__obj: object, __default: int = ...) -> int: ... +@final +class attrgetter(Generic[_T_co]): + @overload + def __new__(cls, attr: str) -> attrgetter[Any]: ... + @overload + def __new__(cls, attr: str, __attr2: str) -> attrgetter[tuple[Any, Any]]: ... + @overload + def __new__(cls, attr: str, __attr2: str, __attr3: str) -> attrgetter[tuple[Any, Any, Any]]: ... + @overload + def __new__(cls, attr: str, __attr2: str, __attr3: str, __attr4: str) -> attrgetter[tuple[Any, Any, Any, Any]]: ... + @overload + def __new__(cls, attr: str, *attrs: str) -> attrgetter[tuple[Any, ...]]: ... + def __call__(self, obj: Any) -> _T_co: ... + +@final +class itemgetter(Generic[_T_co]): + @overload + def __new__(cls, item: Any) -> itemgetter[Any]: ... + @overload + def __new__(cls, item: Any, __item2: Any) -> itemgetter[tuple[Any, Any]]: ... + @overload + def __new__(cls, item: Any, __item2: Any, __item3: Any) -> itemgetter[tuple[Any, Any, Any]]: ... + @overload + def __new__(cls, item: Any, __item2: Any, __item3: Any, __item4: Any) -> itemgetter[tuple[Any, Any, Any, Any]]: ... + @overload + def __new__(cls, item: Any, *items: Any) -> itemgetter[tuple[Any, ...]]: ... + def __call__(self, obj: Any) -> _T_co: ... + +@final +class methodcaller: + def __init__(self, __name: str, *args: Any, **kwargs: Any) -> None: ... + def __call__(self, obj: Any) -> Any: ... + +def iadd(__a: Any, __b: Any) -> Any: ... +def iand(__a: Any, __b: Any) -> Any: ... +def iconcat(__a: Any, __b: Any) -> Any: ... +def ifloordiv(__a: Any, __b: Any) -> Any: ... +def ilshift(__a: Any, __b: Any) -> Any: ... +def imod(__a: Any, __b: Any) -> Any: ... +def imul(__a: Any, __b: Any) -> Any: ... +def imatmul(__a: Any, __b: Any) -> Any: ... +def ior(__a: Any, __b: Any) -> Any: ... +def ipow(__a: Any, __b: Any) -> Any: ... +def irshift(__a: Any, __b: Any) -> Any: ... +def isub(__a: Any, __b: Any) -> Any: ... 
+def itruediv(__a: Any, __b: Any) -> Any: ... +def ixor(__a: Any, __b: Any) -> Any: ... + +if sys.version_info >= (3, 11): + def call(__obj: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... def _compare_digest(__a: AnyStr, __b: AnyStr) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_osx_support.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_osx_support.pyi index f03c37d1011a..cb43fa93bb80 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_osx_support.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_osx_support.pyi @@ -1,17 +1,24 @@ -from typing import Iterable, Sequence, Tuple, TypeVar +import sys +from typing import Iterable, Sequence, TypeVar _T = TypeVar("_T") _K = TypeVar("_K") _V = TypeVar("_V") -__all__: list[str] +__all__ = ["compiler_fixup", "customize_config_vars", "customize_compiler", "get_platform_osx"] -_UNIVERSAL_CONFIG_VARS: Tuple[str, ...] # undocumented -_COMPILER_CONFIG_VARS: Tuple[str, ...] # undocumented +_UNIVERSAL_CONFIG_VARS: tuple[str, ...] # undocumented +_COMPILER_CONFIG_VARS: tuple[str, ...] # undocumented _INITPRE: str # undocumented def _find_executable(executable: str, path: str | None = ...) -> str | None: ... # undocumented -def _read_output(commandstring: str) -> str | None: ... # undocumented + +if sys.version_info >= (3, 8): + def _read_output(commandstring: str, capture_stderr: bool = ...) -> str | None: ... # undocumented + +else: + def _read_output(commandstring: str) -> str | None: ... # undocumented + def _find_build_tool(toolname: str) -> str: ... # undocumented _SYSTEM_VERSION: str | None # undocumented @@ -30,4 +37,4 @@ def customize_config_vars(_config_vars: dict[str, str]) -> dict[str, str]: ... def customize_compiler(_config_vars: dict[str, str]) -> dict[str, str]: ... def get_platform_osx( _config_vars: dict[str, str], osname: _T, release: _K, machine: _V -) -> Tuple[str | _T, str | _K, str | _V]: ... 
+) -> tuple[str | _T, str | _K, str | _V]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_posixsubprocess.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_posixsubprocess.pyi index 05209ba05b9b..5481100cacfc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_posixsubprocess.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_posixsubprocess.pyi @@ -1,24 +1,24 @@ -# NOTE: These are incomplete! +import sys +from typing import Callable, Sequence -from typing import Callable, Sequence, Tuple - -def cloexec_pipe() -> Tuple[int, int]: ... -def fork_exec( - args: Sequence[str], - executable_list: Sequence[bytes], - close_fds: bool, - fds_to_keep: Sequence[int], - cwd: str, - env_list: Sequence[bytes], - p2cread: int, - p2cwrite: int, - c2pred: int, - c2pwrite: int, - errread: int, - errwrite: int, - errpipe_read: int, - errpipe_write: int, - restore_signals: int, - start_new_session: int, - preexec_fn: Callable[[], None], -) -> int: ... +if sys.platform != "win32": + def cloexec_pipe() -> tuple[int, int]: ... + def fork_exec( + args: Sequence[str], + executable_list: Sequence[bytes], + close_fds: bool, + fds_to_keep: Sequence[int], + cwd: str, + env_list: Sequence[bytes], + p2cread: int, + p2cwrite: int, + c2pred: int, + c2pwrite: int, + errread: int, + errwrite: int, + errpipe_read: int, + errpipe_write: int, + restore_signals: int, + start_new_session: int, + preexec_fn: Callable[[], None], + ) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_py_abc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_py_abc.pyi index 8d7938918271..ddf04364a238 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_py_abc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_py_abc.pyi @@ -1,10 +1,12 @@ -from typing import Any, Tuple, Type, TypeVar +from _typeshed import Self +from typing import Any, NewType, TypeVar _T = TypeVar("_T") -# TODO: Change the return into a NewType bound to int after pytype/#597 -def get_cache_token() -> object: ... +_CacheToken = NewType("_CacheToken", int) + +def get_cache_token() -> _CacheToken: ... class ABCMeta(type): - def __new__(__mcls, __name: str, __bases: Tuple[Type[Any], ...], __namespace: dict[str, Any]) -> ABCMeta: ... - def register(cls, subclass: Type[_T]) -> Type[_T]: ... + def __new__(__mcls: type[Self], __name: str, __bases: tuple[type[Any], ...], __namespace: dict[str, Any]) -> Self: ... + def register(cls, subclass: type[_T]) -> type[_T]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_random.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_random.pyi index fa80c6d98144..9aff4b3cb026 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_random.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_random.pyi @@ -1,9 +1,7 @@ -from typing import Tuple - # Actually Tuple[(int,) * 625] -_State = Tuple[int, ...] +_State = tuple[int, ...] -class Random(object): +class Random: def __init__(self, seed: object = ...) -> None: ... def seed(self, __n: object = ...) -> None: ... def getstate(self) -> _State: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_socket.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_socket.pyi index 7945c662e820..c948e246d4da 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_socket.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_socket.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import ReadableBuffer, WriteableBuffer from collections.abc import Iterable -from typing import Any, SupportsInt, Tuple, Union, overload +from typing import Any, SupportsInt, Union, overload if sys.version_info >= (3, 8): from typing import SupportsIndex @@ -10,12 +10,12 @@ if sys.version_info >= (3, 8): else: _FD = SupportsInt -_CMSG = Tuple[int, int, bytes] -_CMSGArg = Tuple[int, int, ReadableBuffer] +_CMSG = tuple[int, int, bytes] +_CMSGArg = tuple[int, int, ReadableBuffer] # Addresses can be either tuples of varying lengths (AF_INET, AF_INET6, # AF_NETLINK, AF_TIPC) or strings (AF_UNIX). -_Address = Union[Tuple[Any, ...], str] +_Address = Union[tuple[Any, ...], str] _RetAddress = Any # TODO Most methods allow bytes as address objects @@ -325,6 +325,7 @@ if sys.platform == "linux" and sys.version_info >= (3, 7): if sys.platform == "linux" and sys.version_info >= (3, 9): CAN_J1939: int + CAN_RAW_JOIN_FILTERS: int J1939_MAX_UNICAST_ADDR: int J1939_IDLE_ADDR: int @@ -355,6 +356,9 @@ if sys.platform == "linux" and sys.version_info >= (3, 9): J1939_FILTER_MAX: int +if sys.platform == "linux" and sys.version_info >= (3, 10): + IPPROTO_MPTCP: int + if sys.platform == "linux": AF_PACKET: int PF_PACKET: int @@ -526,6 +530,8 @@ class socket: family: int type: int proto: int + @property + def timeout(self) -> float | None: ... def __init__(self, family: int = ..., type: int = ..., proto: int = ..., fileno: _FD | None = ...) -> None: ... def bind(self, __address: _Address | bytes) -> None: ... def close(self) -> None: ... 
@@ -541,9 +547,11 @@ class socket: def getsockopt(self, __level: int, __optname: int, __buflen: int) -> bytes: ... if sys.version_info >= (3, 7): def getblocking(self) -> bool: ... + def gettimeout(self) -> float | None: ... if sys.platform == "win32": def ioctl(self, __control: int, __option: int | tuple[int, int, int] | bool) -> None: ... + def listen(self, __backlog: int = ...) -> None: ... def recv(self, __bufsize: int, __flags: int = ...) -> bytes: ... def recvfrom(self, __bufsize: int, __flags: int = ...) -> tuple[bytes, _RetAddress]: ... @@ -552,6 +560,7 @@ class socket: def recvmsg_into( self, __buffers: Iterable[WriteableBuffer], __ancbufsize: int = ..., __flags: int = ... ) -> tuple[int, list[_CMSG], int, Any]: ... + def recvfrom_into(self, buffer: WriteableBuffer, nbytes: int = ..., flags: int = ...) -> tuple[int, _RetAddress]: ... def recv_into(self, buffer: WriteableBuffer, nbytes: int = ..., flags: int = ...) -> int: ... def send(self, __data: ReadableBuffer, __flags: int = ...) -> int: ... @@ -572,6 +581,7 @@ class socket: def sendmsg_afalg( self, msg: Iterable[ReadableBuffer] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> int: ... + def setblocking(self, __flag: bool) -> None: ... def settimeout(self, __value: float | None) -> None: ... @overload @@ -580,6 +590,7 @@ class socket: def setsockopt(self, __level: int, __optname: int, __value: None, __optlen: int) -> None: ... if sys.platform == "win32": def share(self, __process_id: int) -> bytes: ... + def shutdown(self, __how: int) -> None: ... 
SocketType = socket diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_stat.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_stat.pyi index 634f7da02563..83d832e4dd8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_stat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_stat.pyi @@ -1,54 +1,68 @@ -SF_APPEND: int -SF_ARCHIVED: int -SF_IMMUTABLE: int -SF_NOUNLINK: int -SF_SNAPSHOT: int -ST_ATIME: int -ST_CTIME: int -ST_DEV: int -ST_GID: int -ST_INO: int -ST_MODE: int -ST_MTIME: int -ST_NLINK: int -ST_SIZE: int -ST_UID: int -S_ENFMT: int -S_IEXEC: int -S_IFBLK: int -S_IFCHR: int -S_IFDIR: int +import sys +from typing_extensions import Literal + +SF_APPEND: Literal[0x00040000] +SF_ARCHIVED: Literal[0x00010000] +SF_IMMUTABLE: Literal[0x00020000] +SF_NOUNLINK: Literal[0x00100000] +SF_SNAPSHOT: Literal[0x00200000] + +ST_MODE: Literal[0] +ST_INO: Literal[1] +ST_DEV: Literal[2] +ST_NLINK: Literal[3] +ST_UID: Literal[4] +ST_GID: Literal[5] +ST_SIZE: Literal[6] +ST_ATIME: Literal[7] +ST_MTIME: Literal[8] +ST_CTIME: Literal[9] + +S_IFIFO: Literal[0o010000] +S_IFLNK: Literal[0o120000] +S_IFREG: Literal[0o100000] +S_IFSOCK: Literal[0o140000] +S_IFBLK: Literal[0o060000] +S_IFCHR: Literal[0o020000] +S_IFDIR: Literal[0o040000] + +# These are 0 on systems that don't support the specific kind of file. +# Example: Linux doesn't support door files, so S_IFDOOR is 0 on linux. 
S_IFDOOR: int -S_IFIFO: int -S_IFLNK: int S_IFPORT: int -S_IFREG: int -S_IFSOCK: int S_IFWHT: int -S_IREAD: int -S_IRGRP: int -S_IROTH: int -S_IRUSR: int -S_IRWXG: int -S_IRWXO: int -S_IRWXU: int -S_ISGID: int -S_ISUID: int -S_ISVTX: int -S_IWGRP: int -S_IWOTH: int -S_IWRITE: int -S_IWUSR: int -S_IXGRP: int -S_IXOTH: int -S_IXUSR: int -UF_APPEND: int -UF_COMPRESSED: int -UF_HIDDEN: int -UF_IMMUTABLE: int -UF_NODUMP: int -UF_NOUNLINK: int -UF_OPAQUE: int + +S_ISUID: Literal[0o4000] +S_ISGID: Literal[0o2000] +S_ISVTX: Literal[0o1000] + +S_IRWXU: Literal[0o0700] +S_IRUSR: Literal[0o0400] +S_IWUSR: Literal[0o0200] +S_IXUSR: Literal[0o0100] + +S_IRWXG: Literal[0o0070] +S_IRGRP: Literal[0o0040] +S_IWGRP: Literal[0o0020] +S_IXGRP: Literal[0o0010] + +S_IRWXO: Literal[0o0007] +S_IROTH: Literal[0o0004] +S_IWOTH: Literal[0o0002] +S_IXOTH: Literal[0o0001] + +S_ENFMT: Literal[0o2000] +S_IREAD: Literal[0o0400] +S_IWRITE: Literal[0o0200] +S_IEXEC: Literal[0o0100] + +UF_APPEND: Literal[0x00000004] +UF_COMPRESSED: Literal[0x00000020] # OS X 10.6+ only +UF_HIDDEN: Literal[0x00008000] # OX X 10.5+ only +UF_IMMUTABLE: Literal[0x00000002] +UF_NODUMP: Literal[0x00000001] +UF_NOUNLINK: Literal[0x00000010] +UF_OPAQUE: Literal[0x00000008] def S_IMODE(mode: int) -> int: ... def S_IFMT(mode: int) -> int: ... @@ -63,3 +77,27 @@ def S_ISREG(mode: int) -> bool: ... def S_ISSOCK(mode: int) -> bool: ... def S_ISWHT(mode: int) -> bool: ... def filemode(mode: int) -> str: ... 
+ +if sys.platform == "win32" and sys.version_info >= (3, 8): + IO_REPARSE_TAG_SYMLINK: int + IO_REPARSE_TAG_MOUNT_POINT: int + IO_REPARSE_TAG_APPEXECLINK: int + +if sys.platform == "win32": + FILE_ATTRIBUTE_ARCHIVE: Literal[32] + FILE_ATTRIBUTE_COMPRESSED: Literal[2048] + FILE_ATTRIBUTE_DEVICE: Literal[64] + FILE_ATTRIBUTE_DIRECTORY: Literal[16] + FILE_ATTRIBUTE_ENCRYPTED: Literal[16384] + FILE_ATTRIBUTE_HIDDEN: Literal[2] + FILE_ATTRIBUTE_INTEGRITY_STREAM: Literal[32768] + FILE_ATTRIBUTE_NORMAL: Literal[128] + FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: Literal[8192] + FILE_ATTRIBUTE_NO_SCRUB_DATA: Literal[131072] + FILE_ATTRIBUTE_OFFLINE: Literal[4096] + FILE_ATTRIBUTE_READONLY: Literal[1] + FILE_ATTRIBUTE_REPARSE_POINT: Literal[1024] + FILE_ATTRIBUTE_SPARSE_FILE: Literal[512] + FILE_ATTRIBUTE_SYSTEM: Literal[4] + FILE_ATTRIBUTE_TEMPORARY: Literal[256] + FILE_ATTRIBUTE_VIRTUAL: Literal[65536] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_thread.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_thread.pyi index 2425703121b5..744799f1f66f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_thread.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_thread.pyi @@ -1,7 +1,9 @@ import sys +from _typeshed import structseq from threading import Thread from types import TracebackType -from typing import Any, Callable, NoReturn, Optional, Tuple, Type +from typing import Any, Callable, NoReturn, Optional +from typing_extensions import final error = RuntimeError @@ -9,16 +11,17 @@ def _count() -> int: ... _dangling: Any +@final class LockType: def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... def locked(self) -> bool: ... def __enter__(self) -> bool: ... 
def __exit__( - self, type: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... -def start_new_thread(function: Callable[..., Any], args: Tuple[Any, ...], kwargs: dict[str, Any] = ...) -> int: ... +def start_new_thread(function: Callable[..., Any], args: tuple[Any, ...], kwargs: dict[str, Any] = ...) -> int: ... def interrupt_main() -> None: ... def exit() -> NoReturn: ... def allocate_lock() -> LockType: ... @@ -29,9 +32,12 @@ TIMEOUT_MAX: float if sys.version_info >= (3, 8): def get_native_id() -> int: ... # only available on some platforms - class _ExceptHookArgs(Tuple[Type[BaseException], Optional[BaseException], Optional[TracebackType], Optional[Thread]]): + @final + class _ExceptHookArgs( + structseq[Any], tuple[type[BaseException], Optional[BaseException], Optional[TracebackType], Optional[Thread]] + ): @property - def exc_type(self) -> Type[BaseException]: ... + def exc_type(self) -> type[BaseException]: ... @property def exc_value(self) -> BaseException | None: ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_threading_local.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_threading_local.pyi index 461459d694dd..2ad77a177c37 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_threading_local.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_threading_local.pyi @@ -1,11 +1,12 @@ -from typing import Any, Dict, Tuple +from typing import Any from weakref import ReferenceType -localdict = Dict[Any, Any] +__all__ = ["local"] +localdict = dict[Any, Any] class _localimpl: key: str - dicts: dict[int, Tuple[ReferenceType[Any], localdict]] + dicts: dict[int, tuple[ReferenceType[Any], localdict]] def __init__(self) -> None: ... def get_dict(self) -> localdict: ... def create_dict(self) -> localdict: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_tkinter.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_tkinter.pyi index 378b04202c4f..6dfe070dafac 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_tkinter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_tkinter.pyi @@ -1,5 +1,6 @@ +import sys from typing import Any -from typing_extensions import Literal +from typing_extensions import Literal, final # _tkinter is meant to be only used internally by tkinter, but some tkinter # functions e.g. return _tkinter.Tcl_Obj objects. Tcl_Obj represents a Tcl @@ -14,9 +15,17 @@ from typing_extensions import Literal # >>> text.tag_add('foo', '1.0', 'end') # >>> text.tag_ranges('foo') # (, ) +@final class Tcl_Obj: - string: str # str(tclobj) returns this + string: str | bytes typename: str + __hash__: None # type: ignore[assignment] + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __gt__(self, __other): ... + def __le__(self, __other): ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... class TclError(Exception): ... @@ -37,40 +46,45 @@ class TclError(Exception): ... # # eval always returns str because _tkinter_tkapp_eval_impl in _tkinter.c calls # Tkapp_UnicodeResult, and it returns a string when it succeeds. +@final class TkappType: # Please keep in sync with tkinter.Tk + def adderrorinfo(self, __msg): ... def call(self, __command: Any, *args: Any) -> Any: ... + def createcommand(self, __name, __func): ... + if sys.platform != "win32": + def createfilehandler(self, __file, __mask, __func): ... + def deletefilehandler(self, __file): ... + + def createtimerhandler(self, __milliseconds, __func): ... + def deletecommand(self, __name): ... + def dooneevent(self, __flags: int = ...): ... def eval(self, __script: str) -> str: ... 
- adderrorinfo: Any - createcommand: Any - createfilehandler: Any - createtimerhandler: Any - deletecommand: Any - deletefilehandler: Any - dooneevent: Any - evalfile: Any - exprboolean: Any - exprdouble: Any - exprlong: Any - exprstring: Any - getboolean: Any - getdouble: Any - getint: Any - getvar: Any - globalgetvar: Any - globalsetvar: Any - globalunsetvar: Any - interpaddr: Any - loadtk: Any - mainloop: Any - quit: Any - record: Any - setvar: Any - split: Any - splitlist: Any - unsetvar: Any - wantobjects: Any - willdispatch: Any + def evalfile(self, __fileName): ... + def exprboolean(self, __s): ... + def exprdouble(self, __s): ... + def exprlong(self, __s): ... + def exprstring(self, __s): ... + def getboolean(self, __arg): ... + def getdouble(self, __arg): ... + def getint(self, __arg): ... + def getvar(self, *args, **kwargs): ... + def globalgetvar(self, *args, **kwargs): ... + def globalsetvar(self, *args, **kwargs): ... + def globalunsetvar(self, *args, **kwargs): ... + def interpaddr(self): ... + def loadtk(self) -> None: ... + def mainloop(self, __threshold: int = ...): ... + def quit(self): ... + def record(self, __script): ... + def setvar(self, *ags, **kwargs): ... + if sys.version_info < (3, 11): + def split(self, __arg): ... + + def splitlist(self, __arg): ... + def unsetvar(self, *args, **kwargs): ... + def wantobjects(self, *args, **kwargs): ... + def willdispatch(self): ... # These should be kept in sync with tkinter.tix constants, except ALL_EVENTS which doesn't match TCL_ALL_EVENTS ALL_EVENTS: Literal[-3] @@ -87,9 +101,19 @@ WRITABLE: Literal[4] TCL_VERSION: str TK_VERSION: str -# TODO: figure out what these are (with e.g. help()) and get rid of Any -TkttType: Any -_flatten: Any -create: Any -getbusywaitinterval: Any -setbusywaitinterval: Any +@final +class TkttType: + def deletetimerhandler(self): ... 
+ +def create( + __screenName: str | None = ..., + __baseName: str | None = ..., + __className: str = ..., + __interactive: bool = ..., + __wantobjects: bool = ..., + __wantTk: bool = ..., + __sync: bool = ..., + __use: str | None = ..., +): ... +def getbusywaitinterval(): ... +def setbusywaitinterval(__new_val): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_tracemalloc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_tracemalloc.pyi index 27637637f6c5..fd159dc586cb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_tracemalloc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_tracemalloc.pyi @@ -1,12 +1,12 @@ import sys from tracemalloc import _FrameTupleT, _TraceTupleT -from typing import Sequence, Tuple +from typing import Sequence def _get_object_traceback(__obj: object) -> Sequence[_FrameTupleT] | None: ... def _get_traces() -> Sequence[_TraceTupleT]: ... def clear_traces() -> None: ... def get_traceback_limit() -> int: ... -def get_traced_memory() -> Tuple[int, int]: ... +def get_traced_memory() -> tuple[int, int]: ... def get_tracemalloc_memory() -> int: ... def is_tracing() -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/__init__.pyi index 3f8978a64623..103af47c7524 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/__init__.pyi @@ -3,11 +3,12 @@ # See the README.md file in this directory for more information. 
import array +import ctypes import mmap import sys from os import PathLike -from typing import AbstractSet, Any, Awaitable, Container, Iterable, Protocol, Tuple, TypeVar, Union -from typing_extensions import Literal, final +from typing import AbstractSet, Any, Container, Generic, Iterable, Protocol, TypeVar, Union +from typing_extensions import Final, Literal, final _KT = TypeVar("_KT") _KT_co = TypeVar("_KT_co", covariant=True) @@ -32,12 +33,26 @@ class SupportsNext(Protocol[_T_co]): # stable class SupportsAnext(Protocol[_T_co]): - def __anext__(self) -> Awaitable[_T_co]: ... + async def __anext__(self) -> _T_co: ... -class SupportsLessThan(Protocol): +# Comparison protocols + +class SupportsDunderLT(Protocol): def __lt__(self, __other: Any) -> bool: ... -SupportsLessThanT = TypeVar("SupportsLessThanT", bound=SupportsLessThan) # noqa: Y001 +class SupportsDunderGT(Protocol): + def __gt__(self, __other: Any) -> bool: ... + +class SupportsDunderLE(Protocol): + def __le__(self, __other: Any) -> bool: ... + +class SupportsDunderGE(Protocol): + def __ge__(self, __other: Any) -> bool: ... + +class SupportsAllComparisons(SupportsDunderLT, SupportsDunderGT, SupportsDunderLE, SupportsDunderGE, Protocol): ... + +SupportsRichComparison = Union[SupportsDunderLT, SupportsDunderGT] +SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichComparison) # noqa: Y001 class SupportsDivMod(Protocol[_T_contra, _T_co]): def __divmod__(self, __other: _T_contra) -> _T_co: ... @@ -56,7 +71,7 @@ class SupportsTrunc(Protocol): # stable class SupportsItems(Protocol[_KT_co, _VT_co]): - def items(self) -> AbstractSet[Tuple[_KT_co, _VT_co]]: ... + def items(self) -> AbstractSet[tuple[_KT_co, _VT_co]]: ... # stable class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): @@ -165,10 +180,15 @@ class SupportsNoArgReadline(Protocol[_T_co]): # stable class SupportsWrite(Protocol[_T_contra]): - def write(self, __s: _T_contra) -> Any: ... 
+ def write(self, __s: _T_contra) -> object: ... -ReadableBuffer = Union[bytes, bytearray, memoryview, array.array[Any], mmap.mmap] # stable -WriteableBuffer = Union[bytearray, memoryview, array.array[Any], mmap.mmap] # stable +ReadOnlyBuffer = bytes # stable +# Anything that implements the read-write buffer interface. +# The buffer interface is defined purely on the C level, so we cannot define a normal Protocol +# for it. Instead we have to list the most common stdlib buffer classes in a Union. +WriteableBuffer = Union[bytearray, memoryview, array.array[Any], mmap.mmap, ctypes._CData] # stable +# Same as _WriteableBuffer, but also includes read-only buffer types (like bytes). +ReadableBuffer = Union[ReadOnlyBuffer, WriteableBuffer] # stable # stable if sys.version_info >= (3, 10): @@ -178,3 +198,20 @@ else: @final class NoneType: def __bool__(self) -> Literal[False]: ... + +# This is an internal CPython type that is like, but subtly different from, a NamedTuple +# Subclasses of this type are found in multiple modules. +# In typeshed, `structseq` is only ever used as a mixin in combination with a fixed-length `Tuple` +# See discussion at #6546 & #6560 +# `structseq` classes are unsubclassable, so are all decorated with `@final`. +class structseq(Generic[_T_co]): + n_fields: Final[int] + n_unnamed_fields: Final[int] + n_sequence_fields: Final[int] + # The first parameter will generally only take an iterable of a specific length. + # E.g. `os.uname_result` takes any iterable of length exactly 5. + # + # The second parameter will accept a dict of any kind without raising an exception, + # but only has any meaning if you supply it a dict where the keys are strings. + # https://github.com/python/typeshed/pull/6560#discussion_r767149830 + def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/dbapi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/dbapi.pyi new file mode 100644 index 000000000000..eee4fc03874e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/dbapi.pyi @@ -0,0 +1,36 @@ +# PEP 249 Database API 2.0 Types +# https://www.python.org/dev/peps/pep-0249/ + +from collections.abc import Mapping, Sequence +from typing import Any, Protocol + +DBAPITypeCode = Any | None +# Strictly speaking, this should be a Sequence, but the type system does +# not support fixed-length sequences. +DBAPIColumnDescription = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None] + +class DBAPIConnection(Protocol): + def close(self) -> object: ... + def commit(self) -> object: ... + # optional: + # def rollback(self) -> Any: ... + def cursor(self) -> DBAPICursor: ... + +class DBAPICursor(Protocol): + @property + def description(self) -> Sequence[DBAPIColumnDescription] | None: ... + @property + def rowcount(self) -> int: ... + # optional: + # def callproc(self, __procname: str, __parameters: Sequence[Any] = ...) -> Sequence[Any]: ... + def close(self) -> object: ... + def execute(self, __operation: str, __parameters: Sequence[Any] | Mapping[str, Any] = ...) -> object: ... + def executemany(self, __operation: str, __seq_of_parameters: Sequence[Sequence[Any]]) -> object: ... + def fetchone(self) -> Sequence[Any] | None: ... + def fetchmany(self, __size: int = ...) -> Sequence[Sequence[Any]]: ... + def fetchall(self) -> Sequence[Sequence[Any]]: ... + # optional: + # def nextset(self) -> None | Literal[True]: ... + arraysize: int + def setinputsizes(self, __sizes: Sequence[DBAPITypeCode | int | None]) -> object: ... + def setoutputsize(self, __size: int, __column: int = ...) -> object: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/wsgi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/wsgi.pyi index ddb32b78332a..031d1472b6c5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/wsgi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/wsgi.pyi @@ -3,15 +3,15 @@ # See the README.md file in this directory for more information. from sys import _OptExcInfo -from typing import Any, Callable, Dict, Iterable, Protocol, Tuple +from typing import Any, Callable, Iterable, Protocol # stable class StartResponse(Protocol): def __call__( - self, status: str, headers: list[Tuple[str, str]], exc_info: _OptExcInfo | None = ... + self, status: str, headers: list[tuple[str, str]], exc_info: _OptExcInfo | None = ... ) -> Callable[[bytes], Any]: ... -WSGIEnvironment = Dict[str, Any] # stable +WSGIEnvironment = dict[str, Any] # stable WSGIApplication = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] # stable # WSGI input streams per PEP 3333, stable diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/xml.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/xml.pyi index d53b743af2d3..231c2b86e912 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/xml.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_typeshed/xml.pyi @@ -1,7 +1,6 @@ # See the README.md file in this directory for more information. 
-from typing import Any -from typing_extensions import Protocol +from typing import Any, Protocol # As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects class DOMImplementation(Protocol): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_warnings.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_warnings.pyi index fef73e69c626..2eb9ae478a5d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_warnings.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_warnings.pyi @@ -1,21 +1,21 @@ -from typing import Any, Tuple, Type, overload +from typing import Any, overload _defaultaction: str _onceregistry: dict[Any, Any] -filters: list[tuple[str, str | None, Type[Warning], str | None, int]] +filters: list[tuple[str, str | None, type[Warning], str | None, int]] @overload -def warn(message: str, category: Type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... +def warn(message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... @overload def warn(message: Warning, category: Any = ..., stacklevel: int = ..., source: Any | None = ...) -> None: ... @overload def warn_explicit( message: str, - category: Type[Warning], + category: type[Warning], filename: str, lineno: int, module: str | None = ..., - registry: dict[str | Tuple[str, Type[Warning], int], int] | None = ..., + registry: dict[str | tuple[str, type[Warning], int], int] | None = ..., module_globals: dict[str, Any] | None = ..., source: Any | None = ..., ) -> None: ... @@ -26,7 +26,7 @@ def warn_explicit( filename: str, lineno: int, module: str | None = ..., - registry: dict[str | Tuple[str, Type[Warning], int], int] | None = ..., + registry: dict[str | tuple[str, type[Warning], int], int] | None = ..., module_globals: dict[str, Any] | None = ..., source: Any | None = ..., ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_weakref.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_weakref.pyi index 006836f85055..00dc2d5114b8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_weakref.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_weakref.pyi @@ -1,5 +1,6 @@ import sys from typing import Any, Callable, Generic, TypeVar, overload +from typing_extensions import final if sys.version_info >= (3, 9): from types import GenericAlias @@ -7,9 +8,12 @@ if sys.version_info >= (3, 9): _C = TypeVar("_C", bound=Callable[..., Any]) _T = TypeVar("_T") +@final class CallableProxyType(Generic[_C]): # "weakcallableproxy" def __getattr__(self, attr: str) -> Any: ... + __call__: _C +@final class ProxyType(Generic[_T]): # "weakproxy" def __getattr__(self, attr: str) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_weakrefset.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_weakrefset.pyi index 1f0132f4c240..b0c22a5ecc13 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_weakrefset.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_weakrefset.pyi @@ -1,34 +1,36 @@ import sys +from _typeshed import Self from typing import Any, Generic, Iterable, Iterator, MutableSet, TypeVar if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = ["WeakSet"] + _S = TypeVar("_S") _T = TypeVar("_T") -_SelfT = TypeVar("_SelfT", bound=WeakSet[Any]) class WeakSet(MutableSet[_T], Generic[_T]): def __init__(self, data: Iterable[_T] | None = ...) -> None: ... def add(self, item: _T) -> None: ... def clear(self) -> None: ... def discard(self, item: _T) -> None: ... - def copy(self: _SelfT) -> _SelfT: ... + def copy(self: Self) -> Self: ... def pop(self) -> _T: ... def remove(self, item: _T) -> None: ... def update(self, other: Iterable[_T]) -> None: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... 
def __iter__(self) -> Iterator[_T]: ... - def __ior__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... - def difference(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... - def __sub__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... - def difference_update(self, other: Iterable[_T]) -> None: ... - def __isub__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... - def intersection(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... - def __and__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... - def intersection_update(self, other: Iterable[_T]) -> None: ... - def __iand__(self: _SelfT, other: Iterable[_T]) -> _SelfT: ... + def __ior__(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] + def difference(self: Self, other: Iterable[_T]) -> Self: ... + def __sub__(self: Self, other: Iterable[Any]) -> Self: ... + def difference_update(self, other: Iterable[Any]) -> None: ... + def __isub__(self: Self, other: Iterable[Any]) -> Self: ... + def intersection(self: Self, other: Iterable[_T]) -> Self: ... + def __and__(self: Self, other: Iterable[Any]) -> Self: ... + def intersection_update(self, other: Iterable[Any]) -> None: ... + def __iand__(self: Self, other: Iterable[Any]) -> Self: ... def issubset(self, other: Iterable[_T]) -> bool: ... def __le__(self, other: Iterable[_T]) -> bool: ... def __lt__(self, other: Iterable[_T]) -> bool: ... @@ -38,8 +40,8 @@ class WeakSet(MutableSet[_T], Generic[_T]): def __eq__(self, other: object) -> bool: ... def symmetric_difference(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def __xor__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... - def symmetric_difference_update(self, other: Iterable[Any]) -> None: ... - def __ixor__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... + def symmetric_difference_update(self, other: Iterable[_T]) -> None: ... + def __ixor__(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override,misc] def union(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... 
def __or__(self, other: Iterable[_S]) -> WeakSet[_S | _T]: ... def isdisjoint(self, other: Iterable[_T]) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi index eabbad312abc..1e8c51477083 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/_winapi.pyi @@ -1,134 +1,196 @@ import sys -from typing import Any, NoReturn, Sequence, Tuple, overload -from typing_extensions import Literal +from typing import Any, NoReturn, Sequence, overload +from typing_extensions import Literal, final -CREATE_NEW_CONSOLE: int -CREATE_NEW_PROCESS_GROUP: int -DUPLICATE_CLOSE_SOURCE: int -DUPLICATE_SAME_ACCESS: int -ERROR_ALREADY_EXISTS: int -ERROR_BROKEN_PIPE: int -ERROR_IO_PENDING: int -ERROR_MORE_DATA: int -ERROR_NETNAME_DELETED: int -ERROR_NO_DATA: int -ERROR_NO_SYSTEM_RESOURCES: int -ERROR_OPERATION_ABORTED: int -ERROR_PIPE_BUSY: int -ERROR_PIPE_CONNECTED: int -ERROR_SEM_TIMEOUT: int -FILE_FLAG_FIRST_PIPE_INSTANCE: int -FILE_FLAG_OVERLAPPED: int -FILE_GENERIC_READ: int -FILE_GENERIC_WRITE: int -GENERIC_READ: int -GENERIC_WRITE: int -INFINITE: int -NMPWAIT_WAIT_FOREVER: int -NULL: int -OPEN_EXISTING: int -PIPE_ACCESS_DUPLEX: int -PIPE_ACCESS_INBOUND: int -PIPE_READMODE_MESSAGE: int -PIPE_TYPE_MESSAGE: int -PIPE_UNLIMITED_INSTANCES: int -PIPE_WAIT: int -PROCESS_ALL_ACCESS: int -PROCESS_DUP_HANDLE: int -STARTF_USESHOWWINDOW: int -STARTF_USESTDHANDLES: int -STD_ERROR_HANDLE: int -STD_INPUT_HANDLE: int -STD_OUTPUT_HANDLE: int -STILL_ACTIVE: int -SW_HIDE: int -WAIT_ABANDONED_0: int -WAIT_OBJECT_0: int -WAIT_TIMEOUT: int +if sys.platform == "win32": + if sys.version_info >= (3, 7): + ABOVE_NORMAL_PRIORITY_CLASS: Literal[32768] + BELOW_NORMAL_PRIORITY_CLASS: Literal[16384] + CREATE_BREAKAWAY_FROM_JOB: Literal[16777216] + CREATE_DEFAULT_ERROR_MODE: Literal[67108864] + CREATE_NO_WINDOW: Literal[134217728] + 
CREATE_NEW_CONSOLE: Literal[16] + CREATE_NEW_PROCESS_GROUP: Literal[512] + if sys.version_info >= (3, 7): + DETACHED_PROCESS: Literal[8] + DUPLICATE_CLOSE_SOURCE: Literal[1] + DUPLICATE_SAME_ACCESS: Literal[2] -def CloseHandle(__handle: int) -> None: ... -@overload -def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... -@overload -def ConnectNamedPipe(handle: int, overlapped: Literal[False] = ...) -> None: ... -@overload -def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ... -def CreateFile( - __file_name: str, - __desired_access: int, - __share_mode: int, - __security_attributes: int, - __creation_disposition: int, - __flags_and_attributes: int, - __template_file: int, -) -> int: ... -def CreateJunction(__src_path: str, __dst_path: str) -> None: ... -def CreateNamedPipe( - __name: str, - __open_mode: int, - __pipe_mode: int, - __max_instances: int, - __out_buffer_size: int, - __in_buffer_size: int, - __default_timeout: int, - __security_attributes: int, -) -> int: ... -def CreatePipe(__pipe_attrs: Any, __size: int) -> Tuple[int, int]: ... -def CreateProcess( - __application_name: str | None, - __command_line: str | None, - __proc_attrs: Any, - __thread_attrs: Any, - __inherit_handles: bool, - __creation_flags: int, - __env_mapping: dict[str, str], - __current_directory: str | None, - __startup_info: Any, -) -> Tuple[int, int, int, int]: ... -def DuplicateHandle( - __source_process_handle: int, - __source_handle: int, - __target_process_handle: int, - __desired_access: int, - __inherit_handle: bool, - __options: int = ..., -) -> int: ... -def ExitProcess(__ExitCode: int) -> NoReturn: ... 
+ ERROR_ALREADY_EXISTS: Literal[183] + ERROR_BROKEN_PIPE: Literal[109] + ERROR_IO_PENDING: Literal[997] + ERROR_MORE_DATA: Literal[234] + ERROR_NETNAME_DELETED: Literal[64] + ERROR_NO_DATA: Literal[232] + ERROR_NO_SYSTEM_RESOURCES: Literal[1450] + ERROR_OPERATION_ABORTED: Literal[995] + ERROR_PIPE_BUSY: Literal[231] + ERROR_PIPE_CONNECTED: Literal[535] + ERROR_SEM_TIMEOUT: Literal[121] -if sys.version_info >= (3, 7): - def GetACP() -> int: ... - def GetFileType(handle: int) -> int: ... + FILE_FLAG_FIRST_PIPE_INSTANCE: Literal[524288] + FILE_FLAG_OVERLAPPED: Literal[1073741824] + FILE_GENERIC_READ: Literal[1179785] + FILE_GENERIC_WRITE: Literal[1179926] + if sys.version_info >= (3, 8): + FILE_MAP_ALL_ACCESS: Literal[983071] + FILE_MAP_COPY: Literal[1] + FILE_MAP_EXECUTE: Literal[32] + FILE_MAP_READ: Literal[4] + FILE_MAP_WRITE: Literal[2] + if sys.version_info >= (3, 7): + FILE_TYPE_CHAR: Literal[2] + FILE_TYPE_DISK: Literal[1] + FILE_TYPE_PIPE: Literal[3] + FILE_TYPE_REMOTE: Literal[32768] + FILE_TYPE_UNKNOWN: Literal[0] -def GetCurrentProcess() -> int: ... -def GetExitCodeProcess(__process: int) -> int: ... -def GetLastError() -> int: ... -def GetModuleFileName(__module_handle: int) -> str: ... -def GetStdHandle(__std_handle: int) -> int: ... -def GetVersion() -> int: ... -def OpenProcess(__desired_access: int, __inherit_handle: bool, __process_id: int) -> int: ... -def PeekNamedPipe(__handle: int, __size: int = ...) -> Tuple[int, int] | Tuple[bytes, int, int]: ... -@overload -def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> Tuple[Overlapped, int]: ... -@overload -def ReadFile(handle: int, size: int, overlapped: Literal[False] = ...) -> Tuple[bytes, int]: ... -@overload -def ReadFile(handle: int, size: int, overlapped: int | bool) -> Tuple[Any, int]: ... -def SetNamedPipeHandleState( - __named_pipe: int, __mode: int | None, __max_collection_count: int | None, __collect_data_timeout: int | None -) -> None: ... 
-def TerminateProcess(__handle: int, __exit_code: int) -> None: ... -def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = ...) -> int: ... -def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... -def WaitNamedPipe(__name: str, __timeout: int) -> None: ... -@overload -def WriteFile(handle: int, buffer: bytes, overlapped: Literal[True]) -> Tuple[Overlapped, int]: ... -@overload -def WriteFile(handle: int, buffer: bytes, overlapped: Literal[False] = ...) -> Tuple[int, int]: ... -@overload -def WriteFile(handle: int, buffer: bytes, overlapped: int | bool) -> Tuple[Any, int]: ... + GENERIC_READ: Literal[2147483648] + GENERIC_WRITE: Literal[1073741824] + if sys.version_info >= (3, 7): + HIGH_PRIORITY_CLASS: Literal[128] + INFINITE: Literal[4294967295] + if sys.version_info >= (3, 8): + INVALID_HANDLE_VALUE: int # very large number + if sys.version_info >= (3, 7): + IDLE_PRIORITY_CLASS: Literal[64] + NORMAL_PRIORITY_CLASS: Literal[32] + REALTIME_PRIORITY_CLASS: Literal[256] + NMPWAIT_WAIT_FOREVER: Literal[4294967295] -class Overlapped: - event: int - def GetOverlappedResult(self, __wait: bool) -> Tuple[int, int]: ... - def cancel(self) -> None: ... - def getbuffer(self) -> bytes | None: ... 
+ if sys.version_info >= (3, 8): + MEM_COMMIT: Literal[4096] + MEM_FREE: Literal[65536] + MEM_IMAGE: Literal[16777216] + MEM_MAPPED: Literal[262144] + MEM_PRIVATE: Literal[131072] + MEM_RESERVE: Literal[8192] + + NULL: Literal[0] + OPEN_EXISTING: Literal[3] + + PIPE_ACCESS_DUPLEX: Literal[3] + PIPE_ACCESS_INBOUND: Literal[1] + PIPE_READMODE_MESSAGE: Literal[2] + PIPE_TYPE_MESSAGE: Literal[4] + PIPE_UNLIMITED_INSTANCES: Literal[255] + PIPE_WAIT: Literal[0] + if sys.version_info >= (3, 8): + PAGE_EXECUTE: Literal[16] + PAGE_EXECUTE_READ: Literal[32] + PAGE_EXECUTE_READWRITE: Literal[64] + PAGE_EXECUTE_WRITECOPY: Literal[128] + PAGE_GUARD: Literal[256] + PAGE_NOACCESS: Literal[1] + PAGE_NOCACHE: Literal[512] + PAGE_READONLY: Literal[2] + PAGE_READWRITE: Literal[4] + PAGE_WRITECOMBINE: Literal[1024] + PAGE_WRITECOPY: Literal[8] + + PROCESS_ALL_ACCESS: Literal[2097151] + PROCESS_DUP_HANDLE: Literal[64] + if sys.version_info >= (3, 8): + SEC_COMMIT: Literal[134217728] + SEC_IMAGE: Literal[16777216] + SEC_LARGE_PAGES: Literal[2147483648] + SEC_NOCACHE: Literal[268435456] + SEC_RESERVE: Literal[67108864] + SEC_WRITECOMBINE: Literal[1073741824] + STARTF_USESHOWWINDOW: Literal[1] + STARTF_USESTDHANDLES: Literal[256] + STD_ERROR_HANDLE: Literal[4294967284] + STD_INPUT_HANDLE: Literal[4294967286] + STD_OUTPUT_HANDLE: Literal[4294967285] + STILL_ACTIVE: Literal[259] + SW_HIDE: Literal[0] + if sys.version_info >= (3, 8): + SYNCHRONIZE: Literal[1048576] + WAIT_ABANDONED_0: Literal[128] + WAIT_OBJECT_0: Literal[0] + WAIT_TIMEOUT: Literal[258] + def CloseHandle(__handle: int) -> None: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: Literal[True]) -> Overlapped: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: Literal[False] = ...) -> None: ... + @overload + def ConnectNamedPipe(handle: int, overlapped: bool) -> Overlapped | None: ... 
+ def CreateFile( + __file_name: str, + __desired_access: int, + __share_mode: int, + __security_attributes: int, + __creation_disposition: int, + __flags_and_attributes: int, + __template_file: int, + ) -> int: ... + def CreateJunction(__src_path: str, __dst_path: str) -> None: ... + def CreateNamedPipe( + __name: str, + __open_mode: int, + __pipe_mode: int, + __max_instances: int, + __out_buffer_size: int, + __in_buffer_size: int, + __default_timeout: int, + __security_attributes: int, + ) -> int: ... + def CreatePipe(__pipe_attrs: Any, __size: int) -> tuple[int, int]: ... + def CreateProcess( + __application_name: str | None, + __command_line: str | None, + __proc_attrs: Any, + __thread_attrs: Any, + __inherit_handles: bool, + __creation_flags: int, + __env_mapping: dict[str, str], + __current_directory: str | None, + __startup_info: Any, + ) -> tuple[int, int, int, int]: ... + def DuplicateHandle( + __source_process_handle: int, + __source_handle: int, + __target_process_handle: int, + __desired_access: int, + __inherit_handle: bool, + __options: int = ..., + ) -> int: ... + def ExitProcess(__ExitCode: int) -> NoReturn: ... + if sys.version_info >= (3, 7): + def GetACP() -> int: ... + def GetFileType(handle: int) -> int: ... + + def GetCurrentProcess() -> int: ... + def GetExitCodeProcess(__process: int) -> int: ... + def GetLastError() -> int: ... + def GetModuleFileName(__module_handle: int) -> str: ... + def GetStdHandle(__std_handle: int) -> int: ... + def GetVersion() -> int: ... + def OpenProcess(__desired_access: int, __inherit_handle: bool, __process_id: int) -> int: ... + def PeekNamedPipe(__handle: int, __size: int = ...) -> tuple[int, int] | tuple[bytes, int, int]: ... + @overload + def ReadFile(handle: int, size: int, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + @overload + def ReadFile(handle: int, size: int, overlapped: Literal[False] = ...) -> tuple[bytes, int]: ... 
+ @overload + def ReadFile(handle: int, size: int, overlapped: int | bool) -> tuple[Any, int]: ... + def SetNamedPipeHandleState( + __named_pipe: int, __mode: int | None, __max_collection_count: int | None, __collect_data_timeout: int | None + ) -> None: ... + def TerminateProcess(__handle: int, __exit_code: int) -> None: ... + def WaitForMultipleObjects(__handle_seq: Sequence[int], __wait_flag: bool, __milliseconds: int = ...) -> int: ... + def WaitForSingleObject(__handle: int, __milliseconds: int) -> int: ... + def WaitNamedPipe(__name: str, __timeout: int) -> None: ... + @overload + def WriteFile(handle: int, buffer: bytes, overlapped: Literal[True]) -> tuple[Overlapped, int]: ... + @overload + def WriteFile(handle: int, buffer: bytes, overlapped: Literal[False] = ...) -> tuple[int, int]: ... + @overload + def WriteFile(handle: int, buffer: bytes, overlapped: int | bool) -> tuple[Any, int]: ... + @final + class Overlapped: + event: int + def GetOverlappedResult(self, __wait: bool) -> tuple[int, int]: ... + def cancel(self) -> None: ... + def getbuffer(self) -> bytes | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/abc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/abc.pyi index 7896e910c81f..58985067b125 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/abc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/abc.pyi @@ -1,26 +1,38 @@ +import sys from _typeshed import SupportsWrite -from typing import Any, Callable, Tuple, Type, TypeVar +from collections.abc import Callable +from typing import Any, Generic, TypeVar +from typing_extensions import Literal _T = TypeVar("_T") +_R_co = TypeVar("_R_co", covariant=True) _FuncT = TypeVar("_FuncT", bound=Callable[..., Any]) # These definitions have special processing in mypy class ABCMeta(type): __abstractmethods__: frozenset[str] - def __init__(self, name: str, bases: Tuple[type, ...], namespace: dict[str, Any]) -> None: ... 
+ def __init__(self, name: str, bases: tuple[type, ...], namespace: dict[str, Any]) -> None: ... def __instancecheck__(cls: ABCMeta, instance: Any) -> Any: ... def __subclasscheck__(cls: ABCMeta, subclass: Any) -> Any: ... def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = ...) -> None: ... - def register(cls: ABCMeta, subclass: Type[_T]) -> Type[_T]: ... + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... def abstractmethod(funcobj: _FuncT) -> _FuncT: ... -class abstractproperty(property): ... +class abstractclassmethod(classmethod[_R_co], Generic[_R_co]): + __isabstractmethod__: Literal[True] + def __init__(self: abstractclassmethod[_R_co], callable: Callable[..., _R_co]) -> None: ... -# These two are deprecated and not supported by mypy -def abstractstaticmethod(callable: _FuncT) -> _FuncT: ... -def abstractclassmethod(callable: _FuncT) -> _FuncT: ... +class abstractstaticmethod(staticmethod[_R_co], Generic[_R_co]): + __isabstractmethod__: Literal[True] + def __init__(self, callable: Callable[..., _R_co]) -> None: ... + +class abstractproperty(property): + __isabstractmethod__: Literal[True] class ABC(metaclass=ABCMeta): ... def get_cache_token() -> object: ... + +if sys.version_info >= (3, 10): + def update_abstractmethods(cls: type[_T]) -> type[_T]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/aifc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/aifc.pyi index 7d7c6b21f341..8fc188980312 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/aifc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/aifc.pyi @@ -1,9 +1,14 @@ import sys from _typeshed import Self from types import TracebackType -from typing import IO, Any, NamedTuple, Tuple, Type, Union, overload +from typing import IO, Any, NamedTuple, Union, overload from typing_extensions import Literal +if sys.version_info >= (3, 9): + __all__ = ["Error", "open"] +else: + __all__ = ["Error", "open", "openfp"] + class Error(Exception): ... class _aifc_params(NamedTuple): @@ -15,13 +20,13 @@ class _aifc_params(NamedTuple): compname: bytes _File = Union[str, IO[bytes]] -_Marker = Tuple[int, int, bytes] +_Marker = tuple[int, int, bytes] class Aifc_read: def __init__(self, f: _File) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def initfp(self, file: IO[bytes]) -> None: ... def getfp(self) -> IO[bytes]: ... @@ -45,7 +50,7 @@ class Aifc_write: def __del__(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def initfp(self, file: IO[bytes]) -> None: ... def aiff(self) -> None: ... @@ -61,7 +66,7 @@ class Aifc_write: def setcomptype(self, comptype: bytes, compname: bytes) -> None: ... def getcomptype(self) -> bytes: ... def getcompname(self) -> bytes: ... - def setparams(self, params: Tuple[int, int, int, int, bytes, bytes]) -> None: ... 
+ def setparams(self, params: tuple[int, int, int, int, bytes, bytes]) -> None: ... def getparams(self) -> _aifc_params: ... def setmark(self, id: int, pos: int, name: bytes) -> None: ... def getmark(self, id: int) -> _Marker: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi index 631030e94923..ad54660cc45d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/argparse.pyi @@ -1,16 +1,76 @@ import sys -from typing import IO, Any, Callable, Generator, Iterable, NoReturn, Pattern, Protocol, Sequence, Tuple, Type, TypeVar, overload +from typing import ( + IO, + Any, + Callable, + Generator, + Generic, + Iterable, + NewType, + NoReturn, + Pattern, + Protocol, + Sequence, + TypeVar, + overload, +) +from typing_extensions import Literal + +if sys.version_info >= (3, 9): + __all__ = [ + "ArgumentParser", + "ArgumentError", + "ArgumentTypeError", + "BooleanOptionalAction", + "FileType", + "HelpFormatter", + "ArgumentDefaultsHelpFormatter", + "RawDescriptionHelpFormatter", + "RawTextHelpFormatter", + "MetavarTypeHelpFormatter", + "Namespace", + "Action", + "ONE_OR_MORE", + "OPTIONAL", + "PARSER", + "REMAINDER", + "SUPPRESS", + "ZERO_OR_MORE", + ] +else: + __all__ = [ + "ArgumentParser", + "ArgumentError", + "ArgumentTypeError", + "FileType", + "HelpFormatter", + "ArgumentDefaultsHelpFormatter", + "RawDescriptionHelpFormatter", + "RawTextHelpFormatter", + "MetavarTypeHelpFormatter", + "Namespace", + "Action", + "ONE_OR_MORE", + "OPTIONAL", + "PARSER", + "REMAINDER", + "SUPPRESS", + "ZERO_OR_MORE", + ] _T = TypeVar("_T") _ActionT = TypeVar("_ActionT", bound=Action) +_ArgumentParserT = TypeVar("_ArgumentParserT", bound=ArgumentParser) _N = TypeVar("_N") -ONE_OR_MORE: str -OPTIONAL: str -PARSER: str -REMAINDER: str -SUPPRESS: str -ZERO_OR_MORE: str +ONE_OR_MORE: Literal["+"] +OPTIONAL: Literal["?"] 
+PARSER: Literal["A..."] +REMAINDER: Literal["..."] +_SUPPRESS_T = NewType("_SUPPRESS_T", str) +SUPPRESS: _SUPPRESS_T | str # not using Literal because argparse sometimes compares SUPPRESS with is +# the | str is there so that foo = argparse.SUPPRESS; foo = "test" checks out in mypy +ZERO_OR_MORE: Literal["*"] _UNRECOGNIZED_ARGS_ATTR: str # undocumented class ArgumentError(Exception): @@ -20,7 +80,7 @@ class ArgumentError(Exception): # undocumented class _AttributeHolder: - def _get_kwargs(self) -> list[Tuple[str, Any]]: ... + def _get_kwargs(self) -> list[tuple[str, Any]]: ... def _get_args(self) -> list[Any]: ... # undocumented @@ -46,15 +106,18 @@ class _ActionsContainer: def add_argument( self, *name_or_flags: str, - action: str | Type[Action] = ..., - nargs: int | str = ..., + action: Literal[ + "store", "store_const", "store_true", "store_false", "append", "append_const", "count", "help", "version", "extend" + ] + | type[Action] = ..., + nargs: int | Literal["?", "*", "+", "...", "A...", "==SUPPRESS=="] | _SUPPRESS_T = ..., const: Any = ..., default: Any = ..., - type: Callable[[str], _T] | Callable[[str], _T] | FileType = ..., + type: Callable[[str], _T] | FileType = ..., choices: Iterable[_T] | None = ..., required: bool = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., dest: str | None = ..., version: str = ..., **kwargs: Any, @@ -66,11 +129,11 @@ class _ActionsContainer: def _add_container_actions(self, container: _ActionsContainer) -> None: ... def _get_positional_kwargs(self, dest: str, **kwargs: Any) -> dict[str, Any]: ... def _get_optional_kwargs(self, *args: Any, **kwargs: Any) -> dict[str, Any]: ... - def _pop_action_class(self, kwargs: Any, default: Type[Action] | None = ...) -> Type[Action]: ... - def _get_handler(self) -> Callable[[Action, Iterable[Tuple[str, Action]]], Any]: ... + def _pop_action_class(self, kwargs: Any, default: type[Action] | None = ...) 
-> type[Action]: ... + def _get_handler(self) -> Callable[[Action, Iterable[tuple[str, Action]]], Any]: ... def _check_conflict(self, action: Action) -> None: ... - def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[Tuple[str, Action]]) -> NoReturn: ... - def _handle_conflict_resolve(self, action: Action, conflicting_actions: Iterable[Tuple[str, Action]]) -> None: ... + def _handle_conflict_error(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> NoReturn: ... + def _handle_conflict_resolve(self, action: Action, conflicting_actions: Iterable[tuple[str, Action]]) -> None: ... class _FormatterClass(Protocol): def __call__(self, prog: str) -> HelpFormatter: ... @@ -127,49 +190,79 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): @overload def parse_args(self, args: Sequence[str] | None = ...) -> Namespace: ... @overload - def parse_args(self, args: Sequence[str] | None, namespace: None) -> Namespace: ... # type: ignore + def parse_args(self, args: Sequence[str] | None, namespace: None) -> Namespace: ... # type: ignore[misc] @overload def parse_args(self, args: Sequence[str] | None, namespace: _N) -> _N: ... @overload - def parse_args(self, *, namespace: None) -> Namespace: ... # type: ignore + def parse_args(self, *, namespace: None) -> Namespace: ... # type: ignore[misc] @overload def parse_args(self, *, namespace: _N) -> _N: ... if sys.version_info >= (3, 7): + @overload + def add_subparsers( + self: _ArgumentParserT, + *, + title: str = ..., + description: str | None = ..., + prog: str = ..., + action: type[Action] = ..., + option_string: str = ..., + dest: str | None = ..., + required: bool = ..., + help: str | None = ..., + metavar: str | None = ..., + ) -> _SubParsersAction[_ArgumentParserT]: ... 
+ @overload def add_subparsers( self, *, title: str = ..., description: str | None = ..., prog: str = ..., - parser_class: Type[ArgumentParser] = ..., - action: Type[Action] = ..., + parser_class: type[_ArgumentParserT] = ..., + action: type[Action] = ..., option_string: str = ..., dest: str | None = ..., required: bool = ..., help: str | None = ..., metavar: str | None = ..., - ) -> _SubParsersAction: ... + ) -> _SubParsersAction[_ArgumentParserT]: ... else: + @overload + def add_subparsers( + self: _ArgumentParserT, + *, + title: str = ..., + description: str | None = ..., + prog: str = ..., + action: type[Action] = ..., + option_string: str = ..., + dest: str | None = ..., + help: str | None = ..., + metavar: str | None = ..., + ) -> _SubParsersAction[_ArgumentParserT]: ... + @overload def add_subparsers( self, *, title: str = ..., description: str | None = ..., prog: str = ..., - parser_class: Type[ArgumentParser] = ..., - action: Type[Action] = ..., + parser_class: type[_ArgumentParserT] = ..., + action: type[Action] = ..., option_string: str = ..., dest: str | None = ..., help: str | None = ..., metavar: str | None = ..., - ) -> _SubParsersAction: ... + ) -> _SubParsersAction[_ArgumentParserT]: ... + def print_usage(self, file: IO[str] | None = ...) -> None: ... def print_help(self, file: IO[str] | None = ...) -> None: ... def format_usage(self) -> str: ... def format_help(self) -> str: ... def parse_known_args( self, args: Sequence[str] | None = ..., namespace: Namespace | None = ... - ) -> Tuple[Namespace, list[str]]: ... + ) -> tuple[Namespace, list[str]]: ... def convert_arg_line_to_args(self, arg_line: str) -> list[str]: ... def exit(self, status: int = ..., message: str | None = ...) -> NoReturn: ... def error(self, message: str) -> NoReturn: ... @@ -177,16 +270,16 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer): def parse_intermixed_args(self, args: Sequence[str] | None = ..., namespace: Namespace | None = ...) -> Namespace: ... 
def parse_known_intermixed_args( self, args: Sequence[str] | None = ..., namespace: Namespace | None = ... - ) -> Tuple[Namespace, list[str]]: ... + ) -> tuple[Namespace, list[str]]: ... # undocumented def _get_optional_actions(self) -> list[Action]: ... def _get_positional_actions(self) -> list[Action]: ... - def _parse_known_args(self, arg_strings: list[str], namespace: Namespace) -> Tuple[Namespace, list[str]]: ... + def _parse_known_args(self, arg_strings: list[str], namespace: Namespace) -> tuple[Namespace, list[str]]: ... def _read_args_from_files(self, arg_strings: list[str]) -> list[str]: ... def _match_argument(self, action: Action, arg_strings_pattern: str) -> int: ... def _match_arguments_partial(self, actions: Sequence[Action], arg_strings_pattern: str) -> list[int]: ... - def _parse_optional(self, arg_string: str) -> Tuple[Action | None, str, str | None] | None: ... - def _get_option_tuples(self, option_string: str) -> list[Tuple[Action, str, str | None]]: ... + def _parse_optional(self, arg_string: str) -> tuple[Action | None, str, str | None] | None: ... + def _get_option_tuples(self, option_string: str) -> list[tuple[Action, str, str | None]]: ... def _get_nargs_pattern(self, action: Action) -> str: ... def _get_values(self, action: Action, arg_strings: list[str]) -> Any: ... def _get_value(self, action: Action, arg_string: str) -> Any: ... @@ -207,7 +300,7 @@ class HelpFormatter: _current_section: Any _whitespace_matcher: Pattern[str] _long_break_matcher: Pattern[str] - _Section: Type[Any] # Nested class + _Section: type[Any] # Nested class def __init__(self, prog: str, indent_increment: int = ..., max_help_position: int = ..., width: int | None = ...) -> None: ... def _indent(self) -> None: ... def _dedent(self) -> None: ... @@ -229,7 +322,7 @@ class HelpFormatter: def _format_text(self, text: str) -> str: ... def _format_action(self, action: Action) -> str: ... def _format_action_invocation(self, action: Action) -> str: ... 
- def _metavar_formatter(self, action: Action, default_metavar: str) -> Callable[[int], Tuple[str, ...]]: ... + def _metavar_formatter(self, action: Action, default_metavar: str) -> Callable[[int], tuple[str, ...]]: ... def _format_args(self, action: Action, default_metavar: str) -> str: ... def _expand_help(self, action: Action) -> str: ... def _iter_indented_subactions(self, action: Action) -> Generator[Action, None, None]: ... @@ -254,7 +347,7 @@ class Action(_AttributeHolder): choices: Iterable[Any] | None required: bool help: str | None - metavar: str | Tuple[str, ...] | None + metavar: str | tuple[str, ...] | None def __init__( self, option_strings: Sequence[str], @@ -262,11 +355,11 @@ class Action(_AttributeHolder): nargs: int | str | None = ..., const: _T | None = ..., default: _T | str | None = ..., - type: Callable[[str], _T] | Callable[[str], _T] | FileType | None = ..., + type: Callable[[str], _T] | FileType | None = ..., choices: Iterable[_T] | None = ..., required: bool = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., ) -> None: ... def __call__( self, parser: ArgumentParser, namespace: Namespace, values: str | Sequence[Any] | None, option_string: str | None = ... @@ -281,18 +374,19 @@ if sys.version_info >= (3, 9): option_strings: Sequence[str], dest: str, default: _T | str | None = ..., - type: Callable[[str], _T] | Callable[[str], _T] | FileType | None = ..., + type: Callable[[str], _T] | FileType | None = ..., choices: Iterable[_T] | None = ..., required: bool = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., ) -> None: ... class Namespace(_AttributeHolder): def __init__(self, **kwargs: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... - def __setattr__(self, name: str, value: Any) -> None: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... 
def __contains__(self, key: str) -> bool: ... + def __eq__(self, other: object) -> bool: ... class FileType: # undocumented @@ -330,7 +424,7 @@ class _StoreConstAction(Action): default: Any = ..., required: bool = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., ) -> None: ... # undocumented @@ -358,7 +452,7 @@ class _AppendConstAction(Action): default: Any = ..., required: bool = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., ) -> None: ... # undocumented @@ -379,36 +473,36 @@ class _VersionAction(Action): ) -> None: ... # undocumented -class _SubParsersAction(Action): - _ChoicesPseudoAction: Type[Any] # nested class +class _SubParsersAction(Action, Generic[_ArgumentParserT]): + _ChoicesPseudoAction: type[Any] # nested class _prog_prefix: str - _parser_class: Type[ArgumentParser] - _name_parser_map: dict[str, ArgumentParser] - choices: dict[str, ArgumentParser] + _parser_class: type[_ArgumentParserT] + _name_parser_map: dict[str, _ArgumentParserT] + choices: dict[str, _ArgumentParserT] _choices_actions: list[Action] if sys.version_info >= (3, 7): def __init__( self, option_strings: Sequence[str], prog: str, - parser_class: Type[ArgumentParser], + parser_class: type[_ArgumentParserT], dest: str = ..., required: bool = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., ) -> None: ... else: def __init__( self, option_strings: Sequence[str], prog: str, - parser_class: Type[ArgumentParser], + parser_class: type[_ArgumentParserT], dest: str = ..., help: str | None = ..., - metavar: str | Tuple[str, ...] | None = ..., + metavar: str | tuple[str, ...] | None = ..., ) -> None: ... # TODO: Type keyword args properly. - def add_parser(self, name: str, **kwargs: Any) -> ArgumentParser: ... 
+ def add_parser(self, name: str, **kwargs: Any) -> _ArgumentParserT: ... def _get_subactions(self) -> list[Action]: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/array.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/array.pyi index c32136d559bf..dcd1429bf1a6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/array.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/array.pyi @@ -1,6 +1,7 @@ import sys -from typing import Any, BinaryIO, Generic, Iterable, MutableSequence, Tuple, TypeVar, Union, overload -from typing_extensions import Literal +from _typeshed import Self +from typing import Any, BinaryIO, Generic, Iterable, MutableSequence, TypeVar, Union, overload +from typing_extensions import Literal, SupportsIndex _IntTypeCode = Literal["b", "B", "h", "H", "i", "I", "l", "L", "q", "Q"] _FloatTypeCode = Literal["f", "d"] @@ -15,17 +16,17 @@ class array(MutableSequence[_T], Generic[_T]): typecode: _TypeCode itemsize: int @overload - def __init__(self: array[int], typecode: _IntTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self: array[int], __typecode: _IntTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... @overload - def __init__(self: array[float], typecode: _FloatTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self: array[float], __typecode: _FloatTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... @overload - def __init__(self: array[str], typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... + def __init__(self: array[str], __typecode: _UnicodeTypeCode, __initializer: bytes | Iterable[_T] = ...) -> None: ... @overload def __init__(self, typecode: str, __initializer: bytes | Iterable[_T] = ...) -> None: ... def append(self, __v: _T) -> None: ... - def buffer_info(self) -> Tuple[int, int]: ... + def buffer_info(self) -> tuple[int, int]: ... def byteswap(self) -> None: ... 
- def count(self, __v: Any) -> int: ... + def count(self, __v: _T) -> int: ... def extend(self, __bb: Iterable[_T]) -> None: ... def frombytes(self, __buffer: bytes) -> None: ... def fromfile(self, __f: BinaryIO, __n: int) -> None: ... @@ -34,10 +35,11 @@ class array(MutableSequence[_T], Generic[_T]): if sys.version_info >= (3, 10): def index(self, __v: _T, __start: int = ..., __stop: int = ...) -> int: ... else: - def index(self, __v: _T) -> int: ... # type: ignore # Overrides Sequence + def index(self, __v: _T) -> int: ... # type: ignore[override] + def insert(self, __i: int, __v: _T) -> None: ... def pop(self, __i: int = ...) -> _T: ... - def remove(self, __v: Any) -> None: ... + def remove(self, __v: _T) -> None: ... def reverse(self) -> None: ... def tobytes(self) -> bytes: ... def tofile(self, __f: BinaryIO) -> None: ... @@ -46,24 +48,28 @@ class array(MutableSequence[_T], Generic[_T]): if sys.version_info < (3, 9): def fromstring(self, __buffer: bytes) -> None: ... def tostring(self) -> bytes: ... + + def __contains__(self, __key: object) -> bool: ... def __len__(self) -> int: ... @overload - def __getitem__(self, i: int) -> _T: ... + def __getitem__(self, __i: SupportsIndex) -> _T: ... @overload - def __getitem__(self, s: slice) -> array[_T]: ... - @overload # type: ignore # Overrides MutableSequence - def __setitem__(self, i: int, o: _T) -> None: ... + def __getitem__(self, __s: slice) -> array[_T]: ... + @overload # type: ignore[override] + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... @overload - def __setitem__(self, s: slice, o: array[_T]) -> None: ... - def __delitem__(self, i: int | slice) -> None: ... - def __add__(self, x: array[_T]) -> array[_T]: ... - def __ge__(self, other: array[_T]) -> bool: ... - def __gt__(self, other: array[_T]) -> bool: ... - def __iadd__(self, x: array[_T]) -> array[_T]: ... # type: ignore # Overrides MutableSequence - def __imul__(self, n: int) -> array[_T]: ... 
- def __le__(self, other: array[_T]) -> bool: ... - def __lt__(self, other: array[_T]) -> bool: ... - def __mul__(self, n: int) -> array[_T]: ... - def __rmul__(self, n: int) -> array[_T]: ... + def __setitem__(self, __s: slice, __o: array[_T]) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + def __add__(self, __x: array[_T]) -> array[_T]: ... + def __ge__(self, __other: array[_T]) -> bool: ... + def __gt__(self, __other: array[_T]) -> bool: ... + def __iadd__(self: Self, __x: array[_T]) -> Self: ... # type: ignore[override] + def __imul__(self: Self, __n: int) -> Self: ... + def __le__(self, __other: array[_T]) -> bool: ... + def __lt__(self, __other: array[_T]) -> bool: ... + def __mul__(self, __n: int) -> array[_T]: ... + def __rmul__(self, __n: int) -> array[_T]: ... + def __copy__(self) -> array[_T]: ... + def __deepcopy__(self, __unused: Any) -> array[_T]: ... ArrayType = array diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ast.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ast.pyi index 1649e4ab41be..5a86d6888b2f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ast.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ast.pyi @@ -8,22 +8,24 @@ # sys. import sys import typing as _typing +from _ast import * from typing import Any, Iterator, TypeVar, overload from typing_extensions import Literal -from _ast import * # type: ignore - if sys.version_info >= (3, 8): class Num(Constant): value: complex + class Str(Constant): value: str # Aliases for value, for backwards compatibility s: str + class Bytes(Constant): value: bytes # Aliases for value, for backwards compatibility s: bytes + class NameConstant(Constant): ... class Ellipsis(Constant): ... @@ -89,6 +91,7 @@ class NodeVisitor: def visit_Constant(self, node: Constant) -> Any: ... if sys.version_info >= (3, 8): def visit_NamedExpr(self, node: NamedExpr) -> Any: ... + def visit_Attribute(self, node: Attribute) -> Any: ... 
def visit_Subscript(self, node: Subscript) -> Any: ... def visit_Starred(self, node: Starred) -> Any: ... @@ -166,6 +169,57 @@ if sys.version_info >= (3, 8): feature_version: None | int | _typing.Tuple[int, int] = ..., ) -> Module: ... @overload + def parse( + source: str | bytes, + filename: str | bytes, + mode: Literal["eval"], + *, + type_comments: bool = ..., + feature_version: None | int | _typing.Tuple[int, int] = ..., + ) -> Expression: ... + @overload + def parse( + source: str | bytes, + filename: str | bytes, + mode: Literal["func_type"], + *, + type_comments: bool = ..., + feature_version: None | int | _typing.Tuple[int, int] = ..., + ) -> FunctionType: ... + @overload + def parse( + source: str | bytes, + filename: str | bytes, + mode: Literal["single"], + *, + type_comments: bool = ..., + feature_version: None | int | _typing.Tuple[int, int] = ..., + ) -> Interactive: ... + @overload + def parse( + source: str | bytes, + *, + mode: Literal["eval"], + type_comments: bool = ..., + feature_version: None | int | _typing.Tuple[int, int] = ..., + ) -> Expression: ... + @overload + def parse( + source: str | bytes, + *, + mode: Literal["func_type"], + type_comments: bool = ..., + feature_version: None | int | _typing.Tuple[int, int] = ..., + ) -> FunctionType: ... + @overload + def parse( + source: str | bytes, + *, + mode: Literal["single"], + type_comments: bool = ..., + feature_version: None | int | _typing.Tuple[int, int] = ..., + ) -> Interactive: ... + @overload def parse( source: str | bytes, filename: str | bytes = ..., @@ -179,6 +233,14 @@ else: @overload def parse(source: str | bytes, filename: str | bytes = ..., mode: Literal["exec"] = ...) -> Module: ... @overload + def parse(source: str | bytes, filename: str | bytes, mode: Literal["eval"]) -> Expression: ... + @overload + def parse(source: str | bytes, filename: str | bytes, mode: Literal["single"]) -> Interactive: ... 
+ @overload + def parse(source: str | bytes, *, mode: Literal["eval"]) -> Expression: ... + @overload + def parse(source: str | bytes, *, mode: Literal["single"]) -> Interactive: ... + @overload def parse(source: str | bytes, filename: str | bytes = ..., mode: str = ...) -> AST: ... if sys.version_info >= (3, 9): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/__init__.pyi index 42e7aa9ba6d8..21eb4d9ef3bf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/__init__.pyi @@ -1,8 +1,7 @@ import sys -from typing import Type from .base_events import BaseEventLoop as BaseEventLoop -from .coroutines import coroutine as coroutine, iscoroutine as iscoroutine, iscoroutinefunction as iscoroutinefunction +from .coroutines import iscoroutine as iscoroutine, iscoroutinefunction as iscoroutinefunction from .events import ( AbstractEventLoop as AbstractEventLoop, AbstractEventLoopPolicy as AbstractEventLoopPolicy, @@ -71,39 +70,51 @@ from .transports import ( WriteTransport as WriteTransport, ) -if sys.version_info >= (3, 7): - from .events import get_running_loop as get_running_loop +if sys.version_info < (3, 11): + from .coroutines import coroutine as coroutine + +if sys.version_info >= (3, 9): + from .threads import to_thread as to_thread + if sys.version_info >= (3, 8): from .exceptions import ( CancelledError as CancelledError, IncompleteReadError as IncompleteReadError, InvalidStateError as InvalidStateError, LimitOverrunError as LimitOverrunError, - SendfileNotAvailableError as SendfileNotAvailableError, TimeoutError as TimeoutError, ) else: - if sys.version_info >= (3, 7): - from .events import SendfileNotAvailableError as SendfileNotAvailableError from .futures import CancelledError as CancelledError, InvalidStateError as InvalidStateError, TimeoutError as TimeoutError from 
.streams import IncompleteReadError as IncompleteReadError, LimitOverrunError as LimitOverrunError -if sys.version_info >= (3, 7): - from .protocols import BufferedProtocol as BufferedProtocol +if sys.version_info >= (3, 8): + from .exceptions import SendfileNotAvailableError as SendfileNotAvailableError +elif sys.version_info >= (3, 7): + from .events import SendfileNotAvailableError as SendfileNotAvailableError if sys.version_info >= (3, 7): + from .events import get_running_loop as get_running_loop + from .protocols import BufferedProtocol as BufferedProtocol from .runners import run as run + from .tasks import ( + _enter_task as _enter_task, + _leave_task as _leave_task, + _register_task as _register_task, + _unregister_task as _unregister_task, + all_tasks as all_tasks, + create_task as create_task, + current_task as current_task, + ) -if sys.version_info >= (3, 7): - from .tasks import all_tasks as all_tasks, create_task as create_task, current_task as current_task -if sys.version_info >= (3, 9): - from .threads import to_thread as to_thread +if sys.version_info >= (3, 11): + from .taskgroups import TaskGroup as TaskGroup + +DefaultEventLoopPolicy: type[AbstractEventLoopPolicy] -DefaultEventLoopPolicy: Type[AbstractEventLoopPolicy] if sys.platform == "win32": from .windows_events import * - -if sys.platform != "win32": +else: from .streams import open_unix_connection as open_unix_connection, start_unix_server as start_unix_server from .unix_events import ( AbstractChildWatcher as AbstractChildWatcher, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_events.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_events.pyi index 349050258a16..02aff1ac23f7 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_events.pyi @@ -1,26 +1,29 @@ import ssl import sys from _typeshed import FileDescriptorLike -from abc import 
ABCMeta from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle from asyncio.futures import Future from asyncio.protocols import BaseProtocol from asyncio.tasks import Task -from asyncio.transports import BaseTransport +from asyncio.transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport from collections.abc import Iterable from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket -from typing import IO, Any, Awaitable, Callable, Dict, Generator, Sequence, Tuple, TypeVar, Union, overload +from typing import IO, Any, Awaitable, Callable, Coroutine, Generator, Sequence, TypeVar, Union, overload from typing_extensions import Literal if sys.version_info >= (3, 7): from contextvars import Context + __all__ = ("BaseEventLoop",) +else: + __all__ = ["BaseEventLoop"] + _T = TypeVar("_T") -_Context = Dict[str, Any] +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context = dict[str, Any] _ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any] _ProtocolFactory = Callable[[], BaseProtocol] _SSLContext = Union[bool, None, ssl.SSLContext] -_TransProtPair = Tuple[BaseTransport, BaseProtocol] class Server(AbstractServer): if sys.version_info >= (3, 7): @@ -33,18 +36,24 @@ class Server(AbstractServer): backlog: int, ssl_handshake_timeout: float | None, ) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def is_serving(self) -> bool: ... + async def start_serving(self) -> None: ... + async def serve_forever(self) -> None: ... else: def __init__(self, loop: AbstractEventLoop, sockets: list[socket]) -> None: ... if sys.version_info >= (3, 8): @property - def sockets(self) -> Tuple[socket, ...]: ... + def sockets(self) -> tuple[socket, ...]: ... elif sys.version_info >= (3, 7): @property def sockets(self) -> list[socket]: ... else: sockets: list[socket] | None + def close(self) -> None: ... + async def wait_closed(self) -> None: ... 
-class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): +class BaseEventLoop(AbstractEventLoop): def run_forever(self) -> None: ... # Can't use a union, see mypy issue # 1873. @overload @@ -69,14 +78,16 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... def call_later(self, delay: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... + def time(self) -> float: ... # Future methods def create_future(self) -> Future[Any]: ... # Tasks methods if sys.version_info >= (3, 8): - def create_task(self, coro: Awaitable[_T] | Generator[Any, None, _T], *, name: object = ...) -> Task[_T]: ... + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: object = ...) -> Task[_T]: ... else: - def create_task(self, coro: Awaitable[_T] | Generator[Any, None, _T]) -> Task[_T]: ... + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... + def set_task_factory(self, factory: Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]] | None) -> None: ... def get_task_factory(self) -> Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]] | None: ... # Methods for interacting with threads @@ -84,18 +95,19 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any, context: Context | None = ...) -> Handle: ... else: def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... + def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Future[_T]: ... def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. 
async def getaddrinfo( self, host: str | None, port: str | int | None, *, family: int = ..., type: int = ..., proto: int = ..., flags: int = ... - ) -> list[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int] | Tuple[str, int, int, int]]]: ... - async def getnameinfo(self, sockaddr: Tuple[str, int] | Tuple[str, int, int, int], flags: int = ...) -> Tuple[str, str]: ... + ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... if sys.version_info >= (3, 8): @overload async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: str = ..., port: int = ..., *, @@ -104,16 +116,16 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): proto: int = ..., flags: int = ..., sock: None = ..., - local_addr: Tuple[str, int] | None = ..., + local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... @overload async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: None = ..., port: None = ..., *, @@ -127,12 +139,12 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... 
elif sys.version_info >= (3, 7): @overload async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: str = ..., port: int = ..., *, @@ -141,14 +153,14 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): proto: int = ..., flags: int = ..., sock: None = ..., - local_addr: Tuple[str, int] | None = ..., + local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... @overload async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: None = ..., port: None = ..., *, @@ -160,12 +172,12 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): local_addr: None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... else: @overload async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: str = ..., port: int = ..., *, @@ -174,13 +186,13 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): proto: int = ..., flags: int = ..., sock: None = ..., - local_addr: Tuple[str, int] | None = ..., + local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... @overload async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: None = ..., port: None = ..., *, @@ -191,7 +203,7 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): sock: socket, local_addr: None = ..., server_hostname: str | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... 
if sys.version_info >= (3, 7): async def sock_sendfile( self, sock: socket, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool | None = ... @@ -232,12 +244,12 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): ) -> Server: ... async def connect_accepted_socket( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], sock: socket, *, ssl: _SSLContext = ..., ssl_handshake_timeout: float | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... async def sendfile( self, transport: BaseTransport, file: IO[bytes], offset: int = ..., count: int | None = ..., *, fallback: bool = ... ) -> int: ... @@ -283,28 +295,47 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): reuse_port: bool | None = ..., ) -> Server: ... async def connect_accepted_socket( - self, protocol_factory: _ProtocolFactory, sock: socket, *, ssl: _SSLContext = ... - ) -> _TransProtPair: ... - async def create_datagram_endpoint( - self, - protocol_factory: _ProtocolFactory, - local_addr: Tuple[str, int] | None = ..., - remote_addr: Tuple[str, int] | None = ..., - *, - family: int = ..., - proto: int = ..., - flags: int = ..., - reuse_address: bool | None = ..., - reuse_port: bool | None = ..., - allow_broadcast: bool | None = ..., - sock: socket | None = ..., - ) -> _TransProtPair: ... + self, protocol_factory: Callable[[], _ProtocolT], sock: socket, *, ssl: _SSLContext = ... + ) -> tuple[BaseTransport, _ProtocolT]: ... + if sys.version_info >= (3, 11): + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | None = ..., + remote_addr: tuple[str, int] | None = ..., + *, + family: int = ..., + proto: int = ..., + flags: int = ..., + reuse_port: bool | None = ..., + allow_broadcast: bool | None = ..., + sock: socket | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... 
+ else: + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | None = ..., + remote_addr: tuple[str, int] | None = ..., + *, + family: int = ..., + proto: int = ..., + flags: int = ..., + reuse_address: bool | None = ..., + reuse_port: bool | None = ..., + allow_broadcast: bool | None = ..., + sock: socket | None = ..., + ) -> tuple[BaseTransport, _ProtocolT]: ... # Pipes and subprocesses. - async def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... - async def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... async def subprocess_shell( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], cmd: bytes | str, *, stdin: int | IO[Any] | None = ..., @@ -317,10 +348,10 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): errors: None = ..., text: Literal[False, None] = ..., **kwargs: Any, - ) -> _TransProtPair: ... + ) -> tuple[SubprocessTransport, _ProtocolT]: ... async def subprocess_exec( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], program: Any, *args: Any, stdin: int | IO[Any] | None = ..., @@ -332,23 +363,23 @@ class BaseEventLoop(AbstractEventLoop, metaclass=ABCMeta): encoding: None = ..., errors: None = ..., **kwargs: Any, - ) -> _TransProtPair: ... + ) -> tuple[SubprocessTransport, _ProtocolT]: ... def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... - def remove_reader(self, fd: FileDescriptorLike) -> None: ... + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... 
def add_writer(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... - def remove_writer(self, fd: FileDescriptorLike) -> None: ... + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... # Completion based I/O methods returning Futures prior to 3.7 if sys.version_info >= (3, 7): async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... async def sock_recv_into(self, sock: socket, buf: bytearray) -> int: ... async def sock_sendall(self, sock: socket, data: bytes) -> None: ... async def sock_connect(self, sock: socket, address: _Address) -> None: ... - async def sock_accept(self, sock: socket) -> Tuple[socket, _RetAddress]: ... + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... else: def sock_recv(self, sock: socket, nbytes: int) -> Future[bytes]: ... def sock_sendall(self, sock: socket, data: bytes) -> Future[None]: ... def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... - def sock_accept(self, sock: socket) -> Future[Tuple[socket, _RetAddress]]: ... + def sock_accept(self, sock: socket) -> Future[tuple[socket, _RetAddress]]: ... # Signal handling. def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_signal_handler(self, sig: int) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_futures.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_futures.pyi index 1c5f03e8ea72..1b7fe4671ca8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_futures.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_futures.pyi @@ -1,5 +1,5 @@ import sys -from typing import Any, Callable, Sequence, Tuple +from typing import Any, Callable, Sequence from typing_extensions import Literal if sys.version_info >= (3, 7): @@ -7,14 +7,22 @@ if sys.version_info >= (3, 7): from . 
import futures +if sys.version_info >= (3, 7): + __all__ = () +else: + __all__: list[str] = [] + +# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py +# but it leads to circular import error in pytype tool. +# That's why the import order is reversed. +from .futures import isfuture as isfuture + _PENDING: Literal["PENDING"] # undocumented _CANCELLED: Literal["CANCELLED"] # undocumented _FINISHED: Literal["FINISHED"] # undocumented -def isfuture(obj: object) -> bool: ... - if sys.version_info >= (3, 7): - def _format_callbacks(cb: Sequence[Tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented + def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented else: def _format_callbacks(cb: Sequence[Callable[[futures.Future[Any]], None]]) -> str: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_subprocess.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_subprocess.pyi index 6165e0bb88d0..94c7c01dd1bc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_subprocess.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/base_subprocess.pyi @@ -1,5 +1,6 @@ import subprocess -from typing import IO, Any, Callable, Deque, Optional, Sequence, Tuple, Union +from collections import deque +from typing import IO, Any, Callable, Optional, Sequence, Union from . 
import events, futures, protocols, transports @@ -14,7 +15,7 @@ class BaseSubprocessTransport(transports.SubprocessTransport): _pid: int | None # undocumented _returncode: int | None # undocumented _exit_waiters: list[futures.Future[Any]] # undocumented - _pending_calls: Deque[Tuple[Callable[..., Any], Tuple[Any, ...]]] # undocumented + _pending_calls: deque[tuple[Callable[..., Any], tuple[Any, ...]]] # undocumented _pipes: dict[int, _File] # undocumented _finished: bool # undocumented def __init__( @@ -45,11 +46,11 @@ class BaseSubprocessTransport(transports.SubprocessTransport): def get_protocol(self) -> protocols.BaseProtocol: ... def is_closing(self) -> bool: ... def close(self) -> None: ... - def get_pid(self) -> int | None: ... # type: ignore + def get_pid(self) -> int | None: ... # type: ignore[override] def get_returncode(self) -> int | None: ... - def get_pipe_transport(self, fd: int) -> _File: ... # type: ignore + def get_pipe_transport(self, fd: int) -> _File: ... # type: ignore[override] def _check_proc(self) -> None: ... # undocumented - def send_signal(self, signal: int) -> None: ... # type: ignore + def send_signal(self, signal: int) -> None: ... # type: ignore[override] def terminate(self) -> None: ... def kill(self) -> None: ... async def _connect_pipes(self, waiter: futures.Future[Any] | None) -> None: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/compat.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/compat.pyi index 1beeea9d6c3f..f6f1bbca7faf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/compat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/compat.pyi @@ -1,7 +1,5 @@ -import sys +PY34: bool +PY35: bool +PY352: bool -if sys.version_info < (3, 7): - PY34: bool - PY35: bool - PY352: bool - def flatten_list_bytes(list_of_data: list[bytes]) -> bytes: ... +def flatten_list_bytes(list_of_data: list[bytes]) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/constants.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/constants.pyi index 2010fe9123ae..230cf4faf483 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/constants.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/constants.pyi @@ -1,12 +1,13 @@ import enum import sys +from typing_extensions import Literal -LOG_THRESHOLD_FOR_CONNLOST_WRITES: int -ACCEPT_RETRY_DELAY: int -DEBUG_STACK_DEPTH: int +LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5] +ACCEPT_RETRY_DELAY: Literal[1] +DEBUG_STACK_DEPTH: Literal[10] if sys.version_info >= (3, 7): SSL_HANDSHAKE_TIMEOUT: float - SENDFILE_FALLBACK_READBUFFER_SIZE: int + SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144] class _SendfileMode(enum.Enum): UNSUPPORTED: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi index e514b884c201..6d4d507c6a4c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/coroutines.pyi @@ -1,7 +1,27 @@ -from typing import Any, Callable, TypeVar +import sys +import types +from collections.abc import Coroutine +from typing import Any +from typing_extensions import TypeGuard -_F = TypeVar("_F", bound=Callable[..., Any]) +if sys.version_info >= (3, 11): + __all__ = ("iscoroutinefunction", "iscoroutine") +elif sys.version_info >= (3, 7): + __all__ = ("coroutine", "iscoroutinefunction", "iscoroutine") +else: + __all__ = ["coroutine", "iscoroutinefunction", "iscoroutine"] + +if sys.version_info < (3, 11): + from collections.abc import Callable + from typing import TypeVar + + _F = TypeVar("_F", bound=Callable[..., Any]) + def coroutine(func: _F) -> _F: ... -def coroutine(func: _F) -> _F: ... def iscoroutinefunction(func: object) -> bool: ... 
-def iscoroutine(obj: object) -> bool: ... + +if sys.version_info >= (3, 8): + def iscoroutine(obj: object) -> TypeGuard[Coroutine[Any, Any, Any]]: ... + +else: + def iscoroutine(obj: object) -> TypeGuard[types.GeneratorType[Any, Any, Any] | Coroutine[Any, Any, Any]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/events.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/events.pyi index 9b77a915e5c9..37079dc091e5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/events.pyi @@ -3,28 +3,85 @@ import sys from _typeshed import FileDescriptorLike, Self from abc import ABCMeta, abstractmethod from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket -from typing import IO, Any, Awaitable, Callable, Dict, Generator, Sequence, Tuple, TypeVar, Union, overload +from typing import IO, Any, Awaitable, Callable, Coroutine, Generator, Sequence, TypeVar, Union, overload from typing_extensions import Literal from .base_events import Server from .futures import Future from .protocols import BaseProtocol from .tasks import Task -from .transports import BaseTransport +from .transports import BaseTransport, ReadTransport, SubprocessTransport, WriteTransport from .unix_events import AbstractChildWatcher if sys.version_info >= (3, 7): from contextvars import Context +if sys.version_info >= (3, 8): + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +elif sys.version_info >= (3, 7): + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "SendfileNotAvailableError", + 
"get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +else: + __all__ = [ + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "_get_running_loop", + ] + _T = TypeVar("_T") -_Context = Dict[str, Any] +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context = dict[str, Any] _ExceptionHandler = Callable[[AbstractEventLoop, _Context], Any] _ProtocolFactory = Callable[[], BaseProtocol] _SSLContext = Union[bool, None, ssl.SSLContext] -_TransProtPair = Tuple[BaseTransport, BaseProtocol] class Handle: - _cancelled = False + _cancelled: bool _args: Sequence[Any] if sys.version_info >= (3, 7): def __init__( @@ -32,7 +89,7 @@ class Handle: ) -> None: ... else: def __init__(self, callback: Callable[..., Any], args: Sequence[Any], loop: AbstractEventLoop) -> None: ... - def __repr__(self) -> str: ... + def cancel(self) -> None: ... def _run(self) -> None: ... if sys.version_info >= (3, 7): @@ -50,22 +107,36 @@ class TimerHandle(Handle): ) -> None: ... else: def __init__(self, when: float, callback: Callable[..., Any], args: Sequence[Any], loop: AbstractEventLoop) -> None: ... + def __hash__(self) -> int: ... if sys.version_info >= (3, 7): def when(self) -> float: ... + def __lt__(self, other: TimerHandle) -> bool: ... + def __le__(self, other: TimerHandle) -> bool: ... + def __gt__(self, other: TimerHandle) -> bool: ... + def __ge__(self, other: TimerHandle) -> bool: ... + def __eq__(self, other: object) -> bool: ... + class AbstractServer: + @abstractmethod def close(self) -> None: ... if sys.version_info >= (3, 7): async def __aenter__(self: Self) -> Self: ... 
async def __aexit__(self, *exc: Any) -> None: ... + @abstractmethod def get_loop(self) -> AbstractEventLoop: ... + @abstractmethod def is_serving(self) -> bool: ... + @abstractmethod async def start_serving(self) -> None: ... + @abstractmethod async def serve_forever(self) -> None: ... + + @abstractmethod async def wait_closed(self) -> None: ... -class AbstractEventLoop(metaclass=ABCMeta): +class AbstractEventLoop: slow_callback_duration: float @abstractmethod def run_forever(self) -> None: ... @@ -87,12 +158,25 @@ class AbstractEventLoop(metaclass=ABCMeta): @abstractmethod async def shutdown_asyncgens(self) -> None: ... # Methods scheduling callbacks. All these return Handles. - @abstractmethod - def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... - @abstractmethod - def call_later(self, delay: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... - @abstractmethod - def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... + if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon(self, callback: Callable[..., Any], *args: Any, context: Context | None = ...) -> Handle: ... + @abstractmethod + def call_later( + self, delay: float, callback: Callable[..., Any], *args: Any, context: Context | None = ... + ) -> TimerHandle: ... + @abstractmethod + def call_at( + self, when: float, callback: Callable[..., Any], *args: Any, context: Context | None = ... + ) -> TimerHandle: ... + else: + @abstractmethod + def call_soon(self, callback: Callable[..., Any], *args: Any) -> Handle: ... + @abstractmethod + def call_later(self, delay: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... + @abstractmethod + def call_at(self, when: float, callback: Callable[..., Any], *args: Any) -> TimerHandle: ... + @abstractmethod def time(self) -> float: ... 
# Future methods @@ -101,34 +185,42 @@ class AbstractEventLoop(metaclass=ABCMeta): # Tasks methods if sys.version_info >= (3, 8): @abstractmethod - def create_task(self, coro: Awaitable[_T] | Generator[Any, None, _T], *, name: str | None = ...) -> Task[_T]: ... + def create_task( + self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T], *, name: str | None = ... + ) -> Task[_T]: ... else: @abstractmethod - def create_task(self, coro: Awaitable[_T] | Generator[Any, None, _T]) -> Task[_T]: ... + def create_task(self, coro: Coroutine[Any, Any, _T] | Generator[Any, None, _T]) -> Task[_T]: ... + @abstractmethod def set_task_factory(self, factory: Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]] | None) -> None: ... @abstractmethod def get_task_factory(self) -> Callable[[AbstractEventLoop, Generator[Any, None, _T]], Future[_T]] | None: ... # Methods for interacting with threads + if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any, context: Context | None = ...) -> Handle: ... + else: + @abstractmethod + def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... + @abstractmethod - def call_soon_threadsafe(self, callback: Callable[..., Any], *args: Any) -> Handle: ... - @abstractmethod - def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Awaitable[_T]: ... + def run_in_executor(self, executor: Any, func: Callable[..., _T], *args: Any) -> Future[_T]: ... @abstractmethod def set_default_executor(self, executor: Any) -> None: ... # Network I/O methods returning Futures. @abstractmethod async def getaddrinfo( self, host: str | None, port: str | int | None, *, family: int = ..., type: int = ..., proto: int = ..., flags: int = ... - ) -> list[Tuple[AddressFamily, SocketKind, int, str, Tuple[str, int] | Tuple[str, int, int, int]]]: ... 
+ ) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... @abstractmethod - async def getnameinfo(self, sockaddr: Tuple[str, int] | Tuple[str, int, int, int], flags: int = ...) -> Tuple[str, str]: ... + async def getnameinfo(self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = ...) -> tuple[str, str]: ... if sys.version_info >= (3, 8): @overload @abstractmethod async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: str = ..., port: int = ..., *, @@ -137,17 +229,17 @@ class AbstractEventLoop(metaclass=ABCMeta): proto: int = ..., flags: int = ..., sock: None = ..., - local_addr: Tuple[str, int] | None = ..., + local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: None = ..., port: None = ..., *, @@ -161,13 +253,13 @@ class AbstractEventLoop(metaclass=ABCMeta): ssl_handshake_timeout: float | None = ..., happy_eyeballs_delay: float | None = ..., interleave: int | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... elif sys.version_info >= (3, 7): @overload @abstractmethod async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: str = ..., port: int = ..., *, @@ -176,15 +268,15 @@ class AbstractEventLoop(metaclass=ABCMeta): proto: int = ..., flags: int = ..., sock: None = ..., - local_addr: Tuple[str, int] | None = ..., + local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> _TransProtPair: ... 
+ ) -> tuple[BaseTransport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: None = ..., port: None = ..., *, @@ -196,13 +288,13 @@ class AbstractEventLoop(metaclass=ABCMeta): local_addr: None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... else: @overload @abstractmethod async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: str = ..., port: int = ..., *, @@ -211,14 +303,14 @@ class AbstractEventLoop(metaclass=ABCMeta): proto: int = ..., flags: int = ..., sock: None = ..., - local_addr: Tuple[str, int] | None = ..., + local_addr: tuple[str, int] | None = ..., server_hostname: str | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... @overload @abstractmethod async def create_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], host: None = ..., port: None = ..., *, @@ -229,7 +321,7 @@ class AbstractEventLoop(metaclass=ABCMeta): sock: socket, local_addr: None = ..., server_hostname: str | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... if sys.version_info >= (3, 7): @abstractmethod async def sock_sendfile( @@ -273,14 +365,14 @@ class AbstractEventLoop(metaclass=ABCMeta): ) -> Server: ... async def create_unix_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], path: str | None = ..., *, ssl: _SSLContext = ..., sock: socket | None = ..., server_hostname: str | None = ..., ssl_handshake_timeout: float | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... 
async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -342,13 +434,13 @@ class AbstractEventLoop(metaclass=ABCMeta): ) -> Server: ... async def create_unix_connection( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], path: str, *, ssl: _SSLContext = ..., sock: socket | None = ..., server_hostname: str | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... async def create_unix_server( self, protocol_factory: _ProtocolFactory, @@ -358,12 +450,13 @@ class AbstractEventLoop(metaclass=ABCMeta): backlog: int = ..., ssl: _SSLContext = ..., ) -> Server: ... + @abstractmethod async def create_datagram_endpoint( self, - protocol_factory: _ProtocolFactory, - local_addr: Tuple[str, int] | None = ..., - remote_addr: Tuple[str, int] | None = ..., + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | None = ..., + remote_addr: tuple[str, int] | None = ..., *, family: int = ..., proto: int = ..., @@ -372,16 +465,20 @@ class AbstractEventLoop(metaclass=ABCMeta): reuse_port: bool | None = ..., allow_broadcast: bool | None = ..., sock: socket | None = ..., - ) -> _TransProtPair: ... + ) -> tuple[BaseTransport, _ProtocolT]: ... # Pipes and subprocesses. @abstractmethod - async def connect_read_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... @abstractmethod - async def connect_write_pipe(self, protocol_factory: _ProtocolFactory, pipe: Any) -> _TransProtPair: ... + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... 
@abstractmethod async def subprocess_shell( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], cmd: bytes | str, *, stdin: int | IO[Any] | None = ..., @@ -394,11 +491,11 @@ class AbstractEventLoop(metaclass=ABCMeta): errors: None = ..., text: Literal[False, None] = ..., **kwargs: Any, - ) -> _TransProtPair: ... + ) -> tuple[SubprocessTransport, _ProtocolT]: ... @abstractmethod async def subprocess_exec( self, - protocol_factory: _ProtocolFactory, + protocol_factory: Callable[[], _ProtocolT], program: Any, *args: Any, stdin: int | IO[Any] | None = ..., @@ -410,15 +507,15 @@ class AbstractEventLoop(metaclass=ABCMeta): encoding: None = ..., errors: None = ..., **kwargs: Any, - ) -> _TransProtPair: ... + ) -> tuple[SubprocessTransport, _ProtocolT]: ... @abstractmethod def add_reader(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod - def remove_reader(self, fd: FileDescriptorLike) -> None: ... + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... @abstractmethod def add_writer(self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any) -> None: ... @abstractmethod - def remove_writer(self, fd: FileDescriptorLike) -> None: ... + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... # Completion based I/O methods returning Futures prior to 3.7 if sys.version_info >= (3, 7): @abstractmethod @@ -430,7 +527,7 @@ class AbstractEventLoop(metaclass=ABCMeta): @abstractmethod async def sock_connect(self, sock: socket, address: _Address) -> None: ... @abstractmethod - async def sock_accept(self, sock: socket) -> Tuple[socket, _RetAddress]: ... + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... else: @abstractmethod def sock_recv(self, sock: socket, nbytes: int) -> Future[bytes]: ... @@ -439,7 +536,7 @@ class AbstractEventLoop(metaclass=ABCMeta): @abstractmethod def sock_connect(self, sock: socket, address: _Address) -> Future[None]: ... 
@abstractmethod - def sock_accept(self, sock: socket) -> Future[Tuple[socket, _RetAddress]]: ... + def sock_accept(self, sock: socket) -> Future[tuple[socket, _RetAddress]]: ... # Signal handling. @abstractmethod def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... @@ -463,7 +560,7 @@ class AbstractEventLoop(metaclass=ABCMeta): @abstractmethod async def shutdown_default_executor(self) -> None: ... -class AbstractEventLoopPolicy(metaclass=ABCMeta): +class AbstractEventLoopPolicy: @abstractmethod def get_event_loop(self) -> AbstractEventLoop: ... @abstractmethod diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/exceptions.pyi index 5b99966329ac..a1bc2c16ab1f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/exceptions.pyi @@ -1,14 +1,22 @@ -import sys +__all__ = ( + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", +) -if sys.version_info >= (3, 8): - class CancelledError(BaseException): ... - class TimeoutError(Exception): ... - class InvalidStateError(Exception): ... - class SendfileNotAvailableError(RuntimeError): ... - class IncompleteReadError(EOFError): - expected: int | None - partial: bytes - def __init__(self, partial: bytes, expected: int | None) -> None: ... - class LimitOverrunError(Exception): - consumed: int - def __init__(self, message: str, consumed: int) -> None: ... +class CancelledError(BaseException): ... +class TimeoutError(Exception): ... +class InvalidStateError(Exception): ... +class SendfileNotAvailableError(RuntimeError): ... + +class IncompleteReadError(EOFError): + expected: int | None + partial: bytes + def __init__(self, partial: bytes, expected: int | None) -> None: ... 
+ +class LimitOverrunError(Exception): + consumed: int + def __init__(self, message: str, consumed: int) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/format_helpers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/format_helpers.pyi index 29cb8839716e..bf4412264ced 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/format_helpers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/format_helpers.pyi @@ -1,20 +1,18 @@ import functools -import sys import traceback from types import FrameType, FunctionType -from typing import Any, Iterable, Tuple, Union, overload +from typing import Any, Iterable, Union, overload class _HasWrapper: __wrapper__: _HasWrapper | FunctionType _FuncType = Union[FunctionType, _HasWrapper, functools.partial[Any], functools.partialmethod[Any]] -if sys.version_info >= (3, 7): - @overload - def _get_function_source(func: _FuncType) -> Tuple[str, int]: ... - @overload - def _get_function_source(func: object) -> Tuple[str, int] | None: ... - def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... - def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... - def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = ...) -> str: ... - def extract_stack(f: FrameType | None = ..., limit: int | None = ...) -> traceback.StackSummary: ... +@overload +def _get_function_source(func: _FuncType) -> tuple[str, int]: ... +@overload +def _get_function_source(func: object) -> tuple[str, int] | None: ... +def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... +def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... +def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = ...) -> str: ... +def extract_stack(f: FrameType | None = ..., limit: int | None = ...) -> traceback.StackSummary: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/futures.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/futures.pyi index 4942796b88ae..70eba2cd39ec 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/futures.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/futures.pyi @@ -1,11 +1,14 @@ import sys +from _typeshed import Self from concurrent.futures._base import Error, Future as _ConcurrentFuture -from typing import Any, Awaitable, Callable, Generator, Iterable, Tuple, TypeVar +from typing import Any, Awaitable, Callable, Generator, Iterable, TypeVar +from typing_extensions import TypeGuard from .events import AbstractEventLoop if sys.version_info < (3, 8): from concurrent.futures import CancelledError as CancelledError, TimeoutError as TimeoutError + class InvalidStateError(Error): ... if sys.version_info >= (3, 7): @@ -14,8 +17,19 @@ if sys.version_info >= (3, 7): if sys.version_info >= (3, 9): from types import GenericAlias +if sys.version_info >= (3, 8): + __all__ = ("Future", "wrap_future", "isfuture") +elif sys.version_info >= (3, 7): + __all__ = ("CancelledError", "TimeoutError", "InvalidStateError", "Future", "wrap_future", "isfuture") +else: + __all__ = ["CancelledError", "TimeoutError", "InvalidStateError", "Future", "wrap_future", "isfuture"] + _T = TypeVar("_T") -_S = TypeVar("_S") + +# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py +# but it leads to circular import error in pytype tool. +# That's why the import order is reversed. +def isfuture(obj: object) -> TypeGuard[Future[Any]]: ... if sys.version_info < (3, 7): class _TracebackLogger: @@ -26,33 +40,32 @@ if sys.version_info < (3, 7): def clear(self) -> None: ... def __del__(self) -> None: ... -def isfuture(obj: object) -> bool: ... 
- class Future(Awaitable[_T], Iterable[_T]): _state: str _exception: BaseException - _blocking = False - _log_traceback = False + _blocking: bool + _log_traceback: bool + _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... - def __repr__(self) -> str: ... def __del__(self) -> None: ... if sys.version_info >= (3, 7): def get_loop(self) -> AbstractEventLoop: ... - def _callbacks(self: _S) -> list[Tuple[Callable[[_S], Any], Context]]: ... - def add_done_callback(self: _S, __fn: Callable[[_S], Any], *, context: Context | None = ...) -> None: ... + def _callbacks(self: Self) -> list[tuple[Callable[[Self], Any], Context]]: ... + def add_done_callback(self: Self, __fn: Callable[[Self], Any], *, context: Context | None = ...) -> None: ... else: @property - def _callbacks(self: _S) -> list[Callable[[_S], Any]]: ... - def add_done_callback(self: _S, __fn: Callable[[_S], Any]) -> None: ... + def _callbacks(self: Self) -> list[Callable[[Self], Any]]: ... + def add_done_callback(self: Self, __fn: Callable[[Self], Any]) -> None: ... if sys.version_info >= (3, 9): def cancel(self, msg: Any | None = ...) -> bool: ... else: def cancel(self) -> bool: ... + def cancelled(self) -> bool: ... def done(self) -> bool: ... def result(self) -> _T: ... def exception(self) -> BaseException | None: ... - def remove_done_callback(self: _S, __fn: Callable[[_S], Any]) -> int: ... + def remove_done_callback(self: Self, __fn: Callable[[Self], Any]) -> int: ... def set_result(self, __result: _T) -> None: ... def set_exception(self, __exception: type | BaseException) -> None: ... def __iter__(self) -> Generator[Any, None, _T]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/locks.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/locks.pyi index 901232740773..d0edcbdcaeda 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/locks.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/locks.pyi @@ -1,41 +1,47 @@ import sys +from collections import deque from types import TracebackType -from typing import Any, Awaitable, Callable, Deque, Generator, Type, TypeVar +from typing import Any, Callable, Generator, TypeVar +from typing_extensions import Literal from .events import AbstractEventLoop from .futures import Future +if sys.version_info >= (3, 7): + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore") +else: + __all__ = ["Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore"] + _T = TypeVar("_T") if sys.version_info >= (3, 9): class _ContextManagerMixin: - def __init__(self, lock: Lock | Semaphore) -> None: ... - def __aenter__(self) -> Awaitable[None]: ... - def __aexit__( - self, exc_type: Type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None - ) -> Awaitable[None]: ... + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... else: class _ContextManager: def __init__(self, lock: Lock | Semaphore) -> None: ... - def __enter__(self) -> object: ... + def __enter__(self) -> None: ... def __exit__(self, *args: Any) -> None: ... + class _ContextManagerMixin: - def __init__(self, lock: Lock | Semaphore) -> None: ... # Apparently this exists to *prohibit* use as a context manager. - def __enter__(self) -> object: ... - def __exit__(self, *args: Any) -> None: ... + # def __enter__(self) -> NoReturn: ... see: https://github.com/python/typing/issues/1043 + # def __exit__(self, *args: Any) -> None: ... 
def __iter__(self) -> Generator[Any, None, _ContextManager]: ... def __await__(self) -> Generator[Any, None, _ContextManager]: ... - def __aenter__(self) -> Awaitable[None]: ... - def __aexit__( - self, exc_type: Type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None - ) -> Awaitable[None]: ... + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... class Lock(_ContextManagerMixin): def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... def locked(self) -> bool: ... - async def acquire(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... class Event: @@ -43,24 +49,24 @@ class Event: def is_set(self) -> bool: ... def set(self) -> None: ... def clear(self) -> None: ... - async def wait(self) -> bool: ... + async def wait(self) -> Literal[True]: ... class Condition(_ContextManagerMixin): def __init__(self, lock: Lock | None = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... def locked(self) -> bool: ... - async def acquire(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... - async def wait(self) -> bool: ... + async def wait(self) -> Literal[True]: ... async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... def notify(self, n: int = ...) -> None: ... def notify_all(self) -> None: ... class Semaphore(_ContextManagerMixin): _value: int - _waiters: Deque[Future[Any]] + _waiters: deque[Future[Any]] def __init__(self, value: int = ..., *, loop: AbstractEventLoop | None = ...) -> None: ... def locked(self) -> bool: ... - async def acquire(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... def release(self) -> None: ... def _wake_up_next(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/mixins.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/mixins.pyi new file mode 100644 index 000000000000..4c11865c8968 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/mixins.pyi @@ -0,0 +1,7 @@ +import threading +from typing import NoReturn + +_global_lock: threading.Lock + +class _LoopBoundMixin: + def __init__(self, *, loop: NoReturn = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/proactor_events.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/proactor_events.pyi index 6c8c558e5dfa..4ffb40160420 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/proactor_events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/proactor_events.pyi @@ -1,14 +1,19 @@ import sys from socket import socket -from typing import Any, Mapping, Type -from typing_extensions import Literal, Protocol +from typing import Any, Mapping, Protocol +from typing_extensions import Literal from . import base_events, constants, events, futures, streams, transports +if sys.version_info >= (3, 7): + __all__ = ("BaseProactorEventLoop",) +else: + __all__ = ["BaseProactorEventLoop"] + if sys.version_info >= (3, 8): class _WarnCallbackProtocol(Protocol): def __call__( - self, message: str, category: Type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... + self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... ) -> None: ... class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): @@ -21,23 +26,35 @@ class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTr extra: Mapping[Any, Any] | None = ..., server: events.AbstractServer | None = ..., ) -> None: ... - def __repr__(self) -> str: ... 
if sys.version_info >= (3, 8): def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ... else: def __del__(self) -> None: ... + def get_write_buffer_size(self) -> int: ... class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): - def __init__( - self, - loop: events.AbstractEventLoop, - sock: socket, - protocol: streams.StreamReaderProtocol, - waiter: futures.Future[Any] | None = ..., - extra: Mapping[Any, Any] | None = ..., - server: events.AbstractServer | None = ..., - ) -> None: ... + if sys.version_info >= (3, 10): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = ..., + extra: Mapping[Any, Any] | None = ..., + server: events.AbstractServer | None = ..., + buffer_size: int = ..., + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = ..., + extra: Mapping[Any, Any] | None = ..., + server: events.AbstractServer | None = ..., + ) -> None: ... 
class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi index ec8131b02e3d..7b5169702dba 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/protocols.pyi @@ -1,6 +1,11 @@ import sys from asyncio import transports -from typing import Tuple +from typing import Any + +if sys.version_info >= (3, 7): + __all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol") +else: + __all__ = ["BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol"] class BaseProtocol: def connection_made(self, transport: transports.BaseTransport) -> None: ... @@ -16,10 +21,15 @@ if sys.version_info >= (3, 7): class BufferedProtocol(BaseProtocol): def get_buffer(self, sizehint: int) -> bytearray: ... def buffer_updated(self, nbytes: int) -> None: ... + def eof_received(self) -> bool | None: ... class DatagramProtocol(BaseProtocol): - def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore - def datagram_received(self, data: bytes, addr: Tuple[str, int]) -> None: ... + def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override] + # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. + # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. + # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. + # See https://github.com/python/typing/issues/566 + def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ... def error_received(self, exc: Exception) -> None: ... 
class SubprocessProtocol(BaseProtocol): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/queues.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/queues.pyi index aff4af727b08..93ea9d9fc6fe 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/queues.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/queues.pyi @@ -5,6 +5,11 @@ from typing import Any, Generic, TypeVar if sys.version_info >= (3, 9): from types import GenericAlias +if sys.version_info >= (3, 7): + __all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty") +else: + __all__ = ["Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty"] + class QueueEmpty(Exception): ... class QueueFull(Exception): ... @@ -15,8 +20,6 @@ class Queue(Generic[_T]): def _init(self, maxsize: int) -> None: ... def _get(self) -> _T: ... def _put(self, item: _T) -> None: ... - def __repr__(self) -> str: ... - def __str__(self) -> str: ... def _format(self) -> str: ... def qsize(self) -> int: ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi index 3f0f42eef48b..7e799dd22fd8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/runners.pyi @@ -1,10 +1,10 @@ import sys +from typing import Awaitable, TypeVar -if sys.version_info >= (3, 7): - from typing import Awaitable, TypeVar +__all__ = ("run",) +_T = TypeVar("_T") +if sys.version_info >= (3, 8): + def run(main: Awaitable[_T], *, debug: bool | None = ...) -> _T: ... - _T = TypeVar("_T") - if sys.version_info >= (3, 8): - def run(main: Awaitable[_T], *, debug: bool | None = ...) -> _T: ... - else: - def run(main: Awaitable[_T], *, debug: bool = ...) -> _T: ... +else: + def run(main: Awaitable[_T], *, debug: bool = ...) -> _T: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/selector_events.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/selector_events.pyi index bcbcd2fbe4b8..698bfef351a1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/selector_events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/selector_events.pyi @@ -1,6 +1,12 @@ import selectors +import sys from . import base_events +if sys.version_info >= (3, 7): + __all__ = ("BaseSelectorEventLoop",) +else: + __all__ = ["BaseSelectorEventLoop"] + class BaseSelectorEventLoop(base_events.BaseEventLoop): def __init__(self, selector: selectors.BaseSelector | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/sslproto.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/sslproto.pyi index 9dda54cc84bb..4ecd7a11dd56 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/sslproto.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/sslproto.pyi @@ -1,6 +1,7 @@ import ssl import sys -from typing import Any, Callable, ClassVar, Deque, Tuple +from collections import deque +from typing import Any, Callable, ClassVar from typing_extensions import Literal from . import constants, events, futures, protocols, transports @@ -38,8 +39,8 @@ class _SSLPipe: def do_handshake(self, callback: Callable[[BaseException | None], None] | None = ...) -> list[bytes]: ... def shutdown(self, callback: Callable[[], None] | None = ...) -> list[bytes]: ... def feed_eof(self) -> None: ... - def feed_ssldata(self, data: bytes, only_handshake: bool = ...) -> Tuple[list[bytes], list[bytes]]: ... - def feed_appdata(self, data: bytes, offset: int = ...) -> Tuple[list[bytes], int]: ... + def feed_ssldata(self, data: bytes, only_handshake: bool = ...) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = ...) -> tuple[list[bytes], int]: ... 
class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): @@ -56,6 +57,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): def close(self) -> None: ... if sys.version_info >= (3, 7): def is_reading(self) -> bool: ... + def pause_reading(self) -> None: ... def resume_reading(self) -> None: ... def set_write_buffer_limits(self, high: int | None = ..., low: int | None = ...) -> None: ... @@ -63,6 +65,7 @@ class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): if sys.version_info >= (3, 7): @property def _protocol_paused(self) -> bool: ... + def write(self, data: bytes) -> None: ... def can_write_eof(self) -> Literal[False]: ... def abort(self) -> None: ... @@ -73,7 +76,7 @@ class SSLProtocol(protocols.Protocol): _server_hostname: str | None _sslcontext: ssl.SSLContext _extra: dict[str, Any] - _write_backlog: Deque[Tuple[bytes, int]] + _write_backlog: deque[tuple[bytes, int]] _write_buffer_size: int _waiter: futures.Future[Any] _loop: events.AbstractEventLoop @@ -113,6 +116,7 @@ class SSLProtocol(protocols.Protocol): ) -> None: ... if sys.version_info >= (3, 7): def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... + def _wakeup_waiter(self, exc: BaseException | None = ...) -> None: ... def connection_made(self, transport: transports.BaseTransport) -> None: ... def connection_lost(self, exc: BaseException | None) -> None: ... @@ -126,6 +130,7 @@ class SSLProtocol(protocols.Protocol): def _start_handshake(self) -> None: ... if sys.version_info >= (3, 7): def _check_handshake_timeout(self) -> None: ... + def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... def _process_write_backlog(self) -> None: ... def _fatal_error(self, exc: BaseException, message: str = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/staggered.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/staggered.pyi index 6ac405ab77fd..fc4bfad76984 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/staggered.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/staggered.pyi @@ -1,9 +1,9 @@ -import sys -from typing import Any, Awaitable, Callable, Iterable, Tuple +from typing import Any, Awaitable, Callable, Iterable from . import events -if sys.version_info >= (3, 8): - async def staggered_race( - coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = ... - ) -> Tuple[Any, int | None, list[Exception | None]]: ... +__all__ = ("staggered_race",) + +async def staggered_race( + coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = ... +) -> tuple[Any, int | None, list[Exception | None]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/streams.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/streams.pyi index 6598110f87f9..2b0993ee03d7 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/streams.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/streams.pyi @@ -1,10 +1,69 @@ import sys -from _typeshed import StrPath -from typing import Any, AsyncIterator, Awaitable, Callable, Iterable, Optional, Tuple +from _typeshed import Self, StrPath +from typing import Any, AsyncIterator, Awaitable, Callable, Iterable, Optional from . 
import events, protocols, transports from .base_events import Server +if sys.platform == "win32": + if sys.version_info >= (3, 8): + __all__ = ("StreamReader", "StreamWriter", "StreamReaderProtocol", "open_connection", "start_server") + elif sys.version_info >= (3, 7): + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + ) + else: + __all__ = [ + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + ] +else: + if sys.version_info >= (3, 8): + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "open_unix_connection", + "start_unix_server", + ) + elif sys.version_info >= (3, 7): + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + "open_unix_connection", + "start_unix_server", + ) + else: + __all__ = [ + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + "open_unix_connection", + "start_unix_server", + ] + _ClientConnectedCallback = Callable[[StreamReader, StreamWriter], Optional[Awaitable[None]]] if sys.version_info < (3, 8): @@ -12,47 +71,76 @@ if sys.version_info < (3, 8): expected: int | None partial: bytes def __init__(self, partial: bytes, expected: int | None) -> None: ... + class LimitOverrunError(Exception): consumed: int def __init__(self, message: str, consumed: int) -> None: ... -async def open_connection( - host: str | None = ..., - port: int | str | None = ..., - *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., - ssl_handshake_timeout: float | None = ..., - **kwds: Any, -) -> Tuple[StreamReader, StreamWriter]: ... 
-async def start_server( - client_connected_cb: _ClientConnectedCallback, - host: str | None = ..., - port: int | str | None = ..., - *, - loop: events.AbstractEventLoop | None = ..., - limit: int = ..., - ssl_handshake_timeout: float | None = ..., - **kwds: Any, -) -> Server: ... +if sys.version_info >= (3, 10): + async def open_connection( + host: str | None = ..., + port: int | str | None = ..., + *, + limit: int = ..., + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | None = ..., + port: int | str | None = ..., + *, + limit: int = ..., + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> Server: ... -if sys.platform != "win32": - if sys.version_info >= (3, 7): - _PathType = StrPath - else: - _PathType = str - async def open_unix_connection( - path: _PathType | None = ..., *, loop: events.AbstractEventLoop | None = ..., limit: int = ..., **kwds: Any - ) -> Tuple[StreamReader, StreamWriter]: ... - async def start_unix_server( +else: + async def open_connection( + host: str | None = ..., + port: int | str | None = ..., + *, + loop: events.AbstractEventLoop | None = ..., + limit: int = ..., + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( client_connected_cb: _ClientConnectedCallback, - path: _PathType | None = ..., + host: str | None = ..., + port: int | str | None = ..., *, loop: events.AbstractEventLoop | None = ..., limit: int = ..., + ssl_handshake_timeout: float | None = ..., **kwds: Any, ) -> Server: ... +if sys.platform != "win32": + if sys.version_info >= (3, 7): + _PathType = StrPath + else: + _PathType = str + if sys.version_info >= (3, 10): + async def open_unix_connection( + path: _PathType | None = ..., *, limit: int = ..., **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... 
+ async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, path: _PathType | None = ..., *, limit: int = ..., **kwds: Any + ) -> Server: ... + else: + async def open_unix_connection( + path: _PathType | None = ..., *, loop: events.AbstractEventLoop | None = ..., limit: int = ..., **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, + path: _PathType | None = ..., + *, + loop: events.AbstractEventLoop | None = ..., + limit: int = ..., + **kwds: Any, + ) -> Server: ... + class FlowControlMixin(protocols.Protocol): def __init__(self, loop: events.AbstractEventLoop | None = ...) -> None: ... @@ -86,10 +174,11 @@ class StreamWriter: if sys.version_info >= (3, 7): def is_closing(self) -> bool: ... async def wait_closed(self) -> None: ... + def get_extra_info(self, name: str, default: Any = ...) -> Any: ... async def drain(self) -> None: ... -class StreamReader: +class StreamReader(AsyncIterator[bytes]): def __init__(self, limit: int = ..., loop: events.AbstractEventLoop | None = ...) -> None: ... def exception(self) -> Exception: ... def set_exception(self, exc: Exception) -> None: ... @@ -101,5 +190,5 @@ class StreamReader: async def readuntil(self, separator: bytes = ...) -> bytes: ... async def read(self, n: int = ...) -> bytes: ... async def readexactly(self, n: int) -> bytes: ... - def __aiter__(self) -> AsyncIterator[bytes]: ... + def __aiter__(self: Self) -> Self: ... async def __anext__(self) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/subprocess.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/subprocess.pyi index 428260af5465..f2725a8992ae 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/subprocess.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/subprocess.pyi @@ -2,9 +2,14 @@ import subprocess import sys from _typeshed import StrOrBytesPath from asyncio import events, protocols, streams, transports -from typing import IO, Any, Callable, Tuple, Union +from typing import IO, Any, Callable, Union from typing_extensions import Literal +if sys.version_info >= (3, 7): + __all__ = ("create_subprocess_exec", "create_subprocess_shell") +else: + __all__ = ["create_subprocess_exec", "create_subprocess_shell"] + if sys.version_info >= (3, 8): _ExecArg = StrOrBytesPath else: @@ -38,7 +43,7 @@ class Process: def send_signal(self, signal: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... - async def communicate(self, input: bytes | None = ...) -> Tuple[bytes, bytes]: ... + async def communicate(self, input: bytes | None = ...) -> tuple[bytes, bytes]: ... if sys.version_info >= (3, 10): async def create_subprocess_shell( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/taskgroups.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/taskgroups.pyi new file mode 100644 index 000000000000..ce527e1e8158 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/taskgroups.pyi @@ -0,0 +1,15 @@ +# This only exists in 3.11+. See VERSIONS. + +from _typeshed import Self +from types import TracebackType +from typing import Any, Coroutine, Generator, TypeVar + +from .tasks import Task + +_T = TypeVar("_T") + +class TaskGroup: + def __init__(self) -> None: ... + async def __aenter__(self: Self) -> Self: ... 
+ async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def create_task(self, coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ...) -> Task[_T]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/tasks.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/tasks.pyi index 8cedc1ef0884..9acb96b91820 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/tasks.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/tasks.pyi @@ -2,7 +2,7 @@ import concurrent.futures import sys from collections.abc import Awaitable, Generator, Iterable, Iterator from types import FrameType -from typing import Any, Generic, Optional, Set, TextIO, Tuple, TypeVar, Union, overload +from typing import Any, Coroutine, Generic, Optional, TextIO, TypeVar, Union, overload from typing_extensions import Literal from .events import AbstractEventLoop @@ -11,6 +11,44 @@ from .futures import Future if sys.version_info >= (3, 9): from types import GenericAlias +if sys.version_info >= (3, 7): + __all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + "_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", + ) +else: + __all__ = [ + "Task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + ] + _T = TypeVar("_T") _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") @@ -21,9 +59,9 @@ _FT = TypeVar("_FT", bound=Future[Any]) _FutureT = Union[Future[_T], Generator[Any, None, _T], Awaitable[_T]] _TaskYieldType = Optional[Future[object]] -FIRST_EXCEPTION: str -FIRST_COMPLETED: str -ALL_COMPLETED: str 
+FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED +FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION +ALL_COMPLETED = concurrent.futures.ALL_COMPLETED if sys.version_info >= (3, 10): def as_completed(fs: Iterable[_FutureT[_T]], *, timeout: float | None = ...) -> Iterator[Future[_T]]: ... @@ -34,7 +72,7 @@ else: ) -> Iterator[Future[_T]]: ... @overload -def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = ...) -> _FT: ... # type: ignore +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = ...) -> _FT: ... # type: ignore[misc] @overload def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = ...) -> Task[_T]: ... @@ -47,208 +85,210 @@ def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | No # typing PR #1550 for discussion. if sys.version_info >= (3, 10): @overload - def gather(coro_or_future1: _FutureT[_T1], *, return_exceptions: Literal[False] = ...) -> Future[Tuple[_T1]]: ... + def gather(__coro_or_future1: _FutureT[_T1], *, return_exceptions: Literal[False] = ...) -> Future[tuple[_T1]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], *, return_exceptions: Literal[False] = ... - ) -> Future[Tuple[_T1, _T2]]: ... + __coro_or_future1: _FutureT[_T1], __coro_or_future2: _FutureT[_T2], *, return_exceptions: Literal[False] = ... + ) -> Future[tuple[_T1, _T2]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], *, return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2, _T3]]: ... + ) -> Future[tuple[_T1, _T2, _T3]]: ... 
@overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], *, return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ... + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], - coro_or_future5: _FutureT[_T5], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], + __coro_or_future5: _FutureT[_T5], *, return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def gather( - coro_or_future1: _FutureT[Any], - coro_or_future2: _FutureT[Any], - coro_or_future3: _FutureT[Any], - coro_or_future4: _FutureT[Any], - coro_or_future5: _FutureT[Any], - coro_or_future6: _FutureT[Any], + __coro_or_future1: _FutureT[Any], + __coro_or_future2: _FutureT[Any], + __coro_or_future3: _FutureT[Any], + __coro_or_future4: _FutureT[Any], + __coro_or_future5: _FutureT[Any], + __coro_or_future6: _FutureT[Any], *coros_or_futures: _FutureT[Any], return_exceptions: bool = ..., ) -> Future[list[Any]]: ... @overload - def gather(coro_or_future1: _FutureT[_T1], *, return_exceptions: bool = ...) -> Future[Tuple[_T1 | BaseException]]: ... + def gather(__coro_or_future1: _FutureT[_T1], *, return_exceptions: bool = ...) -> Future[tuple[_T1 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], coro_or_future2: _FutureT[_T2], *, return_exceptions: bool = ... - ) -> Future[Tuple[_T1 | BaseException, _T2 | BaseException]]: ... 
+ __coro_or_future1: _FutureT[_T1], __coro_or_future2: _FutureT[_T2], *, return_exceptions: bool = ... + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], *, return_exceptions: bool = ..., - ) -> Future[Tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], *, return_exceptions: bool = ..., - ) -> Future[Tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], - coro_or_future5: _FutureT[_T5], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], + __coro_or_future5: _FutureT[_T5], *, return_exceptions: bool = ..., ) -> Future[ - Tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... else: @overload def gather( - coro_or_future1: _FutureT[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ... - ) -> Future[Tuple[_T1]]: ... 
+ __coro_or_future1: _FutureT[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ... + ) -> Future[tuple[_T1]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2]]: ... + ) -> Future[tuple[_T1, _T2]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2, _T3]]: ... + ) -> Future[tuple[_T1, _T2, _T3]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2, _T3, _T4]]: ... + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], - coro_or_future5: _FutureT[_T5], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], + __coro_or_future5: _FutureT[_T5], *, loop: AbstractEventLoop | None = ..., return_exceptions: Literal[False] = ..., - ) -> Future[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... 
@overload def gather( - coro_or_future1: _FutureT[Any], - coro_or_future2: _FutureT[Any], - coro_or_future3: _FutureT[Any], - coro_or_future4: _FutureT[Any], - coro_or_future5: _FutureT[Any], - coro_or_future6: _FutureT[Any], + __coro_or_future1: _FutureT[Any], + __coro_or_future2: _FutureT[Any], + __coro_or_future3: _FutureT[Any], + __coro_or_future4: _FutureT[Any], + __coro_or_future5: _FutureT[Any], + __coro_or_future6: _FutureT[Any], *coros_or_futures: _FutureT[Any], loop: AbstractEventLoop | None = ..., return_exceptions: bool = ..., ) -> Future[list[Any]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool = ... - ) -> Future[Tuple[_T1 | BaseException]]: ... + __coro_or_future1: _FutureT[_T1], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool = ... + ) -> Future[tuple[_T1 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool = ..., - ) -> Future[Tuple[_T1 | BaseException, _T2 | BaseException]]: ... + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool = ..., - ) -> Future[Tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... 
@overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool = ..., - ) -> Future[Tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... @overload def gather( - coro_or_future1: _FutureT[_T1], - coro_or_future2: _FutureT[_T2], - coro_or_future3: _FutureT[_T3], - coro_or_future4: _FutureT[_T4], - coro_or_future5: _FutureT[_T5], + __coro_or_future1: _FutureT[_T1], + __coro_or_future2: _FutureT[_T2], + __coro_or_future3: _FutureT[_T3], + __coro_or_future4: _FutureT[_T4], + __coro_or_future5: _FutureT[_T5], *, loop: AbstractEventLoop | None = ..., return_exceptions: bool = ..., ) -> Future[ - Tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] ]: ... def run_coroutine_threadsafe(coro: _FutureT[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... if sys.version_info >= (3, 10): def shield(arg: _FutureT[_T]) -> Future[_T]: ... - def sleep(delay: float, result: _T = ...) -> Future[_T]: ... + async def sleep(delay: float, result: _T = ...) -> _T: ... @overload - def wait(fs: Iterable[_FT], *, timeout: float | None = ..., return_when: str = ...) -> Future[Tuple[Set[_FT], Set[_FT]]]: ... # type: ignore + async def wait(fs: Iterable[_FT], *, timeout: float | None = ..., return_when: str = ...) -> tuple[set[_FT], set[_FT]]: ... 
# type: ignore[misc] @overload - def wait( + async def wait( fs: Iterable[Awaitable[_T]], *, timeout: float | None = ..., return_when: str = ... - ) -> Future[Tuple[Set[Task[_T]], Set[Task[_T]]]]: ... - def wait_for(fut: _FutureT[_T], timeout: float | None) -> Future[_T]: ... + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + async def wait_for(fut: _FutureT[_T], timeout: float | None) -> _T: ... else: def shield(arg: _FutureT[_T], *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... - def sleep(delay: float, result: _T = ..., *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... + async def sleep(delay: float, result: _T = ..., *, loop: AbstractEventLoop | None = ...) -> _T: ... @overload - def wait(fs: Iterable[_FT], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ...) -> Future[Tuple[Set[_FT], Set[_FT]]]: ... # type: ignore + async def wait( # type: ignore[misc] + fs: Iterable[_FT], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... + ) -> tuple[set[_FT], set[_FT]]: ... @overload - def wait( + async def wait( fs: Iterable[Awaitable[_T]], *, loop: AbstractEventLoop | None = ..., timeout: float | None = ..., return_when: str = ... - ) -> Future[Tuple[Set[Task[_T]], Set[Task[_T]]]]: ... - def wait_for(fut: _FutureT[_T], timeout: float | None, *, loop: AbstractEventLoop | None = ...) -> Future[_T]: ... + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + async def wait_for(fut: _FutureT[_T], timeout: float | None, *, loop: AbstractEventLoop | None = ...) -> _T: ... class Task(Future[_T], Generic[_T]): if sys.version_info >= (3, 8): @@ -263,31 +303,39 @@ class Task(Future[_T], Generic[_T]): def __init__( self, coro: Generator[_TaskYieldType, None, _T] | Awaitable[_T], *, loop: AbstractEventLoop = ... ) -> None: ... - def __repr__(self) -> str: ... if sys.version_info >= (3, 8): def get_coro(self) -> Generator[_TaskYieldType, None, _T] | Awaitable[_T]: ... 
def get_name(self) -> str: ... def set_name(self, __value: object) -> None: ... + def get_stack(self, *, limit: int | None = ...) -> list[FrameType]: ... def print_stack(self, *, limit: int | None = ..., file: TextIO | None = ...) -> None: ... if sys.version_info >= (3, 9): def cancel(self, msg: Any | None = ...) -> bool: ... else: def cancel(self) -> bool: ... + if sys.version_info >= (3, 11): + def cancelling(self) -> bool: ... + def uncancel(self) -> bool: ... if sys.version_info < (3, 9): @classmethod def current_task(cls, loop: AbstractEventLoop | None = ...) -> Task[Any] | None: ... @classmethod - def all_tasks(cls, loop: AbstractEventLoop | None = ...) -> Set[Task[Any]]: ... + def all_tasks(cls, loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... if sys.version_info < (3, 7): def _wakeup(self, fut: Future[Any]) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... if sys.version_info >= (3, 7): - def all_tasks(loop: AbstractEventLoop | None = ...) -> Set[Task[Any]]: ... + def all_tasks(loop: AbstractEventLoop | None = ...) -> set[Task[Any]]: ... if sys.version_info >= (3, 8): - def create_task(coro: Generator[_TaskYieldType, None, _T] | Awaitable[_T], *, name: str | None = ...) -> Task[_T]: ... + def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T], *, name: str | None = ...) -> Task[_T]: ... else: - def create_task(coro: Generator[_TaskYieldType, None, _T] | Awaitable[_T]) -> Task[_T]: ... + def create_task(coro: Generator[Any, None, _T] | Coroutine[Any, Any, _T]) -> Task[_T]: ... + def current_task(loop: AbstractEventLoop | None = ...) -> Task[Any] | None: ... + def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... + def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... + def _register_task(task: Task[Any]) -> None: ... + def _unregister_task(task: Task[Any]) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/threads.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/threads.pyi index 3f798d8ac862..ac3a5c56b222 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/threads.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/threads.pyi @@ -1,7 +1,8 @@ -import sys -from typing import Any, Callable, TypeVar +from typing import Callable, TypeVar +from typing_extensions import ParamSpec -_T = TypeVar("_T") +__all__ = ("to_thread",) +_P = ParamSpec("_P") +_R = TypeVar("_R") -if sys.version_info >= (3, 9): - async def to_thread(__func: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... +async def to_thread(__func: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/transports.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/transports.pyi index 51bf22b882b7..c24ded49cfb8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/transports.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/transports.pyi @@ -2,7 +2,12 @@ import sys from asyncio.events import AbstractEventLoop from asyncio.protocols import BaseProtocol from socket import _Address -from typing import Any, Mapping, Tuple +from typing import Any, Mapping + +if sys.version_info >= (3, 7): + __all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") +else: + __all__ = ["BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport"] class BaseTransport: def __init__(self, extra: Mapping[Any, Any] | None = ...) -> None: ... @@ -15,6 +20,7 @@ class BaseTransport: class ReadTransport(BaseTransport): if sys.version_info >= (3, 7): def is_reading(self) -> bool: ... + def pause_reading(self) -> None: ... def resume_reading(self) -> None: ... 
@@ -43,4 +49,4 @@ class SubprocessTransport(BaseTransport): class _FlowControlMixin(Transport): def __init__(self, extra: Mapping[Any, Any] | None = ..., loop: AbstractEventLoop | None = ...) -> None: ... - def get_write_buffer_limits(self) -> Tuple[int, int]: ... + def get_write_buffer_limits(self) -> tuple[int, int]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi index 16c65d5683f6..5bbdadf8c269 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/trsock.pyi @@ -1,47 +1,53 @@ import socket import sys +from builtins import type as Type # alias to avoid name clashes with property named "type" from types import TracebackType -from typing import Any, BinaryIO, Iterable, NoReturn, Tuple, Type, Union, overload +from typing import Any, BinaryIO, Iterable, NoReturn, Union, overload -if sys.version_info >= (3, 8): - # These are based in socket, maybe move them out into _typeshed.pyi or such - _Address = Union[Tuple[Any, ...], str] - _RetAddress = Any - _WriteBuffer = Union[bytearray, memoryview] - _CMSG = Tuple[int, int, bytes] - class TransportSocket: - def __init__(self, sock: socket.socket) -> None: ... - def _na(self, what: str) -> None: ... - @property - def family(self) -> int: ... - @property - def type(self) -> int: ... - @property - def proto(self) -> int: ... - def __getstate__(self) -> NoReturn: ... - def fileno(self) -> int: ... - def dup(self) -> socket.socket: ... - def get_inheritable(self) -> bool: ... - def shutdown(self, how: int) -> None: ... - @overload - def getsockopt(self, level: int, optname: int) -> int: ... - @overload - def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... - @overload - def setsockopt(self, level: int, optname: int, value: int | bytes) -> None: ... 
- @overload - def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... - def getpeername(self) -> _RetAddress: ... - def getsockname(self) -> _RetAddress: ... - def getsockbyname(self) -> NoReturn: ... # This method doesn't exist on socket, yet is passed through? - def accept(self) -> Tuple[socket.socket, _RetAddress]: ... +# These are based in socket, maybe move them out into _typeshed.pyi or such +_Address = Union[tuple[Any, ...], str] +_RetAddress = Any +_WriteBuffer = Union[bytearray, memoryview] +_CMSG = tuple[int, int, bytes] + +class TransportSocket: + def __init__(self, sock: socket.socket) -> None: ... + def _na(self, what: str) -> None: ... + @property + def family(self) -> int: ... + @property + def type(self) -> int: ... + @property + def proto(self) -> int: ... + def __getstate__(self) -> NoReturn: ... + def fileno(self) -> int: ... + def dup(self) -> socket.socket: ... + def get_inheritable(self) -> bool: ... + def shutdown(self, how: int) -> None: ... + @overload + def getsockopt(self, level: int, optname: int) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... + @overload + def setsockopt(self, level: int, optname: int, value: int | bytes) -> None: ... + @overload + def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... + def getsockbyname(self) -> NoReturn: ... # This method doesn't exist on socket, yet is passed through? + def settimeout(self, value: float | None) -> None: ... + def gettimeout(self) -> float | None: ... + def setblocking(self, flag: bool) -> None: ... + if sys.version_info < (3, 11): + def accept(self) -> tuple[socket.socket, _RetAddress]: ... def connect(self, address: _Address | bytes) -> None: ... def connect_ex(self, address: _Address | bytes) -> int: ... def bind(self, address: _Address | bytes) -> None: ... 
if sys.platform == "win32": - def ioctl(self, control: int, option: int | Tuple[int, int, int] | bool) -> None: ... + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ... else: - def ioctl(self, control: int, option: int | Tuple[int, int, int] | bool) -> NoReturn: ... + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> NoReturn: ... + def listen(self, __backlog: int = ...) -> None: ... def makefile(self) -> BinaryIO: ... def sendfile(self, file: BinaryIO, offset: int = ..., count: int | None = ...) -> int: ... @@ -55,6 +61,7 @@ if sys.version_info >= (3, 8): def sendmsg_afalg( self, msg: Iterable[bytes] = ..., *, op: int, iv: Any = ..., assoclen: int = ..., flags: int = ... ) -> NoReturn: ... + def sendmsg( self, __buffers: Iterable[bytes], __ancdata: Iterable[_CMSG] = ..., __flags: int = ..., __address: _Address = ... ) -> int: ... @@ -69,17 +76,15 @@ if sys.version_info >= (3, 8): def share(self, process_id: int) -> bytes: ... else: def share(self, process_id: int) -> NoReturn: ... + def recv_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> int: ... - def recvfrom_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> Tuple[int, _RetAddress]: ... + def recvfrom_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> tuple[int, _RetAddress]: ... def recvmsg_into( self, __buffers: Iterable[_WriteBuffer], __ancbufsize: int = ..., __flags: int = ... - ) -> Tuple[int, list[_CMSG], int, Any]: ... - def recvmsg(self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ...) -> Tuple[bytes, list[_CMSG], int, Any]: ... - def recvfrom(self, bufsize: int, flags: int = ...) -> Tuple[bytes, _RetAddress]: ... + ) -> tuple[int, list[_CMSG], int, Any]: ... + def recvmsg(self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ...) -> tuple[bytes, list[_CMSG], int, Any]: ... + def recvfrom(self, bufsize: int, flags: int = ...) 
-> tuple[bytes, _RetAddress]: ... def recv(self, bufsize: int, flags: int = ...) -> bytes: ... - def settimeout(self, value: float | None) -> None: ... - def gettimeout(self) -> float | None: ... - def setblocking(self, flag: bool) -> None: ... def __enter__(self) -> socket.socket: ... def __exit__( self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/unix_events.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/unix_events.pyi index e8e57a20a765..64fecc6b536f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/unix_events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/unix_events.pyi @@ -2,60 +2,66 @@ import sys import types from _typeshed import Self from socket import socket -from typing import Any, Callable, Type +from typing import Any, Callable from .base_events import Server from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy, _ProtocolFactory, _SSLContext from .selector_events import BaseSelectorEventLoop +# This is also technically not available on Win, +# but other parts of typeshed need this defintion. +# So, it is special cased. class AbstractChildWatcher: def add_child_handler(self, pid: int, callback: Callable[..., Any], *args: Any) -> None: ... def remove_child_handler(self, pid: int) -> bool: ... def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... def close(self) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, typ: Type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... if sys.version_info >= (3, 8): def is_active(self) -> bool: ... -class BaseChildWatcher(AbstractChildWatcher): - def __init__(self) -> None: ... 
+if sys.platform != "win32": + class BaseChildWatcher(AbstractChildWatcher): + def __init__(self) -> None: ... -class SafeChildWatcher(BaseChildWatcher): - def __enter__(self: Self) -> Self: ... - -class FastChildWatcher(BaseChildWatcher): - def __enter__(self: Self) -> Self: ... - -class _UnixSelectorEventLoop(BaseSelectorEventLoop): - if sys.version_info < (3, 7): - async def create_unix_server( - self, - protocol_factory: _ProtocolFactory, - path: str | None = ..., - *, - sock: socket | None = ..., - backlog: int = ..., - ssl: _SSLContext = ..., - ) -> Server: ... - -class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy): - def get_child_watcher(self) -> AbstractChildWatcher: ... - def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... - -SelectorEventLoop = _UnixSelectorEventLoop - -DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy - -if sys.version_info >= (3, 8): - - from typing import Protocol - class _Warn(Protocol): - def __call__( - self, message: str, category: Type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... - ) -> None: ... - class MultiLoopChildWatcher(AbstractChildWatcher): + class SafeChildWatcher(BaseChildWatcher): def __enter__(self: Self) -> Self: ... - class ThreadedChildWatcher(AbstractChildWatcher): + + class FastChildWatcher(BaseChildWatcher): def __enter__(self: Self) -> Self: ... - def __del__(self, _warn: _Warn = ...) -> None: ... + + class _UnixSelectorEventLoop(BaseSelectorEventLoop): + if sys.version_info < (3, 7): + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: str | None = ..., + *, + sock: socket | None = ..., + backlog: int = ..., + ssl: _SSLContext = ..., + ) -> Server: ... + + class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy): + def get_child_watcher(self) -> AbstractChildWatcher: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... 
+ SelectorEventLoop = _UnixSelectorEventLoop + + DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy + + if sys.version_info >= (3, 8): + + from typing import Protocol + + class _Warn(Protocol): + def __call__( + self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... + ) -> None: ... + + class MultiLoopChildWatcher(AbstractChildWatcher): + def __enter__(self: Self) -> Self: ... + + class ThreadedChildWatcher(AbstractChildWatcher): + def __enter__(self: Self) -> Self: ... + def __del__(self, _warn: _Warn = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_events.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_events.pyi index 19e456138394..1e4d286386c8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_events.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_events.pyi @@ -1,77 +1,83 @@ import socket import sys from _typeshed import WriteableBuffer -from typing import IO, Any, Callable, ClassVar, NoReturn, Tuple, Type +from typing import IO, Any, Callable, ClassVar, NoReturn +from typing_extensions import Literal from . 
import events, futures, proactor_events, selector_events, streams, windows_utils -__all__ = [ - "SelectorEventLoop", - "ProactorEventLoop", - "IocpProactor", - "DefaultEventLoopPolicy", - "WindowsSelectorEventLoopPolicy", - "WindowsProactorEventLoopPolicy", -] +if sys.platform == "win32": + if sys.version_info >= (3, 7): + __all__ = ( + "SelectorEventLoop", + "ProactorEventLoop", + "IocpProactor", + "DefaultEventLoopPolicy", + "WindowsSelectorEventLoopPolicy", + "WindowsProactorEventLoopPolicy", + ) + else: + __all__ = ["SelectorEventLoop", "ProactorEventLoop", "IocpProactor", "DefaultEventLoopPolicy"] -NULL: int -INFINITE: int -ERROR_CONNECTION_REFUSED: int -ERROR_CONNECTION_ABORTED: int -CONNECT_PIPE_INIT_DELAY: float -CONNECT_PIPE_MAX_DELAY: float + NULL: Literal[0] + INFINITE: Literal[0xFFFFFFFF] + ERROR_CONNECTION_REFUSED: Literal[1225] + ERROR_CONNECTION_ABORTED: Literal[1236] + CONNECT_PIPE_INIT_DELAY: float + CONNECT_PIPE_MAX_DELAY: float -class PipeServer: - def __init__(self, address: str) -> None: ... - def __del__(self) -> None: ... - def closed(self) -> bool: ... - def close(self) -> None: ... + class PipeServer: + def __init__(self, address: str) -> None: ... + def __del__(self) -> None: ... + def closed(self) -> bool: ... + def close(self) -> None: ... -class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... + class _WindowsSelectorEventLoop(selector_events.BaseSelectorEventLoop): ... -class ProactorEventLoop(proactor_events.BaseProactorEventLoop): - def __init__(self, proactor: IocpProactor | None = ...) -> None: ... - async def create_pipe_connection( - self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str - ) -> Tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... - async def start_serving_pipe( - self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str - ) -> list[PipeServer]: ... 
+ class ProactorEventLoop(proactor_events.BaseProactorEventLoop): + def __init__(self, proactor: IocpProactor | None = ...) -> None: ... + async def create_pipe_connection( + self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str + ) -> tuple[proactor_events._ProactorDuplexPipeTransport, streams.StreamReaderProtocol]: ... + async def start_serving_pipe( + self, protocol_factory: Callable[[], streams.StreamReaderProtocol], address: str + ) -> list[PipeServer]: ... -class IocpProactor: - def __init__(self, concurrency: int = ...) -> None: ... - def __repr__(self) -> str: ... - def __del__(self) -> None: ... - def set_loop(self, loop: events.AbstractEventLoop) -> None: ... - def select(self, timeout: int | None = ...) -> list[futures.Future[Any]]: ... - def recv(self, conn: socket.socket, nbytes: int, flags: int = ...) -> futures.Future[bytes]: ... - if sys.version_info >= (3, 7): - def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... - def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... - def accept(self, listener: socket.socket) -> futures.Future[Any]: ... - def connect(self, conn: socket.socket, address: bytes) -> futures.Future[Any]: ... - if sys.version_info >= (3, 7): - def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... - def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... - async def connect_pipe(self, address: bytes) -> windows_utils.PipeHandle: ... - def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = ...) -> bool: ... - def close(self) -> None: ... + class IocpProactor: + def __init__(self, concurrency: int = ...) -> None: ... + def __del__(self) -> None: ... + def set_loop(self, loop: events.AbstractEventLoop) -> None: ... + def select(self, timeout: int | None = ...) -> list[futures.Future[Any]]: ... 
+ def recv(self, conn: socket.socket, nbytes: int, flags: int = ...) -> futures.Future[bytes]: ... + if sys.version_info >= (3, 7): + def recv_into(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... -SelectorEventLoop = _WindowsSelectorEventLoop + def send(self, conn: socket.socket, buf: WriteableBuffer, flags: int = ...) -> futures.Future[Any]: ... + def accept(self, listener: socket.socket) -> futures.Future[Any]: ... + def connect(self, conn: socket.socket, address: bytes) -> futures.Future[Any]: ... + if sys.version_info >= (3, 7): + def sendfile(self, sock: socket.socket, file: IO[bytes], offset: int, count: int) -> futures.Future[Any]: ... + + def accept_pipe(self, pipe: socket.socket) -> futures.Future[Any]: ... + async def connect_pipe(self, address: bytes) -> windows_utils.PipeHandle: ... + def wait_for_handle(self, handle: windows_utils.PipeHandle, timeout: int | None = ...) -> bool: ... + def close(self) -> None: ... + SelectorEventLoop = _WindowsSelectorEventLoop + + if sys.version_info >= (3, 7): + class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[SelectorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... -if sys.version_info >= (3, 7): - class WindowsSelectorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): - _loop_factory: ClassVar[Type[SelectorEventLoop]] - def get_child_watcher(self) -> NoReturn: ... - def set_child_watcher(self, watcher: Any) -> NoReturn: ... - class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): - _loop_factory: ClassVar[Type[ProactorEventLoop]] - def get_child_watcher(self) -> NoReturn: ... - def set_child_watcher(self, watcher: Any) -> NoReturn: ... 
- DefaultEventLoopPolicy = WindowsSelectorEventLoopPolicy -else: - class _WindowsDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): - _loop_factory: ClassVar[Type[SelectorEventLoop]] - def get_child_watcher(self) -> NoReturn: ... - def set_child_watcher(self, watcher: Any) -> NoReturn: ... - DefaultEventLoopPolicy = _WindowsDefaultEventLoopPolicy + class WindowsProactorEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[ProactorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... + DefaultEventLoopPolicy = WindowsSelectorEventLoopPolicy + else: + class _WindowsDefaultEventLoopPolicy(events.BaseDefaultEventLoopPolicy): + _loop_factory: ClassVar[type[SelectorEventLoop]] + def get_child_watcher(self) -> NoReturn: ... + def set_child_watcher(self, watcher: Any) -> NoReturn: ... + DefaultEventLoopPolicy = _WindowsDefaultEventLoopPolicy diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_utils.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_utils.pyi index f32ed3c80389..78eff6956519 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncio/windows_utils.pyi @@ -1,27 +1,38 @@ +import subprocess import sys from _typeshed import Self from types import TracebackType -from typing import Callable, Protocol, Tuple, Type +from typing import Callable, Protocol +from typing_extensions import Literal -class _WarnFunction(Protocol): - def __call__(self, message: str, category: Type[Warning] = ..., stacklevel: int = ..., source: PipeHandle = ...) -> None: ... 
+if sys.platform == "win32": + if sys.version_info >= (3, 7): + __all__ = ("pipe", "Popen", "PIPE", "PipeHandle") + else: + __all__ = ["socketpair", "pipe", "Popen", "PIPE", "PipeHandle"] + import socket -BUFSIZE: int -PIPE: int -STDOUT: int + socketpair = socket.socketpair -def pipe(*, duplex: bool = ..., overlapped: Tuple[bool, bool] = ..., bufsize: int = ...) -> Tuple[int, int]: ... + class _WarnFunction(Protocol): + def __call__( + self, message: str, category: type[Warning] = ..., stacklevel: int = ..., source: PipeHandle = ... + ) -> None: ... + BUFSIZE: Literal[8192] + PIPE = subprocess.PIPE + STDOUT = subprocess.STDOUT + def pipe(*, duplex: bool = ..., overlapped: tuple[bool, bool] = ..., bufsize: int = ...) -> tuple[int, int]: ... -class PipeHandle: - def __init__(self, handle: int) -> None: ... - def __repr__(self) -> str: ... - if sys.version_info >= (3, 8): - def __del__(self, _warn: _WarnFunction = ...) -> None: ... - else: - def __del__(self) -> None: ... - def __enter__(self: Self) -> Self: ... - def __exit__(self, t: type | None, v: BaseException | None, tb: TracebackType | None) -> None: ... - @property - def handle(self) -> int: ... - def fileno(self) -> int: ... - def close(self, *, CloseHandle: Callable[[int], None] = ...) -> None: ... + class PipeHandle: + def __init__(self, handle: int) -> None: ... + if sys.version_info >= (3, 8): + def __del__(self, _warn: _WarnFunction = ...) -> None: ... + else: + def __del__(self) -> None: ... + + def __enter__(self: Self) -> Self: ... + def __exit__(self, t: type | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + @property + def handle(self) -> int: ... + def fileno(self) -> int: ... + def close(self, *, CloseHandle: Callable[[int], None] = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/asyncore.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/asyncore.pyi index 146d91f807f2..8f77e0e45c44 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/asyncore.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/asyncore.pyi @@ -1,10 +1,10 @@ import sys from _typeshed import FileDescriptorLike from socket import socket -from typing import Any, Dict, Tuple, overload +from typing import Any, overload # cyclic dependence with asynchat -_maptype = Dict[int, Any] +_maptype = dict[int, Any] _socket = socket socket_map: _maptype # undocumented @@ -41,9 +41,9 @@ class dispatcher: def readable(self) -> bool: ... def writable(self) -> bool: ... def listen(self, num: int) -> None: ... - def bind(self, addr: Tuple[Any, ...] | str) -> None: ... - def connect(self, address: Tuple[Any, ...] | str) -> None: ... - def accept(self) -> Tuple[_socket, Any] | None: ... + def bind(self, addr: tuple[Any, ...] | str) -> None: ... + def connect(self, address: tuple[Any, ...] | str) -> None: ... + def accept(self) -> tuple[_socket, Any] | None: ... def send(self, data: bytes) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... @@ -68,7 +68,7 @@ class dispatcher_with_send(dispatcher): # incompatible signature: # def send(self, data: bytes) -> int | None: ... -def compact_traceback() -> Tuple[Tuple[str, str, str], type, type, str]: ... +def compact_traceback() -> tuple[tuple[str, str, str], type, type, str]: ... def close_all(map: _maptype | None = ..., ignore_all: bool = ...) -> None: ... if sys.platform != "win32": @@ -85,6 +85,7 @@ if sys.platform != "win32": def write(self, data: bytes, flags: int = ...) -> int: ... def close(self) -> None: ... def fileno(self) -> int: ... + class file_dispatcher(dispatcher): def __init__(self, fd: FileDescriptorLike, map: _maptype | None = ...) -> None: ... def set_file(self, fd: int) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/atexit.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/atexit.pyi index 9395c60678b8..ba0c7dfaf6b1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/atexit.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/atexit.pyi @@ -7,5 +7,5 @@ _P = ParamSpec("_P") def _clear() -> None: ... def _ncallbacks() -> int: ... def _run_exitfuncs() -> None: ... -def register(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: ... # type: ignore +def register(func: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Callable[_P, _T]: ... def unregister(func: Callable[..., Any]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/audioop.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/audioop.pyi index 71671afe487e..b08731b85b0b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/audioop.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/audioop.pyi @@ -1,12 +1,10 @@ -from typing import Tuple - -AdpcmState = Tuple[int, int] -RatecvState = Tuple[int, Tuple[Tuple[int, int], ...]] +AdpcmState = tuple[int, int] +RatecvState = tuple[int, tuple[tuple[int, int], ...]] class error(Exception): ... def add(__fragment1: bytes, __fragment2: bytes, __width: int) -> bytes: ... -def adpcm2lin(__fragment: bytes, __width: int, __state: AdpcmState | None) -> Tuple[bytes, AdpcmState]: ... +def adpcm2lin(__fragment: bytes, __width: int, __state: AdpcmState | None) -> tuple[bytes, AdpcmState]: ... def alaw2lin(__fragment: bytes, __width: int) -> bytes: ... def avg(__fragment: bytes, __width: int) -> int: ... def avgpp(__fragment: bytes, __width: int) -> int: ... @@ -14,16 +12,16 @@ def bias(__fragment: bytes, __width: int, __bias: int) -> bytes: ... def byteswap(__fragment: bytes, __width: int) -> bytes: ... def cross(__fragment: bytes, __width: int) -> int: ... 
def findfactor(__fragment: bytes, __reference: bytes) -> float: ... -def findfit(__fragment: bytes, __reference: bytes) -> Tuple[int, float]: ... +def findfit(__fragment: bytes, __reference: bytes) -> tuple[int, float]: ... def findmax(__fragment: bytes, __length: int) -> int: ... def getsample(__fragment: bytes, __width: int, __index: int) -> int: ... -def lin2adpcm(__fragment: bytes, __width: int, __state: AdpcmState | None) -> Tuple[bytes, AdpcmState]: ... +def lin2adpcm(__fragment: bytes, __width: int, __state: AdpcmState | None) -> tuple[bytes, AdpcmState]: ... def lin2alaw(__fragment: bytes, __width: int) -> bytes: ... def lin2lin(__fragment: bytes, __width: int, __newwidth: int) -> bytes: ... def lin2ulaw(__fragment: bytes, __width: int) -> bytes: ... def max(__fragment: bytes, __width: int) -> int: ... def maxpp(__fragment: bytes, __width: int) -> int: ... -def minmax(__fragment: bytes, __width: int) -> Tuple[int, int]: ... +def minmax(__fragment: bytes, __width: int) -> tuple[int, int]: ... def mul(__fragment: bytes, __width: int, __factor: float) -> bytes: ... def ratecv( __fragment: bytes, @@ -34,7 +32,7 @@ def ratecv( __state: RatecvState | None, __weightA: int = ..., __weightB: int = ..., -) -> Tuple[bytes, RatecvState]: ... +) -> tuple[bytes, RatecvState]: ... def reverse(__fragment: bytes, __width: int) -> bytes: ... def rms(__fragment: bytes, __width: int) -> int: ... def tomono(__fragment: bytes, __width: int, __lfactor: float, __rfactor: float) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/base64.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/base64.pyi index 8610eea17b52..70fe64292328 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/base64.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/base64.pyi @@ -1,6 +1,51 @@ import sys from typing import IO +if sys.version_info >= (3, 10): + __all__ = [ + "encode", + "decode", + "encodebytes", + "decodebytes", + "b64encode", + "b64decode", + "b32encode", + "b32decode", + "b32hexencode", + "b32hexdecode", + "b16encode", + "b16decode", + "b85encode", + "b85decode", + "a85encode", + "a85decode", + "standard_b64encode", + "standard_b64decode", + "urlsafe_b64encode", + "urlsafe_b64decode", + ] +else: + __all__ = [ + "encode", + "decode", + "encodebytes", + "decodebytes", + "b64encode", + "b64decode", + "b32encode", + "b32decode", + "b16encode", + "b16decode", + "b85encode", + "b85decode", + "a85encode", + "a85decode", + "standard_b64encode", + "standard_b64decode", + "urlsafe_b64encode", + "urlsafe_b64decode", + ] + def b64encode(s: bytes, altchars: bytes | None = ...) -> bytes: ... def b64decode(s: str | bytes, altchars: bytes | None = ..., validate: bool = ...) -> bytes: ... def standard_b64encode(s: bytes) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/bdb.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/bdb.pyi index 34eb989573aa..8f61433e0cb8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/bdb.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/bdb.pyi @@ -1,17 +1,21 @@ from types import CodeType, FrameType, TracebackType -from typing import IO, Any, Callable, Iterable, Mapping, Set, SupportsInt, Tuple, Type, TypeVar +from typing import IO, Any, Callable, Iterable, Mapping, SupportsInt, TypeVar +from typing_extensions import Literal, ParamSpec + +__all__ = ["BdbQuit", "Bdb", "Breakpoint"] _T = TypeVar("_T") +_P = ParamSpec("_P") _TraceDispatch = Callable[[FrameType, str, Any], Any] # TODO: Recursive type -_ExcInfo = Tuple[Type[BaseException], BaseException, FrameType] +_ExcInfo = tuple[type[BaseException], BaseException, FrameType] -GENERATOR_AND_COROUTINE_FLAGS: int +GENERATOR_AND_COROUTINE_FLAGS: Literal[672] class BdbQuit(Exception): ... class Bdb: - skip: Set[str] | None + skip: set[str] | None breaks: dict[str, list[int]] fncache: dict[str, str] frame_returning: FrameType | None @@ -56,17 +60,17 @@ class Bdb: def get_breaks(self, filename: str, lineno: int) -> list[Breakpoint]: ... def get_file_breaks(self, filename: str) -> list[Breakpoint]: ... def get_all_breaks(self) -> list[Breakpoint]: ... - def get_stack(self, f: FrameType | None, t: TracebackType | None) -> Tuple[list[Tuple[FrameType, int]], int]: ... + def get_stack(self, f: FrameType | None, t: TracebackType | None) -> tuple[list[tuple[FrameType, int]], int]: ... def format_stack_entry(self, frame_lineno: int, lprefix: str = ...) -> str: ... def run(self, cmd: str | CodeType, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... def runeval(self, expr: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... 
def runctx(self, cmd: str | CodeType, globals: dict[str, Any] | None, locals: Mapping[str, Any] | None) -> None: ... - def runcall(self, __func: Callable[..., _T], *args: Any, **kwds: Any) -> _T | None: ... + def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... class Breakpoint: next: int - bplist: dict[Tuple[str, int], list[Breakpoint]] + bplist: dict[tuple[str, int], list[Breakpoint]] bpbynumber: list[Breakpoint | None] funcname: str | None @@ -87,8 +91,7 @@ class Breakpoint: def disable(self) -> None: ... def bpprint(self, out: IO[str] | None = ...) -> None: ... def bpformat(self) -> str: ... - def __str__(self) -> str: ... def checkfuncname(b: Breakpoint, frame: FrameType) -> bool: ... -def effective(file: str, line: int, frame: FrameType) -> Tuple[Breakpoint, bool] | Tuple[None, None]: ... +def effective(file: str, line: int, frame: FrameType) -> tuple[Breakpoint, bool] | tuple[None, None]: ... def set_trace() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/binascii.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/binascii.pyi index 962f5666b284..317bb9979b92 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/binascii.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/binascii.pyi @@ -12,10 +12,13 @@ def a2b_base64(__data: str | bytes) -> bytes: ... def b2a_base64(__data: bytes, *, newline: bool = ...) -> bytes: ... def a2b_qp(data: str | bytes, header: bool = ...) -> bytes: ... def b2a_qp(data: bytes, quotetabs: bool = ..., istext: bool = ..., header: bool = ...) -> bytes: ... -def a2b_hqx(__data: str | bytes) -> bytes: ... -def rledecode_hqx(__data: bytes) -> bytes: ... -def rlecode_hqx(__data: bytes) -> bytes: ... -def b2a_hqx(__data: bytes) -> bytes: ... + +if sys.version_info < (3, 11): + def a2b_hqx(__data: str | bytes) -> bytes: ... + def rledecode_hqx(__data: bytes) -> bytes: ... + def rlecode_hqx(__data: bytes) -> bytes: ... 
+ def b2a_hqx(__data: bytes) -> bytes: ... + def crc_hqx(__data: bytes, __crc: int) -> int: ... def crc32(__data: bytes, __crc: int = ...) -> int: ... def b2a_hex(__data: bytes) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/binhex.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/binhex.pyi index 02d094faf923..e531ccd508bf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/binhex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/binhex.pyi @@ -1,10 +1,13 @@ -from typing import IO, Any, Tuple, Union +from typing import IO, Any, Union +from typing_extensions import Literal + +__all__ = ["binhex", "hexbin", "Error"] class Error(Exception): ... -REASONABLY_LARGE: int -LINELEN: int -RUNCHAR: bytes +REASONABLY_LARGE: Literal[32768] +LINELEN: Literal[64] +RUNCHAR: Literal[b"\x90"] class FInfo: def __init__(self) -> None: ... @@ -12,7 +15,7 @@ class FInfo: Creator: str Flags: int -_FileInfoTuple = Tuple[str, FInfo, int, int] +_FileInfoTuple = tuple[str, FInfo, int, int] _FileHandleUnion = Union[str, IO[bytes]] def getfileinfo(name: str) -> _FileInfoTuple: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi index fe8b35ac5100..b327deddc7d1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/builtins.pyi @@ -1,5 +1,7 @@ import sys import types +from _ast import AST +from _collections_abc import dict_items, dict_keys, dict_values from _typeshed import ( OpenBinaryMode, OpenBinaryModeReading, @@ -13,30 +15,25 @@ from _typeshed import ( SupportsDivMod, SupportsKeysAndGetItem, SupportsLenAndGetItem, - SupportsLessThan, - SupportsLessThanT, SupportsNext, SupportsRDivMod, + SupportsRichComparison, + SupportsRichComparisonT, + SupportsTrunc, SupportsWrite, ) -from ast import AST, mod +from collections.abc import Callable from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper -from types import CodeType, MappingProxyType, TracebackType +from types import CodeType, TracebackType, _Cell from typing import ( IO, AbstractSet, Any, - AsyncIterable, - AsyncIterator, BinaryIO, ByteString, - Callable, - FrozenSet, Generic, - ItemsView, Iterable, Iterator, - KeysView, Mapping, MutableMapping, MutableSequence, @@ -45,7 +42,6 @@ from typing import ( Protocol, Reversible, Sequence, - Set, Sized, SupportsAbs, SupportsBytes, @@ -53,132 +49,144 @@ from typing import ( SupportsFloat, SupportsInt, SupportsRound, - Tuple, - Type, TypeVar, Union, - ValuesView, overload, ) -from typing_extensions import Literal, SupportsIndex, final +from typing_extensions import Literal, SupportsIndex, TypeGuard, final if sys.version_info >= (3, 9): from types import GenericAlias -class _SupportsTrunc(Protocol): - def __trunc__(self) -> int: ... 
- _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) _T_contra = TypeVar("_T_contra", contravariant=True) +_R_co = TypeVar("_R_co", covariant=True) _KT = TypeVar("_KT") _VT = TypeVar("_VT") -_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. -_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. _S = TypeVar("_S") _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") _T3 = TypeVar("_T3") _T4 = TypeVar("_T4") _T5 = TypeVar("_T5") -_TT = TypeVar("_TT", bound="type") -_TBE = TypeVar("_TBE", bound="BaseException") +_SupportsNextT = TypeVar("_SupportsNextT", bound=SupportsNext[Any], covariant=True) +_SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant=True) + +class _SupportsIter(Protocol[_T_co]): + def __iter__(self) -> _T_co: ... + +class _SupportsAiter(Protocol[_T_co]): + def __aiter__(self) -> _T_co: ... class object: __doc__: str | None __dict__: dict[str, Any] - __slots__: str | Iterable[str] __module__: str __annotations__: dict[str, Any] @property - def __class__(self: _T) -> Type[_T]: ... + def __class__(self: Self) -> type[Self]: ... # Ignore errors about type mismatch between property getter and setter @__class__.setter - def __class__(self, __type: Type[object]) -> None: ... # type: ignore # noqa: F811 + def __class__(self, __type: type[object]) -> None: ... # type: ignore # noqa: F811 def __init__(self) -> None: ... - def __new__(cls: Type[_T]) -> _T: ... - def __setattr__(self, name: str, value: Any) -> None: ... - def __eq__(self, o: object) -> bool: ... - def __ne__(self, o: object) -> bool: ... - def __str__(self) -> str: ... - def __repr__(self) -> str: ... + def __new__(cls: type[Self]) -> Self: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __eq__(self, __o: object) -> bool: ... + def __ne__(self, __o: object) -> bool: ... + def __str__(self) -> str: ... # noqa Y029 + def __repr__(self) -> str: ... # noqa Y029 def __hash__(self) -> int: ... 
- def __format__(self, format_spec: str) -> str: ... - def __getattribute__(self, name: str) -> Any: ... - def __delattr__(self, name: str) -> None: ... + def __format__(self, __format_spec: str) -> str: ... + def __getattribute__(self, __name: str) -> Any: ... + def __delattr__(self, __name: str) -> None: ... def __sizeof__(self) -> int: ... - def __reduce__(self) -> str | Tuple[Any, ...]: ... + # return type of pickle methods is rather hard to express in the current type system + # see #6661 and https://docs.python.org/3/library/pickle.html#object.__reduce__ + def __reduce__(self) -> str | tuple[Any, ...]: ... if sys.version_info >= (3, 8): - def __reduce_ex__(self, protocol: SupportsIndex) -> str | Tuple[Any, ...]: ... + def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]: ... else: - def __reduce_ex__(self, protocol: int) -> str | Tuple[Any, ...]: ... + def __reduce_ex__(self, __protocol: int) -> str | tuple[Any, ...]: ... + def __dir__(self) -> Iterable[str]: ... def __init_subclass__(cls) -> None: ... -class staticmethod(object): # Special, only valid as a decorator. - __func__: Callable[..., Any] +class staticmethod(Generic[_R_co]): + __func__: Callable[..., _R_co] __isabstractmethod__: bool - def __init__(self, f: Callable[..., Any]) -> None: ... - def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... - def __get__(self, obj: _T, type: Type[_T] | None = ...) -> Callable[..., Any]: ... + def __init__(self: staticmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... + def __get__(self, __obj: _T, __type: type[_T] | None = ...) -> Callable[..., _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + __wrapped__: Callable[..., _R_co] + def __call__(self, *args: Any, **kwargs: Any) -> _R_co: ... -class classmethod(object): # Special, only valid as a decorator. 
- __func__: Callable[..., Any] +class classmethod(Generic[_R_co]): + __func__: Callable[..., _R_co] __isabstractmethod__: bool - def __init__(self, f: Callable[..., Any]) -> None: ... - def __new__(cls: Type[_T], *args: Any, **kwargs: Any) -> _T: ... - def __get__(self, obj: _T, type: Type[_T] | None = ...) -> Callable[..., Any]: ... + def __init__(self: classmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... + def __get__(self, __obj: _T, __type: type[_T] | None = ...) -> Callable[..., _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + __wrapped__: Callable[..., _R_co] -class type(object): +class type: __base__: type - __bases__: Tuple[type, ...] + __bases__: tuple[type, ...] __basicsize__: int __dict__: dict[str, Any] __dictoffset__: int __flags__: int __itemsize__: int __module__: str - __mro__: Tuple[type, ...] + __mro__: tuple[type, ...] __name__: str __qualname__: str __text_signature__: str | None __weakrefoffset__: int @overload - def __init__(self, o: object) -> None: ... + def __init__(self, __o: object) -> None: ... @overload - def __init__(self, name: str, bases: Tuple[type, ...], dict: dict[str, Any], **kwds: Any) -> None: ... + def __init__(self, __name: str, __bases: tuple[type, ...], __dict: dict[str, Any], **kwds: Any) -> None: ... @overload - def __new__(cls, o: object) -> type: ... + def __new__(cls, __o: object) -> type: ... @overload - def __new__(cls: Type[_TT], name: str, bases: Tuple[type, ...], namespace: dict[str, Any], **kwds: Any) -> _TT: ... + def __new__(cls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any) -> Self: ... def __call__(self, *args: Any, **kwds: Any) -> Any: ... - def __subclasses__(self: _TT) -> list[_TT]: ... - # Note: the documentation doesnt specify what the return type is, the standard + def __subclasses__(self: Self) -> list[Self]: ... 
+ # Note: the documentation doesn't specify what the return type is, the standard # implementation seems to be returning a list. def mro(self) -> list[type]: ... - def __instancecheck__(self, instance: Any) -> bool: ... - def __subclasscheck__(self, subclass: type) -> bool: ... + def __instancecheck__(self, __instance: Any) -> bool: ... + def __subclasscheck__(self, __subclass: type) -> bool: ... @classmethod - def __prepare__(metacls, __name: str, __bases: Tuple[type, ...], **kwds: Any) -> Mapping[str, Any]: ... + def __prepare__(metacls, __name: str, __bases: tuple[type, ...], **kwds: Any) -> Mapping[str, object]: ... if sys.version_info >= (3, 10): - def __or__(self, t: Any) -> types.UnionType: ... - def __ror__(self, t: Any) -> types.UnionType: ... + def __or__(self, __t: Any) -> types.UnionType: ... + def __ror__(self, __t: Any) -> types.UnionType: ... -class super(object): +class super: @overload - def __init__(self, t: Any, obj: Any) -> None: ... + def __init__(self, __t: Any, __obj: Any) -> None: ... @overload - def __init__(self, t: Any) -> None: ... + def __init__(self, __t: Any) -> None: ... @overload def __init__(self) -> None: ... +_PositiveInteger = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] +_NegativeInteger = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] + class int: @overload - def __new__(cls: Type[_T], x: str | bytes | SupportsInt | SupportsIndex | _SupportsTrunc = ...) -> _T: ... + def __new__(cls: type[Self], __x: str | bytes | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... @overload - def __new__(cls: Type[_T], x: str | bytes | bytearray, base: SupportsIndex) -> _T: ... + def __new__(cls: type[Self], __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... if sys.version_info >= (3, 8): - def as_integer_ratio(self) -> Tuple[int, Literal[1]]: ... + def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... 
+ @property def real(self) -> int: ... @property @@ -191,55 +199,69 @@ class int: def bit_length(self) -> int: ... if sys.version_info >= (3, 10): def bit_count(self) -> int: ... + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = ...) -> bytes: ... @classmethod def from_bytes( - cls, bytes: Iterable[SupportsIndex] | SupportsBytes, byteorder: Literal["little", "big"], *, signed: bool = ... - ) -> int: ... # TODO buffer object argument - def __add__(self, x: int) -> int: ... - def __sub__(self, x: int) -> int: ... - def __mul__(self, x: int) -> int: ... - def __floordiv__(self, x: int) -> int: ... - def __truediv__(self, x: int) -> float: ... - def __mod__(self, x: int) -> int: ... - def __divmod__(self, x: int) -> Tuple[int, int]: ... - def __radd__(self, x: int) -> int: ... - def __rsub__(self, x: int) -> int: ... - def __rmul__(self, x: int) -> int: ... - def __rfloordiv__(self, x: int) -> int: ... - def __rtruediv__(self, x: int) -> float: ... - def __rmod__(self, x: int) -> int: ... - def __rdivmod__(self, x: int) -> Tuple[int, int]: ... - @overload - def __pow__(self, __x: Literal[2], __modulo: int | None = ...) -> int: ... - @overload - def __pow__(self, __x: int, __modulo: int | None = ...) -> Any: ... # Return type can be int or float, depending on x. - def __rpow__(self, x: int, mod: int | None = ...) -> Any: ... - def __and__(self, n: int) -> int: ... - def __or__(self, n: int) -> int: ... - def __xor__(self, n: int) -> int: ... - def __lshift__(self, n: int) -> int: ... - def __rshift__(self, n: int) -> int: ... - def __rand__(self, n: int) -> int: ... - def __ror__(self, n: int) -> int: ... - def __rxor__(self, n: int) -> int: ... - def __rlshift__(self, n: int) -> int: ... - def __rrshift__(self, n: int) -> int: ... + cls: type[Self], + bytes: Iterable[SupportsIndex] | SupportsBytes, # TODO buffer object argument + byteorder: Literal["little", "big"], + *, + signed: bool = ..., + ) -> Self: ... 
+ def __add__(self, __x: int) -> int: ... + def __sub__(self, __x: int) -> int: ... + def __mul__(self, __x: int) -> int: ... + def __floordiv__(self, __x: int) -> int: ... + def __truediv__(self, __x: int) -> float: ... + def __mod__(self, __x: int) -> int: ... + def __divmod__(self, __x: int) -> tuple[int, int]: ... + def __radd__(self, __x: int) -> int: ... + def __rsub__(self, __x: int) -> int: ... + def __rmul__(self, __x: int) -> int: ... + def __rfloordiv__(self, __x: int) -> int: ... + def __rtruediv__(self, __x: int) -> float: ... + def __rmod__(self, __x: int) -> int: ... + def __rdivmod__(self, __x: int) -> tuple[int, int]: ... + @overload + def __pow__(self, __x: int, __modulo: Literal[0]) -> NoReturn: ... + @overload + def __pow__(self, __x: int, __modulo: int) -> int: ... + @overload + def __pow__(self, __x: Literal[0], __modulo: None = ...) -> Literal[1]: ... + @overload + def __pow__(self, __x: _PositiveInteger, __modulo: None = ...) -> int: ... + @overload + def __pow__(self, __x: _NegativeInteger, __modulo: None = ...) -> float: ... + # positive x -> int; negative x -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def __pow__(self, __x: int, __modulo: None = ...) -> Any: ... + def __rpow__(self, __x: int, __mod: int | None = ...) -> Any: ... + def __and__(self, __n: int) -> int: ... + def __or__(self, __n: int) -> int: ... + def __xor__(self, __n: int) -> int: ... + def __lshift__(self, __n: int) -> int: ... + def __rshift__(self, __n: int) -> int: ... + def __rand__(self, __n: int) -> int: ... + def __ror__(self, __n: int) -> int: ... + def __rxor__(self, __n: int) -> int: ... + def __rlshift__(self, __n: int) -> int: ... + def __rrshift__(self, __n: int) -> int: ... def __neg__(self) -> int: ... def __pos__(self) -> int: ... def __invert__(self) -> int: ... def __trunc__(self) -> int: ... def __ceil__(self) -> int: ... def __floor__(self) -> int: ... 
- def __round__(self, ndigits: SupportsIndex = ...) -> int: ... - def __getnewargs__(self) -> Tuple[int]: ... - def __eq__(self, x: object) -> bool: ... - def __ne__(self, x: object) -> bool: ... - def __lt__(self, x: int) -> bool: ... - def __le__(self, x: int) -> bool: ... - def __gt__(self, x: int) -> bool: ... - def __ge__(self, x: int) -> bool: ... - def __str__(self) -> str: ... + def __round__(self, __ndigits: SupportsIndex = ...) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: int) -> bool: ... + def __le__(self, __x: int) -> bool: ... + def __gt__(self, __x: int) -> bool: ... + def __ge__(self, __x: int) -> bool: ... def __float__(self) -> float: ... def __int__(self) -> int: ... def __abs__(self) -> int: ... @@ -248,53 +270,57 @@ class int: def __index__(self) -> int: ... class float: - def __new__(cls: Type[_T], x: SupportsFloat | SupportsIndex | str | bytes | bytearray = ...) -> _T: ... - def as_integer_ratio(self) -> Tuple[int, int]: ... + def __new__(cls: type[Self], x: SupportsFloat | SupportsIndex | str | bytes | bytearray = ...) -> Self: ... + def as_integer_ratio(self) -> tuple[int, int]: ... def hex(self) -> str: ... def is_integer(self) -> bool: ... @classmethod - def fromhex(cls, __s: str) -> float: ... + def fromhex(cls: type[Self], __s: str) -> Self: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> float: ... - def __add__(self, x: float) -> float: ... - def __sub__(self, x: float) -> float: ... - def __mul__(self, x: float) -> float: ... - def __floordiv__(self, x: float) -> float: ... - def __truediv__(self, x: float) -> float: ... - def __mod__(self, x: float) -> float: ... - def __divmod__(self, x: float) -> Tuple[float, float]: ... - def __pow__( - self, x: float, mod: None = ... - ) -> float: ... 
# In Python 3, returns complex if self is negative and x is not whole - def __radd__(self, x: float) -> float: ... - def __rsub__(self, x: float) -> float: ... - def __rmul__(self, x: float) -> float: ... - def __rfloordiv__(self, x: float) -> float: ... - def __rtruediv__(self, x: float) -> float: ... - def __rmod__(self, x: float) -> float: ... - def __rdivmod__(self, x: float) -> Tuple[float, float]: ... - def __rpow__(self, x: float, mod: None = ...) -> float: ... - def __getnewargs__(self) -> Tuple[float]: ... + def __add__(self, __x: float) -> float: ... + def __sub__(self, __x: float) -> float: ... + def __mul__(self, __x: float) -> float: ... + def __floordiv__(self, __x: float) -> float: ... + def __truediv__(self, __x: float) -> float: ... + def __mod__(self, __x: float) -> float: ... + def __divmod__(self, __x: float) -> tuple[float, float]: ... + @overload + def __pow__(self, __x: int, __mod: None = ...) -> float: ... + # positive x -> float; negative x -> complex + # return type must be Any as `float | complex` causes too many false-positive errors + @overload + def __pow__(self, __x: float, __mod: None = ...) -> Any: ... + def __radd__(self, __x: float) -> float: ... + def __rsub__(self, __x: float) -> float: ... + def __rmul__(self, __x: float) -> float: ... + def __rfloordiv__(self, __x: float) -> float: ... + def __rtruediv__(self, __x: float) -> float: ... + def __rmod__(self, __x: float) -> float: ... + def __rdivmod__(self, __x: float) -> tuple[float, float]: ... + # Returns complex if the argument is negative. + def __rpow__(self, __x: float, __mod: None = ...) -> Any: ... + def __getnewargs__(self) -> tuple[float]: ... def __trunc__(self) -> int: ... if sys.version_info >= (3, 9): def __ceil__(self) -> int: ... def __floor__(self) -> int: ... + @overload - def __round__(self, ndigits: None = ...) -> int: ... + def __round__(self, __ndigits: None = ...) -> int: ... @overload - def __round__(self, ndigits: SupportsIndex) -> float: ... 
- def __eq__(self, x: object) -> bool: ... - def __ne__(self, x: object) -> bool: ... - def __lt__(self, x: float) -> bool: ... - def __le__(self, x: float) -> bool: ... - def __gt__(self, x: float) -> bool: ... - def __ge__(self, x: float) -> bool: ... + def __round__(self, __ndigits: SupportsIndex) -> float: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: float) -> bool: ... + def __le__(self, __x: float) -> bool: ... + def __gt__(self, __x: float) -> bool: ... + def __ge__(self, __x: float) -> bool: ... def __neg__(self) -> float: ... def __pos__(self) -> float: ... - def __str__(self) -> str: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __abs__(self) -> float: ... @@ -303,53 +329,55 @@ class float: class complex: @overload - def __new__(cls: Type[_T], real: float = ..., imag: float = ...) -> _T: ... + def __new__(cls: type[Self], real: float = ..., imag: float = ...) -> Self: ... @overload - def __new__(cls: Type[_T], real: str | SupportsComplex | SupportsIndex | complex) -> _T: ... + def __new__(cls: type[Self], real: str | SupportsComplex | SupportsIndex | complex) -> Self: ... @property def real(self) -> float: ... @property def imag(self) -> float: ... def conjugate(self) -> complex: ... - def __add__(self, x: complex) -> complex: ... - def __sub__(self, x: complex) -> complex: ... - def __mul__(self, x: complex) -> complex: ... - def __pow__(self, x: complex, mod: None = ...) -> complex: ... - def __truediv__(self, x: complex) -> complex: ... - def __radd__(self, x: complex) -> complex: ... - def __rsub__(self, x: complex) -> complex: ... - def __rmul__(self, x: complex) -> complex: ... - def __rpow__(self, x: complex, mod: None = ...) -> complex: ... - def __rtruediv__(self, x: complex) -> complex: ... - def __eq__(self, x: object) -> bool: ... - def __ne__(self, x: object) -> bool: ... + def __add__(self, __x: complex) -> complex: ... 
+ def __sub__(self, __x: complex) -> complex: ... + def __mul__(self, __x: complex) -> complex: ... + def __pow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __truediv__(self, __x: complex) -> complex: ... + def __radd__(self, __x: complex) -> complex: ... + def __rsub__(self, __x: complex) -> complex: ... + def __rmul__(self, __x: complex) -> complex: ... + def __rpow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __rtruediv__(self, __x: complex) -> complex: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... def __neg__(self) -> complex: ... def __pos__(self) -> complex: ... - def __str__(self) -> str: ... def __abs__(self) -> float: ... def __hash__(self) -> int: ... def __bool__(self) -> bool: ... + if sys.version_info >= (3, 11): + def __complex__(self) -> complex: ... class _FormatMapMapping(Protocol): def __getitem__(self, __key: str) -> Any: ... class str(Sequence[str]): @overload - def __new__(cls: Type[_T], o: object = ...) -> _T: ... + def __new__(cls: type[Self], object: object = ...) -> Self: ... @overload - def __new__(cls: Type[_T], o: bytes, encoding: str = ..., errors: str = ...) -> _T: ... + def __new__(cls: type[Self], o: bytes, encoding: str = ..., errors: str = ...) -> Self: ... def capitalize(self) -> str: ... def casefold(self) -> str: ... def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... def endswith( - self, __suffix: str | Tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... 
else: def expandtabs(self, tabsize: int = ...) -> str: ... + def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def format(self, *args: object, **kwargs: object) -> str: ... def format_map(self, map: _FormatMapMapping) -> str: ... @@ -358,6 +386,7 @@ class str(Sequence[str]): def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... + def isdecimal(self) -> bool: ... def isdigit(self) -> bool: ... def isidentifier(self) -> bool: ... @@ -371,21 +400,22 @@ class str(Sequence[str]): def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... def lower(self) -> str: ... def lstrip(self, __chars: str | None = ...) -> str: ... - def partition(self, __sep: str) -> Tuple[str, str, str]: ... + def partition(self, __sep: str) -> tuple[str, str, str]: ... def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: str) -> str: ... def removesuffix(self, __suffix: str) -> str: ... + def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... - def rpartition(self, __sep: str) -> Tuple[str, str, str]: ... + def rpartition(self, __sep: str) -> tuple[str, str, str]: ... def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... def rstrip(self, __chars: str | None = ...) -> str: ... def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... def splitlines(self, keepends: bool = ...) -> list[str]: ... def startswith( - self, __prefix: str | Tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... 
+ self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... def strip(self, __chars: str | None = ...) -> str: ... def swapcase(self) -> str: ... @@ -399,37 +429,35 @@ class str(Sequence[str]): @staticmethod @overload def maketrans(__x: str, __y: str, __z: str | None = ...) -> dict[int, int | None]: ... - def __add__(self, s: str) -> str: ... + def __add__(self, __s: str) -> str: ... # Incompatible with Sequence.__contains__ - def __contains__(self, o: str) -> bool: ... # type: ignore - def __eq__(self, x: object) -> bool: ... - def __ge__(self, x: str) -> bool: ... - def __getitem__(self, i: int | slice) -> str: ... - def __gt__(self, x: str) -> bool: ... + def __contains__(self, __o: str) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ge__(self, __x: str) -> bool: ... + def __getitem__(self, __i: SupportsIndex | slice) -> str: ... + def __gt__(self, __x: str) -> bool: ... def __hash__(self) -> int: ... def __iter__(self) -> Iterator[str]: ... - def __le__(self, x: str) -> bool: ... + def __le__(self, __x: str) -> bool: ... def __len__(self) -> int: ... - def __lt__(self, x: str) -> bool: ... - def __mod__(self, x: Any) -> str: ... - def __mul__(self, n: SupportsIndex) -> str: ... - def __ne__(self, x: object) -> bool: ... - def __repr__(self) -> str: ... - def __rmul__(self, n: SupportsIndex) -> str: ... - def __str__(self) -> str: ... - def __getnewargs__(self) -> Tuple[str]: ... + def __lt__(self, __x: str) -> bool: ... + def __mod__(self, __x: Any) -> str: ... + def __mul__(self, __n: SupportsIndex) -> str: ... + def __ne__(self, __x: object) -> bool: ... + def __rmul__(self, __n: SupportsIndex) -> str: ... + def __getnewargs__(self) -> tuple[str]: ... class bytes(ByteString): @overload - def __new__(cls: Type[_T], ints: Iterable[SupportsIndex]) -> _T: ... + def __new__(cls: type[Self], __ints: Iterable[SupportsIndex]) -> Self: ... 
@overload - def __new__(cls: Type[_T], string: str, encoding: str, errors: str = ...) -> _T: ... + def __new__(cls: type[Self], __string: str, encoding: str, errors: str = ...) -> Self: ... @overload - def __new__(cls: Type[_T], length: SupportsIndex) -> _T: ... + def __new__(cls: type[Self], __length: SupportsIndex) -> Self: ... @overload - def __new__(cls: Type[_T]) -> _T: ... + def __new__(cls: type[Self]) -> Self: ... @overload - def __new__(cls: Type[_T], o: SupportsBytes) -> _T: ... + def __new__(cls: type[Self], __o: SupportsBytes) -> Self: ... def capitalize(self) -> bytes: ... def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... def count( @@ -437,12 +465,13 @@ class bytes(ByteString): ) -> int: ... def decode(self, encoding: str = ..., errors: str = ...) -> str: ... def endswith( - self, __suffix: bytes | Tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + self, __suffix: bytes | tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): def expandtabs(self, tabsize: SupportsIndex = ...) -> bytes: ... else: def expandtabs(self, tabsize: int = ...) -> bytes: ... + def find( self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... @@ -450,6 +479,7 @@ class bytes(ByteString): def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... else: def hex(self) -> str: ... + def index( self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... @@ -457,6 +487,7 @@ class bytes(ByteString): def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... 
@@ -466,11 +497,12 @@ class bytes(ByteString): def ljust(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... def lower(self) -> bytes: ... def lstrip(self, __bytes: bytes | None = ...) -> bytes: ... - def partition(self, __sep: bytes) -> Tuple[bytes, bytes, bytes]: ... + def partition(self, __sep: bytes) -> tuple[bytes, bytes, bytes]: ... def replace(self, __old: bytes, __new: bytes, __count: SupportsIndex = ...) -> bytes: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: bytes) -> bytes: ... def removesuffix(self, __suffix: bytes) -> bytes: ... + def rfind( self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... @@ -478,13 +510,13 @@ class bytes(ByteString): self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... def rjust(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... - def rpartition(self, __sep: bytes) -> Tuple[bytes, bytes, bytes]: ... + def rpartition(self, __sep: bytes) -> tuple[bytes, bytes, bytes]: ... def rsplit(self, sep: bytes | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... def rstrip(self, __bytes: bytes | None = ...) -> bytes: ... def split(self, sep: bytes | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... def splitlines(self, keepends: bool = ...) -> list[bytes]: ... def startswith( - self, __prefix: bytes | Tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + self, __prefix: bytes | tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... def strip(self, __bytes: bytes | None = ...) -> bytes: ... def swapcase(self) -> bytes: ... @@ -493,41 +525,41 @@ class bytes(ByteString): def upper(self) -> bytes: ... def zfill(self, __width: SupportsIndex) -> bytes: ... @classmethod - def fromhex(cls, __s: str) -> bytes: ... 
+ def fromhex(cls: type[Self], __s: str) -> Self: ... @staticmethod def maketrans(__frm: bytes, __to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... - def __str__(self) -> str: ... - def __repr__(self) -> str: ... def __hash__(self) -> int: ... @overload - def __getitem__(self, i: SupportsIndex) -> int: ... + def __getitem__(self, __i: SupportsIndex) -> int: ... @overload - def __getitem__(self, s: slice) -> bytes: ... - def __add__(self, s: bytes) -> bytes: ... - def __mul__(self, n: SupportsIndex) -> bytes: ... - def __rmul__(self, n: SupportsIndex) -> bytes: ... - def __mod__(self, value: Any) -> bytes: ... + def __getitem__(self, __s: slice) -> bytes: ... + def __add__(self, __s: bytes) -> bytes: ... + def __mul__(self, __n: SupportsIndex) -> bytes: ... + def __rmul__(self, __n: SupportsIndex) -> bytes: ... + def __mod__(self, __value: Any) -> bytes: ... # Incompatible with Sequence.__contains__ - def __contains__(self, o: SupportsIndex | bytes) -> bool: ... # type: ignore - def __eq__(self, x: object) -> bool: ... - def __ne__(self, x: object) -> bool: ... - def __lt__(self, x: bytes) -> bool: ... - def __le__(self, x: bytes) -> bool: ... - def __gt__(self, x: bytes) -> bool: ... - def __ge__(self, x: bytes) -> bool: ... - def __getnewargs__(self) -> Tuple[bytes]: ... + def __contains__(self, __o: SupportsIndex | bytes) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: bytes) -> bool: ... + def __le__(self, __x: bytes) -> bool: ... + def __gt__(self, __x: bytes) -> bool: ... + def __ge__(self, __x: bytes) -> bool: ... + def __getnewargs__(self) -> tuple[bytes]: ... + if sys.version_info >= (3, 11): + def __bytes__(self) -> bytes: ... class bytearray(MutableSequence[int], ByteString): @overload def __init__(self) -> None: ... @overload - def __init__(self, ints: Iterable[SupportsIndex]) -> None: ... 
+ def __init__(self, __ints: Iterable[SupportsIndex]) -> None: ... @overload - def __init__(self, string: str, encoding: str, errors: str = ...) -> None: ... + def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... @overload - def __init__(self, length: SupportsIndex) -> None: ... + def __init__(self, __length: SupportsIndex) -> None: ... def append(self, __item: SupportsIndex) -> None: ... def capitalize(self) -> bytearray: ... def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... @@ -537,12 +569,13 @@ class bytearray(MutableSequence[int], ByteString): def copy(self) -> bytearray: ... def decode(self, encoding: str = ..., errors: str = ...) -> str: ... def endswith( - self, __suffix: bytes | Tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + self, __suffix: bytes | tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... if sys.version_info >= (3, 8): def expandtabs(self, tabsize: SupportsIndex = ...) -> bytearray: ... else: def expandtabs(self, tabsize: int = ...) -> bytearray: ... + def extend(self, __iterable_of_ints: Iterable[SupportsIndex]) -> None: ... def find( self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... @@ -551,6 +584,7 @@ class bytearray(MutableSequence[int], ByteString): def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... else: def hex(self) -> str: ... + def index( self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... @@ -559,6 +593,7 @@ class bytearray(MutableSequence[int], ByteString): def isalpha(self) -> bool: ... if sys.version_info >= (3, 7): def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... def islower(self) -> bool: ... def isspace(self) -> bool: ... 
@@ -568,10 +603,13 @@ class bytearray(MutableSequence[int], ByteString): def ljust(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... def lower(self) -> bytearray: ... def lstrip(self, __bytes: bytes | None = ...) -> bytearray: ... - def partition(self, __sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... + def partition(self, __sep: bytes) -> tuple[bytearray, bytearray, bytearray]: ... + def pop(self, __index: int = ...) -> int: ... + def remove(self, __value: int) -> None: ... if sys.version_info >= (3, 9): def removeprefix(self, __prefix: bytes) -> bytearray: ... def removesuffix(self, __suffix: bytes) -> bytearray: ... + def replace(self, __old: bytes, __new: bytes, __count: SupportsIndex = ...) -> bytearray: ... def rfind( self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... @@ -580,13 +618,13 @@ class bytearray(MutableSequence[int], ByteString): self, __sub: bytes | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> int: ... def rjust(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... - def rpartition(self, __sep: bytes) -> Tuple[bytearray, bytearray, bytearray]: ... + def rpartition(self, __sep: bytes) -> tuple[bytearray, bytearray, bytearray]: ... def rsplit(self, sep: bytes | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... def rstrip(self, __bytes: bytes | None = ...) -> bytearray: ... def split(self, sep: bytes | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... def splitlines(self, keepends: bool = ...) -> list[bytearray]: ... def startswith( - self, __prefix: bytes | Tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + self, __prefix: bytes | tuple[bytes, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... ) -> bool: ... def strip(self, __bytes: bytes | None = ...) -> bytearray: ... 
def swapcase(self) -> bytearray: ... @@ -595,76 +633,89 @@ class bytearray(MutableSequence[int], ByteString): def upper(self) -> bytearray: ... def zfill(self, __width: SupportsIndex) -> bytearray: ... @classmethod - def fromhex(cls, __string: str) -> bytearray: ... + def fromhex(cls: type[Self], __string: str) -> Self: ... @staticmethod def maketrans(__frm: bytes, __to: bytes) -> bytes: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[int]: ... - def __str__(self) -> str: ... - def __repr__(self) -> str: ... - __hash__: None # type: ignore + __hash__: None # type: ignore[assignment] @overload - def __getitem__(self, i: SupportsIndex) -> int: ... + def __getitem__(self, __i: SupportsIndex) -> int: ... @overload - def __getitem__(self, s: slice) -> bytearray: ... + def __getitem__(self, __s: slice) -> bytearray: ... @overload - def __setitem__(self, i: SupportsIndex, x: SupportsIndex) -> None: ... + def __setitem__(self, __i: SupportsIndex, __x: SupportsIndex) -> None: ... @overload - def __setitem__(self, s: slice, x: Iterable[SupportsIndex] | bytes) -> None: ... - def __delitem__(self, i: SupportsIndex | slice) -> None: ... - def __add__(self, s: bytes) -> bytearray: ... - def __iadd__(self, s: Iterable[int]) -> bytearray: ... - def __mul__(self, n: SupportsIndex) -> bytearray: ... - def __rmul__(self, n: SupportsIndex) -> bytearray: ... - def __imul__(self, n: SupportsIndex) -> bytearray: ... - def __mod__(self, value: Any) -> bytes: ... + def __setitem__(self, __s: slice, __x: Iterable[SupportsIndex] | bytes) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + def __add__(self, __s: bytes) -> bytearray: ... + def __iadd__(self: Self, __s: Iterable[int]) -> Self: ... + def __mul__(self, __n: SupportsIndex) -> bytearray: ... + def __rmul__(self, __n: SupportsIndex) -> bytearray: ... + def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __mod__(self, __value: Any) -> bytes: ... 
# Incompatible with Sequence.__contains__ - def __contains__(self, o: SupportsIndex | bytes) -> bool: ... # type: ignore - def __eq__(self, x: object) -> bool: ... - def __ne__(self, x: object) -> bool: ... - def __lt__(self, x: bytes) -> bool: ... - def __le__(self, x: bytes) -> bool: ... - def __gt__(self, x: bytes) -> bool: ... - def __ge__(self, x: bytes) -> bool: ... + def __contains__(self, __o: SupportsIndex | bytes) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: bytes) -> bool: ... + def __le__(self, __x: bytes) -> bool: ... + def __gt__(self, __x: bytes) -> bool: ... + def __ge__(self, __x: bytes) -> bool: ... + def __alloc__(self) -> int: ... +@final class memoryview(Sized, Sequence[int]): - format: str - itemsize: int - shape: Tuple[int, ...] | None - strides: Tuple[int, ...] | None - suboffsets: Tuple[int, ...] | None - readonly: bool - ndim: int - - obj: bytes | bytearray - c_contiguous: bool - f_contiguous: bool - contiguous: bool - nbytes: int + @property + def format(self) -> str: ... + @property + def itemsize(self) -> int: ... + @property + def shape(self) -> tuple[int, ...] | None: ... + @property + def strides(self) -> tuple[int, ...] | None: ... + @property + def suboffsets(self) -> tuple[int, ...] | None: ... + @property + def readonly(self) -> bool: ... + @property + def ndim(self) -> int: ... + @property + def obj(self) -> bytes | bytearray: ... + @property + def c_contiguous(self) -> bool: ... + @property + def f_contiguous(self) -> bool: ... + @property + def contiguous(self) -> bool: ... + @property + def nbytes(self) -> int: ... def __init__(self, obj: ReadableBuffer) -> None: ... def __enter__(self: Self) -> Self: ... 
def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None ) -> None: ... - def cast(self, format: str, shape: list[int] | Tuple[int, ...] = ...) -> memoryview: ... + def cast(self, format: str, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... @overload - def __getitem__(self, i: SupportsIndex) -> int: ... + def __getitem__(self, __i: SupportsIndex) -> int: ... @overload - def __getitem__(self, s: slice) -> memoryview: ... - def __contains__(self, x: object) -> bool: ... + def __getitem__(self, __s: slice) -> memoryview: ... + def __contains__(self, __x: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... def __len__(self) -> int: ... @overload - def __setitem__(self, s: slice, o: bytes) -> None: ... + def __setitem__(self, __s: slice, __o: bytes) -> None: ... @overload - def __setitem__(self, i: SupportsIndex, o: SupportsIndex) -> None: ... + def __setitem__(self, __i: SupportsIndex, __o: SupportsIndex) -> None: ... if sys.version_info >= (3, 8): def tobytes(self, order: Literal["C", "F", "A"] | None = ...) -> bytes: ... else: def tobytes(self) -> bytes: ... + def tolist(self) -> list[int]: ... if sys.version_info >= (3, 8): def toreadonly(self) -> memoryview: ... + def release(self) -> None: ... if sys.version_info >= (3, 8): def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... @@ -673,279 +724,286 @@ class memoryview(Sized, Sequence[int]): @final class bool(int): - def __new__(cls: Type[_T], __o: object = ...) -> _T: ... + def __new__(cls: type[Self], __o: object = ...) -> Self: ... + # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), + # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). @overload - def __and__(self, x: bool) -> bool: ... 
+ def __and__(self, __x: bool) -> bool: ... @overload - def __and__(self, x: int) -> int: ... + def __and__(self, __x: int) -> int: ... @overload - def __or__(self, x: bool) -> bool: ... + def __or__(self, __x: bool) -> bool: ... @overload - def __or__(self, x: int) -> int: ... + def __or__(self, __x: int) -> int: ... @overload - def __xor__(self, x: bool) -> bool: ... + def __xor__(self, __x: bool) -> bool: ... @overload - def __xor__(self, x: int) -> int: ... + def __xor__(self, __x: int) -> int: ... @overload - def __rand__(self, x: bool) -> bool: ... + def __rand__(self, __x: bool) -> bool: ... @overload - def __rand__(self, x: int) -> int: ... + def __rand__(self, __x: int) -> int: ... @overload - def __ror__(self, x: bool) -> bool: ... + def __ror__(self, __x: bool) -> bool: ... @overload - def __ror__(self, x: int) -> int: ... + def __ror__(self, __x: int) -> int: ... @overload - def __rxor__(self, x: bool) -> bool: ... + def __rxor__(self, __x: bool) -> bool: ... @overload - def __rxor__(self, x: int) -> int: ... - def __getnewargs__(self) -> Tuple[int]: ... + def __rxor__(self, __x: int) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... -class slice(object): - start: Any - step: Any - stop: Any +@final +class slice: + @property + def start(self) -> Any: ... + @property + def step(self) -> Any: ... + @property + def stop(self) -> Any: ... @overload - def __init__(self, stop: Any) -> None: ... + def __init__(self, __stop: Any) -> None: ... @overload - def __init__(self, start: Any, stop: Any, step: Any = ...) -> None: ... - __hash__: None # type: ignore - def indices(self, len: SupportsIndex) -> Tuple[int, int, int]: ... + def __init__(self, __start: Any, __stop: Any, __step: Any = ...) -> None: ... + __hash__: None # type: ignore[assignment] + def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... class tuple(Sequence[_T_co], Generic[_T_co]): - def __new__(cls: Type[_T], iterable: Iterable[_T_co] = ...) -> _T: ... 
+ def __new__(cls: type[Self], __iterable: Iterable[_T_co] = ...) -> Self: ... def __len__(self) -> int: ... - def __contains__(self, x: object) -> bool: ... + def __contains__(self, __x: object) -> bool: ... @overload - def __getitem__(self, x: int) -> _T_co: ... + def __getitem__(self, __x: SupportsIndex) -> _T_co: ... @overload - def __getitem__(self, x: slice) -> Tuple[_T_co, ...]: ... + def __getitem__(self, __x: slice) -> tuple[_T_co, ...]: ... def __iter__(self) -> Iterator[_T_co]: ... - def __lt__(self, x: Tuple[_T_co, ...]) -> bool: ... - def __le__(self, x: Tuple[_T_co, ...]) -> bool: ... - def __gt__(self, x: Tuple[_T_co, ...]) -> bool: ... - def __ge__(self, x: Tuple[_T_co, ...]) -> bool: ... + def __lt__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __le__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __gt__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __ge__(self, __x: tuple[_T_co, ...]) -> bool: ... @overload - def __add__(self, x: Tuple[_T_co, ...]) -> Tuple[_T_co, ...]: ... + def __add__(self, __x: tuple[_T_co, ...]) -> tuple[_T_co, ...]: ... @overload - def __add__(self, x: Tuple[_T, ...]) -> Tuple[_T_co | _T, ...]: ... - def __mul__(self, n: SupportsIndex) -> Tuple[_T_co, ...]: ... - def __rmul__(self, n: SupportsIndex) -> Tuple[_T_co, ...]: ... + def __add__(self, __x: tuple[_T, ...]) -> tuple[_T_co | _T, ...]: ... + def __mul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... + def __rmul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... def count(self, __value: Any) -> int: ... def index(self, __value: Any, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... +# Doesn't exist at runtime, but deleting this breaks mypy. See #2999 +@final class function: - # TODO not defined in builtins! 
- __name__: str - __module__: str + # Make sure this class definition stays roughly in line with `types.FunctionType` + __closure__: tuple[_Cell, ...] | None __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + __globals__: dict[str, Any] + __name__: str __qualname__: str __annotations__: dict[str, Any] + __kwdefaults__: dict[str, Any] + __module__: str + # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. + def __get__(self, obj: object | None, type: type | None = ...) -> Any: ... class list(MutableSequence[_T], Generic[_T]): @overload def __init__(self) -> None: ... @overload - def __init__(self, iterable: Iterable[_T]) -> None: ... - def clear(self) -> None: ... + def __init__(self, __iterable: Iterable[_T]) -> None: ... def copy(self) -> list[_T]: ... def append(self, __object: _T) -> None: ... def extend(self, __iterable: Iterable[_T]) -> None: ... def pop(self, __index: SupportsIndex = ...) -> _T: ... + # Signature of `list.index` should be kept in line with `collections.UserList.index()` def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... def count(self, __value: _T) -> int: ... def insert(self, __index: SupportsIndex, __object: _T) -> None: ... def remove(self, __value: _T) -> None: ... - def reverse(self) -> None: ... + # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` @overload - def sort(self: list[SupportsLessThanT], *, key: None = ..., reverse: bool = ...) -> None: ... + def sort(self: list[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... @overload - def sort(self, *, key: Callable[[_T], SupportsLessThan], reverse: bool = ...) -> None: ... + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_T]: ... 
- def __str__(self) -> str: ... - __hash__: None # type: ignore + __hash__: None # type: ignore[assignment] @overload - def __getitem__(self, i: SupportsIndex) -> _T: ... + def __getitem__(self, __i: SupportsIndex) -> _T: ... @overload - def __getitem__(self, s: slice) -> list[_T]: ... + def __getitem__(self, __s: slice) -> list[_T]: ... @overload - def __setitem__(self, i: SupportsIndex, o: _T) -> None: ... + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... @overload - def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... - def __delitem__(self, i: SupportsIndex | slice) -> None: ... - def __add__(self, x: list[_T]) -> list[_T]: ... - def __iadd__(self: _S, x: Iterable[_T]) -> _S: ... - def __mul__(self, n: SupportsIndex) -> list[_T]: ... - def __rmul__(self, n: SupportsIndex) -> list[_T]: ... - def __imul__(self: _S, n: SupportsIndex) -> _S: ... - def __contains__(self, o: object) -> bool: ... + def __setitem__(self, __s: slice, __o: Iterable[_T]) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + def __add__(self, __x: list[_T]) -> list[_T]: ... + def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... + def __mul__(self, __n: SupportsIndex) -> list[_T]: ... + def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... + def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __contains__(self, __o: object) -> bool: ... def __reversed__(self) -> Iterator[_T]: ... - def __gt__(self, x: list[_T]) -> bool: ... - def __ge__(self, x: list[_T]) -> bool: ... - def __lt__(self, x: list[_T]) -> bool: ... - def __le__(self, x: list[_T]) -> bool: ... + def __gt__(self, __x: list[_T]) -> bool: ... + def __ge__(self, __x: list[_T]) -> bool: ... + def __lt__(self, __x: list[_T]) -> bool: ... + def __le__(self, __x: list[_T]) -> bool: ... if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
- -class _dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): - if sys.version_info >= (3, 10): - mapping: MappingProxyType[_KT_co, _VT_co] - -# The generics are the wrong way around because of a mypy limitation -# https://github.com/python/mypy/issues/11138 -class _dict_values(ValuesView[_VT_co], Generic[_VT_co, _KT_co]): - if sys.version_info >= (3, 10): - mapping: MappingProxyType[_KT_co, _VT_co] - -class _dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]): - if sys.version_info >= (3, 10): - mapping: MappingProxyType[_KT_co, _VT_co] + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics @overload def __init__(self: dict[_KT, _VT]) -> None: ... @overload def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ... @overload - def __init__(self, map: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... + def __init__(self, __map: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... @overload - def __init__(self, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + def __init__(self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: ... # Next overload is for dict(string.split(sep) for string in iterable) # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error @overload - def __init__(self: dict[str, str], iterable: Iterable[list[str]]) -> None: ... - def __new__(cls: Type[_T1], *args: Any, **kwargs: Any) -> _T1: ... - def clear(self) -> None: ... + def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ... + def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self: ... def copy(self) -> dict[_KT, _VT]: ... - def popitem(self) -> Tuple[_KT, _VT]: ... - def setdefault(self, __key: _KT, __default: _VT = ...) -> _VT: ... 
- @overload - def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... - @overload - def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... - @overload - def update(self, **kwargs: _VT) -> None: ... - def keys(self) -> _dict_keys[_KT, _VT]: ... - def values(self) -> _dict_values[_VT, _KT]: ... - def items(self) -> _dict_items[_KT, _VT]: ... + def keys(self) -> dict_keys[_KT, _VT]: ... + def values(self) -> dict_values[_KT, _VT]: ... + def items(self) -> dict_items[_KT, _VT]: ... + # Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` + # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. @classmethod @overload def fromkeys(cls, __iterable: Iterable[_T], __value: None = ...) -> dict[_T, Any | None]: ... @classmethod @overload def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ... + # Positional-only in dict, but not in MutableMapping + @overload + def get(self, __key: _KT) -> _VT | None: ... + @overload + def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _VT | _T = ...) -> _VT | _T: ... def __len__(self) -> int: ... - def __getitem__(self, k: _KT) -> _VT: ... - def __setitem__(self, k: _KT, v: _VT) -> None: ... - def __delitem__(self, v: _KT) -> None: ... + def __getitem__(self, __k: _KT) -> _VT: ... + def __setitem__(self, __k: _KT, __v: _VT) -> None: ... + def __delitem__(self, __v: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT]: ... - def __str__(self) -> str: ... 
- __hash__: None # type: ignore + __hash__: None # type: ignore[assignment] if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... - def __ior__(self, __value: Mapping[_KT, _VT]) -> dict[_KT, _VT]: ... # type: ignore + # dict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... class set(MutableSet[_T], Generic[_T]): - def __init__(self, iterable: Iterable[_T] = ...) -> None: ... - def add(self, element: _T) -> None: ... - def clear(self) -> None: ... - def copy(self) -> Set[_T]: ... - def difference(self, *s: Iterable[Any]) -> Set[_T]: ... + def __init__(self, __iterable: Iterable[_T] = ...) -> None: ... + def add(self, __element: _T) -> None: ... + def copy(self) -> set[_T]: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... def difference_update(self, *s: Iterable[Any]) -> None: ... - def discard(self, element: _T) -> None: ... - def intersection(self, *s: Iterable[Any]) -> Set[_T]: ... + def discard(self, __element: _T) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... def intersection_update(self, *s: Iterable[Any]) -> None: ... - def isdisjoint(self, s: Iterable[Any]) -> bool: ... - def issubset(self, s: Iterable[Any]) -> bool: ... - def issuperset(self, s: Iterable[Any]) -> bool: ... - def pop(self) -> _T: ... - def remove(self, element: _T) -> None: ... - def symmetric_difference(self, s: Iterable[_T]) -> Set[_T]: ... - def symmetric_difference_update(self, s: Iterable[_T]) -> None: ... - def union(self, *s: Iterable[_T]) -> Set[_T]: ... 
+ def isdisjoint(self, __s: Iterable[Any]) -> bool: ... + def issubset(self, __s: Iterable[Any]) -> bool: ... + def issuperset(self, __s: Iterable[Any]) -> bool: ... + def remove(self, __element: _T) -> None: ... + def symmetric_difference(self, __s: Iterable[_T]) -> set[_T]: ... + def symmetric_difference_update(self, __s: Iterable[_T]) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... def update(self, *s: Iterable[_T]) -> None: ... def __len__(self) -> int: ... - def __contains__(self, o: object) -> bool: ... + def __contains__(self, __o: object) -> bool: ... def __iter__(self) -> Iterator[_T]: ... - def __str__(self) -> str: ... - def __and__(self, s: AbstractSet[object]) -> Set[_T]: ... - def __iand__(self, s: AbstractSet[object]) -> Set[_T]: ... - def __or__(self, s: AbstractSet[_S]) -> Set[_T | _S]: ... - def __ior__(self, s: AbstractSet[_S]) -> Set[_T | _S]: ... - def __sub__(self, s: AbstractSet[_T | None]) -> Set[_T]: ... - def __isub__(self, s: AbstractSet[_T | None]) -> Set[_T]: ... - def __xor__(self, s: AbstractSet[_S]) -> Set[_T | _S]: ... - def __ixor__(self, s: AbstractSet[_S]) -> Set[_T | _S]: ... - def __le__(self, s: AbstractSet[object]) -> bool: ... - def __lt__(self, s: AbstractSet[object]) -> bool: ... - def __ge__(self, s: AbstractSet[object]) -> bool: ... - def __gt__(self, s: AbstractSet[object]) -> bool: ... - __hash__: None # type: ignore + def __and__(self, __s: AbstractSet[object]) -> set[_T]: ... + def __iand__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __or__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... + def __ior__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __sub__(self, __s: AbstractSet[_T | None]) -> set[_T]: ... + def __isub__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __xor__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... + def __ixor__(self: Self, __s: AbstractSet[_T]) -> Self: ... 
# type: ignore[override,misc] + def __le__(self, __s: AbstractSet[object]) -> bool: ... + def __lt__(self, __s: AbstractSet[object]) -> bool: ... + def __ge__(self, __s: AbstractSet[object]) -> bool: ... + def __gt__(self, __s: AbstractSet[object]) -> bool: ... + __hash__: None # type: ignore[assignment] if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class frozenset(AbstractSet[_T_co], Generic[_T_co]): - def __init__(self, iterable: Iterable[_T_co] = ...) -> None: ... - def copy(self) -> FrozenSet[_T_co]: ... - def difference(self, *s: Iterable[object]) -> FrozenSet[_T_co]: ... - def intersection(self, *s: Iterable[object]) -> FrozenSet[_T_co]: ... - def isdisjoint(self, s: Iterable[_T_co]) -> bool: ... - def issubset(self, s: Iterable[object]) -> bool: ... - def issuperset(self, s: Iterable[object]) -> bool: ... - def symmetric_difference(self, s: Iterable[_T_co]) -> FrozenSet[_T_co]: ... - def union(self, *s: Iterable[_T_co]) -> FrozenSet[_T_co]: ... + def __init__(self, __iterable: Iterable[_T_co] = ...) -> None: ... + def copy(self) -> frozenset[_T_co]: ... + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def isdisjoint(self, __s: Iterable[_T_co]) -> bool: ... + def issubset(self, __s: Iterable[object]) -> bool: ... + def issuperset(self, __s: Iterable[object]) -> bool: ... + def symmetric_difference(self, __s: Iterable[_T_co]) -> frozenset[_T_co]: ... + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: ... def __len__(self) -> int: ... - def __contains__(self, o: object) -> bool: ... + def __contains__(self, __o: object) -> bool: ... def __iter__(self) -> Iterator[_T_co]: ... - def __str__(self) -> str: ... - def __and__(self, s: AbstractSet[_T_co]) -> FrozenSet[_T_co]: ... - def __or__(self, s: AbstractSet[_S]) -> FrozenSet[_T_co | _S]: ... 
- def __sub__(self, s: AbstractSet[_T_co]) -> FrozenSet[_T_co]: ... - def __xor__(self, s: AbstractSet[_S]) -> FrozenSet[_T_co | _S]: ... - def __le__(self, s: AbstractSet[object]) -> bool: ... - def __lt__(self, s: AbstractSet[object]) -> bool: ... - def __ge__(self, s: AbstractSet[object]) -> bool: ... - def __gt__(self, s: AbstractSet[object]) -> bool: ... + def __and__(self, __s: AbstractSet[_T_co]) -> frozenset[_T_co]: ... + def __or__(self, __s: AbstractSet[_S]) -> frozenset[_T_co | _S]: ... + def __sub__(self, __s: AbstractSet[_T_co]) -> frozenset[_T_co]: ... + def __xor__(self, __s: AbstractSet[_S]) -> frozenset[_T_co | _S]: ... + def __le__(self, __s: AbstractSet[object]) -> bool: ... + def __lt__(self, __s: AbstractSet[object]) -> bool: ... + def __ge__(self, __s: AbstractSet[object]) -> bool: ... + def __gt__(self, __s: AbstractSet[object]) -> bool: ... if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... -class enumerate(Iterator[Tuple[int, _T]], Generic[_T]): +class enumerate(Iterator[tuple[int, _T]], Generic[_T]): def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... - def __iter__(self) -> Iterator[Tuple[int, _T]]: ... - def __next__(self) -> Tuple[int, _T]: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[int, _T]: ... if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... +@final class range(Sequence[int]): - start: int - stop: int - step: int + @property + def start(self) -> int: ... + @property + def stop(self) -> int: ... + @property + def step(self) -> int: ... @overload - def __init__(self, stop: SupportsIndex) -> None: ... + def __init__(self, __stop: SupportsIndex) -> None: ... @overload - def __init__(self, start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = ...) -> None: ... 
- def count(self, value: int) -> int: ... - def index(self, value: int) -> int: ... # type: ignore + def __init__(self, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> None: ... + def count(self, __value: int) -> int: ... + def index(self, __value: int) -> int: ... # type: ignore[override] def __len__(self) -> int: ... - def __contains__(self, o: object) -> bool: ... + def __contains__(self, __o: object) -> bool: ... def __iter__(self) -> Iterator[int]: ... @overload - def __getitem__(self, i: SupportsIndex) -> int: ... + def __getitem__(self, __i: SupportsIndex) -> int: ... @overload - def __getitem__(self, s: slice) -> range: ... - def __repr__(self) -> str: ... + def __getitem__(self, __s: slice) -> range: ... def __reversed__(self) -> Iterator[int]: ... -class property(object): +class property: fget: Callable[[Any], Any] | None fset: Callable[[Any, Any], None] | None fdel: Callable[[Any], None] | None + __isabstractmethod__: bool def __init__( self, fget: Callable[[Any], Any] | None = ..., @@ -953,17 +1011,18 @@ class property(object): fdel: Callable[[Any], None] | None = ..., doc: str | None = ..., ) -> None: ... - def getter(self, fget: Callable[[Any], Any]) -> property: ... - def setter(self, fset: Callable[[Any, Any], None]) -> property: ... - def deleter(self, fdel: Callable[[Any], None]) -> property: ... - def __get__(self, obj: Any, type: type | None = ...) -> Any: ... - def __set__(self, obj: Any, value: Any) -> None: ... - def __delete__(self, obj: Any) -> None: ... - -class _NotImplementedType(Any): # type: ignore + def getter(self, __fget: Callable[[Any], Any]) -> property: ... + def setter(self, __fset: Callable[[Any, Any], None]) -> property: ... + def deleter(self, __fdel: Callable[[Any], None]) -> property: ... + def __get__(self, __obj: Any, __type: type | None = ...) -> Any: ... + def __set__(self, __obj: Any, __value: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... 
+ +@final +class _NotImplementedType(Any): # type: ignore[misc] # A little weird, but typing the __call__ as NotImplemented makes the error message # for NotImplemented() much better - __call__: NotImplemented # type: ignore + __call__: NotImplemented # type: ignore[valid-type] NotImplemented: _NotImplementedType @@ -976,7 +1035,7 @@ def bin(__number: int | SupportsIndex) -> str: ... if sys.version_info >= (3, 7): def breakpoint(*args: Any, **kws: Any) -> None: ... -def callable(__obj: object) -> bool: ... +def callable(__obj: object) -> TypeGuard[Callable[..., object]]: ... def chr(__i: int) -> str: ... # We define this here instead of using os.PathLike to avoid import cycle issues. @@ -987,15 +1046,16 @@ class _PathLike(Protocol[_AnyStr_co]): def __fspath__(self) -> _AnyStr_co: ... if sys.version_info >= (3, 10): - def aiter(__iterable: AsyncIterable[_T]) -> AsyncIterator[_T]: ... + def aiter(__async_iterable: _SupportsAiter[_SupportsAnextT]) -> _SupportsAnextT: ... @overload async def anext(__i: SupportsAnext[_T]) -> _T: ... @overload async def anext(__i: SupportsAnext[_T], default: _VT) -> _T | _VT: ... +# TODO: `compile` has a more precise return type in reality; work on a way of expressing that? if sys.version_info >= (3, 8): def compile( - source: str | bytes | mod | AST, + source: str | bytes | AST, filename: str | bytes | _PathLike[Any], mode: str, flags: int = ..., @@ -1007,7 +1067,7 @@ if sys.version_info >= (3, 8): else: def compile( - source: str | bytes | mod | AST, + source: str | bytes | AST, filename: str | bytes | _PathLike[Any], mode: str, flags: int = ..., @@ -1017,65 +1077,78 @@ else: def copyright() -> None: ... def credits() -> None: ... -def delattr(__obj: Any, __name: str) -> None: ... +def delattr(__obj: object, __name: str) -> None: ... def dir(__o: object = ...) -> list[str]: ... @overload def divmod(__x: SupportsDivMod[_T_contra, _T_co], __y: _T_contra) -> _T_co: ... 
@overload def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ... + +# The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance. +# (The `globals` argument has to be a "real dict", rather than any old mapping, unlike the `locals` argument.) def eval( - __source: str | bytes | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, Any] | None = ... + __source: str | bytes | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, object] | None = ... ) -> Any: ... + +# Comment above regarding `eval` applies to `exec` as well def exec( - __source: str | bytes | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, Any] | None = ... -) -> Any: ... + __source: str | bytes | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, object] | None = ... +) -> None: ... def exit(code: object = ...) -> NoReturn: ... class filter(Iterator[_T], Generic[_T]): @overload def __init__(self, __function: None, __iterable: Iterable[_T | None]) -> None: ... @overload + def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ... + @overload def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... def format(__value: object, __format_spec: str = ...) -> str: ... # TODO unicode @overload -def getattr(__o: object, name: str) -> Any: ... +def getattr(__o: object, __name: str) -> Any: ... 
-# While technically covered by the last overload, spelling out the types for None and bool -# help mypy out in some tricky situations involving type context (aka bidirectional inference) +# While technically covered by the last overload, spelling out the types for None, bool +# and basic containers help mypy out in some tricky situations involving type context +# (aka bidirectional inference) +@overload +def getattr(__o: object, __name: str, __default: None) -> Any | None: ... @overload -def getattr(__o: object, name: str, __default: None) -> Any | None: ... +def getattr(__o: object, __name: str, __default: bool) -> Any | bool: ... @overload -def getattr(__o: object, name: str, __default: bool) -> Any | bool: ... +def getattr(__o: object, name: str, __default: list[Any]) -> Any | list[Any]: ... @overload -def getattr(__o: object, name: str, __default: _T) -> Any | _T: ... +def getattr(__o: object, name: str, __default: dict[Any, Any]) -> Any | dict[Any, Any]: ... +@overload +def getattr(__o: object, __name: str, __default: _T) -> Any | _T: ... def globals() -> dict[str, Any]: ... def hasattr(__obj: object, __name: str) -> bool: ... def hash(__obj: object) -> int: ... -def help(*args: Any, **kwds: Any) -> None: ... +def help(request: object = ...) -> None: ... def hex(__number: int | SupportsIndex) -> str: ... def id(__obj: object) -> int: ... -def input(__prompt: Any = ...) -> str: ... +def input(__prompt: object = ...) -> str: ... @overload -def iter(__iterable: Iterable[_T]) -> Iterator[_T]: ... +def iter(__iterable: _SupportsIter[_SupportsNextT]) -> _SupportsNextT: ... @overload def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: ... @overload -def iter(__function: Callable[[], _T], __sentinel: Any) -> Iterator[_T]: ... +def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... 
+# We need recursive types to express the type of the second argument to `isinstance` properly, hence the use of `Any` if sys.version_info >= (3, 10): def isinstance( - __obj: object, __class_or_tuple: type | types.UnionType | Tuple[type | types.UnionType | Tuple[Any, ...], ...] + __obj: object, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] ) -> bool: ... def issubclass( - __cls: type, __class_or_tuple: type | types.UnionType | Tuple[type | types.UnionType | Tuple[Any, ...], ...] + __cls: type, __class_or_tuple: type | types.UnionType | tuple[type | types.UnionType | tuple[Any, ...], ...] ) -> bool: ... else: - def isinstance(__obj: object, __class_or_tuple: type | Tuple[type | Tuple[Any, ...], ...]) -> bool: ... - def issubclass(__cls: type, __class_or_tuple: type | Tuple[type | Tuple[Any, ...], ...]) -> bool: ... + def isinstance(__obj: object, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... + def issubclass(__cls: type, __class_or_tuple: type | tuple[type | tuple[Any, ...], ...]) -> bool: ... def len(__obj: Sized) -> int: ... def license() -> None: ... @@ -1121,41 +1194,41 @@ class map(Iterator[_S], Generic[_S]): __iter6: Iterable[Any], *iterables: Iterable[Any], ) -> None: ... - def __iter__(self) -> Iterator[_S]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _S: ... @overload def max( - __arg1: SupportsLessThanT, __arg2: SupportsLessThanT, *_args: SupportsLessThanT, key: None = ... -) -> SupportsLessThanT: ... + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... +) -> SupportsRichComparisonT: ... @overload -def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsLessThan]) -> _T: ... +def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def max(__iterable: Iterable[SupportsLessThanT], *, key: None = ...) -> SupportsLessThanT: ... 
+def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... @overload -def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThan]) -> _T: ... +def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def max(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., default: _T) -> SupportsLessThanT | _T: ... +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... @overload -def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsLessThan], default: _T2) -> _T1 | _T2: ... +def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload def min( - __arg1: SupportsLessThanT, __arg2: SupportsLessThanT, *_args: SupportsLessThanT, key: None = ... -) -> SupportsLessThanT: ... + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... +) -> SupportsRichComparisonT: ... @overload -def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsLessThan]) -> _T: ... +def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def min(__iterable: Iterable[SupportsLessThanT], *, key: None = ...) -> SupportsLessThanT: ... +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... @overload -def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThan]) -> _T: ... +def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... @overload -def min(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., default: _T) -> SupportsLessThanT | _T: ... +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... 
@overload -def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsLessThan], default: _T2) -> _T1 | _T2: ... +def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... @overload def next(__i: SupportsNext[_T]) -> _T: ... @overload -def next(__i: SupportsNext[_T], default: _VT) -> _T | _VT: ... +def next(__i: SupportsNext[_T], __default: _VT) -> _T | _VT: ... def oct(__number: int | SupportsIndex) -> str: ... _OpenFile = Union[StrOrBytesPath, int] @@ -1248,8 +1321,21 @@ def open( opener: _Opener | None = ..., ) -> IO[Any]: ... def ord(__c: str | bytes) -> int: ... + +class _SupportsWriteAndFlush(SupportsWrite[_T_contra], Protocol[_T_contra]): + def flush(self) -> None: ... + +@overload def print( - *values: object, sep: str | None = ..., end: str | None = ..., file: SupportsWrite[str] | None = ..., flush: bool = ... + *values: object, + sep: str | None = ..., + end: str | None = ..., + file: SupportsWrite[str] | None = ..., + flush: Literal[False] = ..., +) -> None: ... +@overload +def print( + *values: object, sep: str | None = ..., end: str | None = ..., file: _SupportsWriteAndFlush[str] | None = ..., flush: bool ) -> None: ... _E = TypeVar("_E", contravariant=True) @@ -1258,34 +1344,78 @@ _M = TypeVar("_M", contravariant=True) class _SupportsPow2(Protocol[_E, _T_co]): def __pow__(self, __other: _E) -> _T_co: ... +class _SupportsPow3NoneOnly(Protocol[_E, _T_co]): + def __pow__(self, __other: _E, __modulo: None = ...) -> _T_co: ... + class _SupportsPow3(Protocol[_E, _M, _T_co]): def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ... +_SupportsSomeKindOfPow = Union[_SupportsPow2[Any, Any], _SupportsPow3NoneOnly[Any, Any], _SupportsPow3[Any, Any, Any]] + if sys.version_info >= (3, 8): @overload - def pow(base: int, exp: int, mod: None = ...) -> Any: ... # returns int or float depending on whether exp is non-negative + def pow(base: int, exp: int, mod: Literal[0]) -> NoReturn: ... 
@overload def pow(base: int, exp: int, mod: int) -> int: ... @overload - def pow(base: float, exp: float, mod: None = ...) -> float: ... + def pow(base: int, exp: Literal[0], mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + @overload + def pow(base: int, exp: _PositiveInteger, mod: None = ...) -> int: ... # type: ignore[misc] + @overload + def pow(base: int, exp: _NegativeInteger, mod: None = ...) -> float: ... # type: ignore[misc] + # int base & positive-int exp -> int; int base & negative-int exp -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def pow(base: int, exp: int, mod: None = ...) -> Any: ... + @overload + def pow(base: float, exp: int, mod: None = ...) -> float: ... + # float base & float exp could return float or complex + # return type must be Any (same as complex base, complex exp), + # as `float | complex` causes too many false-positive errors + @overload + def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> Any: ... + @overload + def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> complex: ... + @overload + def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + @overload + def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... @overload - def pow(base: _SupportsPow2[_E, _T_co], exp: _E) -> _T_co: ... + def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M = ...) -> _T_co: ... @overload - def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M) -> _T_co: ... + def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = ...) -> Any: ... + @overload + def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = ...) -> complex: ... else: @overload - def pow( - __base: int, __exp: int, __mod: None = ... - ) -> Any: ... # returns int or float depending on whether exp is non-negative + def pow(__base: int, __exp: int, __mod: Literal[0]) -> NoReturn: ... 
@overload def pow(__base: int, __exp: int, __mod: int) -> int: ... @overload - def pow(__base: float, __exp: float, __mod: None = ...) -> float: ... + def pow(__base: int, __exp: Literal[0], __mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: _PositiveInteger, __mod: None = ...) -> int: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: _NegativeInteger, __mod: None = ...) -> float: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: int, __mod: None = ...) -> Any: ... + @overload + def pow(__base: float, __exp: int, __mod: None = ...) -> float: ... + @overload + def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> Any: ... + @overload + def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> complex: ... + @overload + def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... @overload - def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E) -> _T_co: ... + def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... @overload - def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M) -> _T_co: ... + def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M = ...) -> _T_co: ... + @overload + def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = ...) -> Any: ... + @overload + def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = ...) -> complex: ... def quit(code: object = ...) -> NoReturn: ... @@ -1294,8 +1424,9 @@ class reversed(Iterator[_T], Generic[_T]): def __init__(self, __sequence: Reversible[_T]) -> None: ... @overload def __init__(self, __sequence: SupportsLenAndGetItem[_T]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... + def __length_hint__(self) -> int: ... def repr(__obj: object) -> str: ... 
@overload @@ -1304,84 +1435,139 @@ def round(number: SupportsRound[Any]) -> int: ... def round(number: SupportsRound[Any], ndigits: None) -> int: ... @overload def round(number: SupportsRound[_T], ndigits: SupportsIndex) -> _T: ... + +# See https://github.com/python/typeshed/pull/6292#discussion_r748875189 +# for why arg 3 of `setattr` should be annotated with `Any` and not `object` def setattr(__obj: object, __name: str, __value: Any) -> None: ... @overload -def sorted(__iterable: Iterable[SupportsLessThanT], *, key: None = ..., reverse: bool = ...) -> list[SupportsLessThanT]: ... +def sorted( + __iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ... +) -> list[SupportsRichComparisonT]: ... @overload -def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsLessThan], reverse: bool = ...) -> list[_T]: ... +def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> list[_T]: ... if sys.version_info >= (3, 8): @overload - def sum(__iterable: Iterable[_T]) -> _T | int: ... + def sum(__iterable: Iterable[_T]) -> _T | Literal[0]: ... @overload def sum(__iterable: Iterable[_T], start: _S) -> _T | _S: ... else: @overload - def sum(__iterable: Iterable[_T]) -> _T | int: ... + def sum(__iterable: Iterable[_T]) -> _T | Literal[0]: ... @overload def sum(__iterable: Iterable[_T], __start: _S) -> _T | _S: ... +# The argument to `vars()` has to have a `__dict__` attribute, so can't be annotated with `object` +# (A "SupportsDunderDict" protocol doesn't work) def vars(__object: Any = ...) -> dict[str, Any]: ... class zip(Iterator[_T_co], Generic[_T_co]): - @overload - def __new__(cls, __iter1: Iterable[_T1]) -> zip[Tuple[_T1]]: ... - @overload - def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> zip[Tuple[_T1, _T2]]: ... - @overload - def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> zip[Tuple[_T1, _T2, _T3]]: ... 
- @overload - def __new__( - cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] - ) -> zip[Tuple[_T1, _T2, _T3, _T4]]: ... - @overload - def __new__( - cls, - __iter1: Iterable[_T1], - __iter2: Iterable[_T2], - __iter3: Iterable[_T3], - __iter4: Iterable[_T4], - __iter5: Iterable[_T5], - ) -> zip[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... - @overload - def __new__( - cls, - __iter1: Iterable[Any], - __iter2: Iterable[Any], - __iter3: Iterable[Any], - __iter4: Iterable[Any], - __iter5: Iterable[Any], - __iter6: Iterable[Any], - *iterables: Iterable[Any], - ) -> zip[Tuple[Any, ...]]: ... - def __iter__(self) -> Iterator[_T_co]: ... + if sys.version_info >= (3, 10): + @overload + def __new__(cls, __iter1: Iterable[_T1], *, strict: bool = ...) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], *, strict: bool = ...) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], *, strict: bool = ... + ) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + *, + strict: bool = ..., + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + *, + strict: bool = ..., + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + strict: bool = ..., + ) -> zip[tuple[Any, ...]]: ... + else: + @overload + def __new__(cls, __iter1: Iterable[_T1]) -> zip[tuple[_T1]]: ... 
+ @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + ) -> zip[tuple[Any, ...]]: ... + + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T_co: ... +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` +# Return type of `__import__` should be kept the same as return type of `importlib.import_module` def __import__( name: str, - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., + globals: Mapping[str, object] | None = ..., + locals: Mapping[str, object] | None = ..., fromlist: Sequence[str] = ..., level: int = ..., -) -> Any: ... +) -> types.ModuleType: ... +def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... # Actually the type of Ellipsis is , but since it's # not exposed anywhere under that name, we make it private here. +@final class ellipsis: ... Ellipsis: ellipsis -class BaseException(object): - args: Tuple[Any, ...] +class BaseException: + args: tuple[Any, ...] 
__cause__: BaseException | None __context__: BaseException | None __suppress_context__: bool __traceback__: TracebackType | None + if sys.version_info >= (3, 11): + __note__: str | None def __init__(self, *args: object) -> None: ... - def __str__(self) -> str: ... - def __repr__(self) -> str: ... - def with_traceback(self: _TBE, tb: TracebackType | None) -> _TBE: ... + def with_traceback(self: Self, __tb: TracebackType | None) -> Self: ... class GeneratorExit(BaseException): ... class KeyboardInterrupt(BaseException): ... @@ -1500,7 +1686,14 @@ class UnicodeEncodeError(UnicodeError): reason: str def __init__(self, __encoding: str, __object: str, __start: int, __end: int, __reason: str) -> None: ... -class UnicodeTranslateError(UnicodeError): ... +class UnicodeTranslateError(UnicodeError): + encoding: None + object: str + start: int + end: int + reason: str + def __init__(self, __object: str, __start: int, __end: int, __reason: str) -> None: ... + class Warning(Exception): ... class UserWarning(Warning): ... class DeprecationWarning(Warning): ... @@ -1515,3 +1708,21 @@ class ResourceWarning(Warning): ... if sys.version_info >= (3, 10): class EncodingWarning(Warning): ... + +if sys.version_info >= (3, 11): + _SplitCondition = type[BaseException] | tuple[type[BaseException], ...] | Callable[[BaseException], bool] + + class BaseExceptionGroup(BaseException): + def __new__(cls: type[Self], __message: str, __exceptions: Sequence[BaseException]) -> Self: ... + @property + def message(self) -> str: ... + @property + def exceptions(self) -> tuple[BaseException, ...]: ... + def subgroup(self: Self, __condition: _SplitCondition) -> Self | None: ... + def split(self: Self, __condition: _SplitCondition) -> tuple[Self | None, Self | None]: ... + def derive(self: Self, __excs: Sequence[BaseException]) -> Self: ... + + class ExceptionGroup(BaseExceptionGroup, Exception): + def __new__(cls: type[Self], __message: str, __exceptions: Sequence[Exception]) -> Self: ... 
+ @property + def exceptions(self) -> tuple[Exception, ...]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/bz2.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/bz2.pyi index e7b57ec54db0..f1467acadd10 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/bz2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/bz2.pyi @@ -2,8 +2,10 @@ import _compression import sys from _compression import BaseStream from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer -from typing import IO, Any, Iterable, Protocol, TextIO, TypeVar, overload -from typing_extensions import Literal, SupportsIndex +from typing import IO, Any, Iterable, Protocol, TextIO, overload +from typing_extensions import Literal, SupportsIndex, final + +__all__ = ["BZ2File", "BZ2Compressor", "BZ2Decompressor", "open", "compress", "decompress"] # The following attributes and methods are optional: # def fileno(self) -> int: ... @@ -16,8 +18,6 @@ class _WritableFileobj(Protocol): # def fileno(self) -> int: ... # def close(self) -> object: ... -_T = TypeVar("_T") - def compress(data: bytes, compresslevel: int = ...) -> bytes: ... def decompress(data: bytes) -> bytes: ... @@ -80,6 +80,15 @@ def open( errors: str | None = ..., newline: str | None = ..., ) -> TextIO: ... +@overload +def open( + filename: StrOrBytesPath | _ReadableFileobj | _WritableFileobj, + mode: str, + compresslevel: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., +) -> BZ2File | TextIO: ... class BZ2File(BaseStream, IO[bytes]): def __enter__(self: Self) -> Self: ... @@ -109,21 +118,24 @@ class BZ2File(BaseStream, IO[bytes]): buffering: Any | None = ..., compresslevel: int = ..., ) -> None: ... + def read(self, size: int | None = ...) -> bytes: ... def read1(self, size: int = ...) -> bytes: ... - def readline(self, size: SupportsIndex = ...) -> bytes: ... # type: ignore + def readline(self, size: SupportsIndex = ...) -> bytes: ... 
# type: ignore[override] def readinto(self, b: WriteableBuffer) -> int: ... def readlines(self, size: SupportsIndex = ...) -> list[bytes]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def write(self, data: ReadableBuffer) -> int: ... def writelines(self, seq: Iterable[ReadableBuffer]) -> None: ... -class BZ2Compressor(object): +@final +class BZ2Compressor: def __init__(self, compresslevel: int = ...) -> None: ... def compress(self, __data: bytes) -> bytes: ... def flush(self) -> bytes: ... -class BZ2Decompressor(object): +@final +class BZ2Decompressor: def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... @property def eof(self) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/cProfile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/cProfile.pyi index f4a7ab50cc11..edaa67109952 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/cProfile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/cProfile.pyi @@ -1,7 +1,10 @@ import sys from _typeshed import Self, StrOrBytesPath from types import CodeType -from typing import Any, Callable, Tuple, TypeVar +from typing import Any, Callable, TypeVar +from typing_extensions import ParamSpec + +__all__ = ["run", "runctx", "Profile"] def run(statement: str, filename: str | None = ..., sort: str | int = ...) -> None: ... def runctx( @@ -9,7 +12,8 @@ def runctx( ) -> None: ... _T = TypeVar("_T") -_Label = Tuple[str, int, str] +_P = ParamSpec("_P") +_Label = tuple[str, int, str] class Profile: stats: dict[_Label, tuple[int, int, int, int, dict[_Label, tuple[int, int, int, int]]]] # undocumented @@ -24,7 +28,7 @@ class Profile: def snapshot_stats(self) -> None: ... def run(self: Self, cmd: str) -> Self: ... def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... - def runcall(self, __func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... 
+ def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... if sys.version_info >= (3, 8): def __enter__(self: Self) -> Self: ... def __exit__(self, *exc_info: Any) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi index 9d701a788a80..17b59b9bc2c9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/calendar.pyi @@ -1,22 +1,48 @@ import datetime import sys from time import struct_time -from typing import Any, Iterable, Optional, Sequence, Tuple - -_LocaleType = Tuple[Optional[str], Optional[str]] +from typing import Any, Iterable, Optional, Sequence +from typing_extensions import Literal + +__all__ = [ + "IllegalMonthError", + "IllegalWeekdayError", + "setfirstweekday", + "firstweekday", + "isleap", + "leapdays", + "weekday", + "monthrange", + "monthcalendar", + "prmonth", + "month", + "prcal", + "calendar", + "timegm", + "month_name", + "month_abbr", + "day_name", + "day_abbr", + "Calendar", + "TextCalendar", + "HTMLCalendar", + "LocaleTextCalendar", + "LocaleHTMLCalendar", + "weekheader", +] + +_LocaleType = tuple[Optional[str], Optional[str]] class IllegalMonthError(ValueError): def __init__(self, month: int) -> None: ... - def __str__(self) -> str: ... class IllegalWeekdayError(ValueError): def __init__(self, weekday: int) -> None: ... - def __str__(self) -> str: ... def isleap(year: int) -> bool: ... def leapdays(y1: int, y2: int) -> int: ... def weekday(year: int, month: int, day: int) -> int: ... -def monthrange(year: int, month: int) -> Tuple[int, int]: ... +def monthrange(year: int, month: int) -> tuple[int, int]: ... class Calendar: firstweekday: int @@ -25,17 +51,17 @@ class Calendar: def setfirstweekday(self, firstweekday: int) -> None: ... def iterweekdays(self) -> Iterable[int]: ... 
def itermonthdates(self, year: int, month: int) -> Iterable[datetime.date]: ... - def itermonthdays2(self, year: int, month: int) -> Iterable[Tuple[int, int]]: ... + def itermonthdays2(self, year: int, month: int) -> Iterable[tuple[int, int]]: ... def itermonthdays(self, year: int, month: int) -> Iterable[int]: ... def monthdatescalendar(self, year: int, month: int) -> list[list[datetime.date]]: ... - def monthdays2calendar(self, year: int, month: int) -> list[list[Tuple[int, int]]]: ... + def monthdays2calendar(self, year: int, month: int) -> list[list[tuple[int, int]]]: ... def monthdayscalendar(self, year: int, month: int) -> list[list[int]]: ... def yeardatescalendar(self, year: int, width: int = ...) -> list[list[int]]: ... - def yeardays2calendar(self, year: int, width: int = ...) -> list[list[Tuple[int, int]]]: ... + def yeardays2calendar(self, year: int, width: int = ...) -> list[list[tuple[int, int]]]: ... def yeardayscalendar(self, year: int, width: int = ...) -> list[list[int]]: ... if sys.version_info >= (3, 7): - def itermonthdays3(self, year: int, month: int) -> Iterable[Tuple[int, int, int]]: ... - def itermonthdays4(self, year: int, month: int) -> Iterable[Tuple[int, int, int, int]]: ... + def itermonthdays3(self, year: int, month: int) -> Iterable[tuple[int, int, int]]: ... + def itermonthdays4(self, year: int, month: int) -> Iterable[tuple[int, int, int, int]]: ... class TextCalendar(Calendar): def prweek(self, theweek: int, width: int) -> None: ... @@ -79,7 +105,7 @@ class HTMLCalendar(Calendar): class different_locale: def __init__(self, locale: _LocaleType) -> None: ... - def __enter__(self) -> _LocaleType: ... + def __enter__(self) -> None: ... def __exit__(self, *args: Any) -> None: ... class LocaleTextCalendar(TextCalendar): @@ -97,7 +123,7 @@ c: TextCalendar def setfirstweekday(firstweekday: int) -> None: ... def format(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... 
def formatstring(cols: int, colwidth: int = ..., spacing: int = ...) -> str: ... -def timegm(tuple: Tuple[int, ...] | struct_time) -> int: ... +def timegm(tuple: tuple[int, ...] | struct_time) -> int: ... # Data attributes day_name: Sequence[str] @@ -105,13 +131,12 @@ day_abbr: Sequence[str] month_name: Sequence[str] month_abbr: Sequence[str] -# Below constants are not in docs or __all__, but enough people have used them -# they are now effectively public. +MONDAY: Literal[0] +TUESDAY: Literal[1] +WEDNESDAY: Literal[2] +THURSDAY: Literal[3] +FRIDAY: Literal[4] +SATURDAY: Literal[5] +SUNDAY: Literal[6] -MONDAY: int -TUESDAY: int -WEDNESDAY: int -THURSDAY: int -FRIDAY: int -SATURDAY: int -SUNDAY: int +EPOCH: Literal[1970] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/cgi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/cgi.pyi index 3821de46ed75..1c11beec0c35 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/cgi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/cgi.pyi @@ -1,10 +1,43 @@ import sys -from _typeshed import SupportsGetItem, SupportsItemAccess +from _typeshed import Self, SupportsGetItem, SupportsItemAccess from builtins import type as _type from collections.abc import Iterable, Iterator, Mapping -from typing import IO, Any, Protocol, TypeVar +from types import TracebackType +from typing import IO, Any, Protocol -_T = TypeVar("_T", bound=FieldStorage) +if sys.version_info >= (3, 8): + __all__ = [ + "MiniFieldStorage", + "FieldStorage", + "parse", + "parse_multipart", + "parse_header", + "test", + "print_exception", + "print_environ", + "print_form", + "print_directory", + "print_arguments", + "print_environ_usage", + ] +else: + __all__ = [ + "MiniFieldStorage", + "FieldStorage", + "parse", + "parse_qs", + "parse_qsl", + "parse_multipart", + "parse_header", + "test", + "print_exception", + "print_environ", + "print_form", + "print_directory", + "print_arguments", + "print_environ_usage", + "escape", + ] 
def parse( fp: IO[Any] | None = ..., @@ -53,11 +86,10 @@ class MiniFieldStorage: name: Any value: Any def __init__(self, name: Any, value: Any) -> None: ... - def __repr__(self) -> str: ... _list = list -class FieldStorage(object): +class FieldStorage: FieldStorageClass: _type | None keep_blank_values: int strict_parsing: int @@ -94,9 +126,8 @@ class FieldStorage(object): max_num_fields: int | None = ..., separator: str = ..., ) -> None: ... - def __enter__(self: _T) -> _T: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... - def __repr__(self) -> str: ... def __iter__(self) -> Iterator[str]: ... def __getitem__(self, key: str) -> Any: ... def getvalue(self, key: str, default: Any = ...) -> Any: ... @@ -108,3 +139,11 @@ class FieldStorage(object): def __bool__(self) -> bool: ... # In Python 3 it returns bytes or str IO depending on an internal flag def make_file(self) -> IO[Any]: ... + +def print_exception( + type: type[BaseException] | None = ..., + value: BaseException | None = ..., + tb: TracebackType | None = ..., + limit: int | None = ..., +) -> None: ... +def print_arguments() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/cgitb.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/cgitb.pyi index 90226dc134e8..fb9d69161d5b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/cgitb.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/cgitb.pyi @@ -1,17 +1,19 @@ from _typeshed import StrOrBytesPath from types import FrameType, TracebackType -from typing import IO, Any, Callable, Optional, Tuple, Type +from typing import IO, Any, Callable, Optional -_ExcInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] +_ExcInfo = tuple[Optional[type[BaseException]], Optional[BaseException], Optional[TracebackType]] + +__UNDEF__: object # undocumented sentinel def reset() -> str: ... # undocumented def small(text: str) -> str: ... 
# undocumented def strong(text: str) -> str: ... # undocumented def grey(text: str) -> str: ... # undocumented -def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> Tuple[str | None, Any]: ... # undocumented +def lookup(name: str, frame: FrameType, locals: dict[str, Any]) -> tuple[str | None, Any]: ... # undocumented def scanvars( reader: Callable[[], bytes], frame: FrameType, locals: dict[str, Any] -) -> list[Tuple[str, str | None, Any]]: ... # undocumented +) -> list[tuple[str, str | None, Any]]: ... # undocumented def html(einfo: _ExcInfo, context: int = ...) -> str: ... def text(einfo: _ExcInfo, context: int = ...) -> str: ... @@ -24,7 +26,7 @@ class Hook: # undocumented file: IO[str] | None = ..., format: str = ..., ) -> None: ... - def __call__(self, etype: Type[BaseException] | None, evalue: BaseException | None, etb: TracebackType | None) -> None: ... + def __call__(self, etype: type[BaseException] | None, evalue: BaseException | None, etb: TracebackType | None) -> None: ... def handle(self, info: _ExcInfo | None = ...) -> None: ... def handler(info: _ExcInfo | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/cmath.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/cmath.pyi index 8d67e6c47bb4..04c2b632d411 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/cmath.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/cmath.pyi @@ -1,4 +1,8 @@ -from typing import SupportsComplex, SupportsFloat, Tuple, Union +import sys +from typing import SupportsComplex, SupportsFloat, Union + +if sys.version_info >= (3, 8): + from typing import SupportsIndex e: float pi: float @@ -8,7 +12,10 @@ nan: float nanj: complex tau: float -_C = Union[SupportsFloat, SupportsComplex, complex] +if sys.version_info >= (3, 8): + _C = Union[SupportsFloat, SupportsComplex, SupportsIndex, complex] +else: + _C = Union[SupportsFloat, SupportsComplex, complex] def acos(__z: _C) -> complex: ... 
def acosh(__z: _C) -> complex: ... @@ -25,7 +32,7 @@ def isnan(__z: _C) -> bool: ... def log(__x: _C, __y_obj: _C = ...) -> complex: ... def log10(__z: _C) -> complex: ... def phase(__z: _C) -> float: ... -def polar(__z: _C) -> Tuple[float, float]: ... +def polar(__z: _C) -> tuple[float, float]: ... def rect(__r: float, __phi: float) -> complex: ... def sin(__z: _C) -> complex: ... def sinh(__z: _C) -> complex: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/cmd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/cmd.pyi index 9f2593d3bfdf..d1166db0f507 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/cmd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/cmd.pyi @@ -1,4 +1,10 @@ -from typing import IO, Any, Callable, Tuple +from typing import IO, Any, Callable +from typing_extensions import Literal + +__all__ = ["Cmd"] + +PROMPT: Literal["(Cmd) "] +IDENTCHARS: str # Too big to be `Literal` class Cmd: prompt: str @@ -23,10 +29,10 @@ class Cmd: def postcmd(self, stop: bool, line: str) -> bool: ... def preloop(self) -> None: ... def postloop(self) -> None: ... - def parseline(self, line: str) -> Tuple[str | None, str | None, str]: ... + def parseline(self, line: str) -> tuple[str | None, str | None, str]: ... def onecmd(self, line: str) -> bool: ... def emptyline(self) -> bool: ... - def default(self, line: str) -> bool: ... + def default(self, line: str) -> None: ... def completedefault(self, *ignored: Any) -> list[str]: ... def completenames(self, text: str, *ignored: Any) -> list[str]: ... 
completion_matches: list[str] | None diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/code.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/code.pyi index ed00eaf96a5c..185c15853b82 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/code.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/code.pyi @@ -2,6 +2,8 @@ from codeop import CommandCompiler from types import CodeType from typing import Any, Callable, Mapping +__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact", "compile_command"] + class InteractiveInterpreter: locals: Mapping[str, Any] # undocumented compile: CommandCompiler # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi index 9f27cfebf099..c70c2b9acaec 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/codecs.pyi @@ -2,9 +2,61 @@ import sys import types from _typeshed import Self from abc import abstractmethod -from typing import IO, Any, BinaryIO, Callable, Generator, Iterable, Iterator, Protocol, TextIO, Tuple, Type, TypeVar, overload +from typing import IO, Any, BinaryIO, Callable, Generator, Iterable, Iterator, Protocol, TextIO, overload from typing_extensions import Literal +__all__ = [ + "register", + "lookup", + "open", + "EncodedFile", + "BOM", + "BOM_BE", + "BOM_LE", + "BOM32_BE", + "BOM32_LE", + "BOM64_BE", + "BOM64_LE", + "BOM_UTF8", + "BOM_UTF16", + "BOM_UTF16_LE", + "BOM_UTF16_BE", + "BOM_UTF32", + "BOM_UTF32_LE", + "BOM_UTF32_BE", + "CodecInfo", + "Codec", + "IncrementalEncoder", + "IncrementalDecoder", + "StreamReader", + "StreamWriter", + "StreamReaderWriter", + "StreamRecoder", + "getencoder", + "getdecoder", + "getincrementalencoder", + "getincrementaldecoder", + "getreader", + "getwriter", + "encode", + "decode", + "iterencode", + "iterdecode", + "strict_errors", + "ignore_errors", + 
"replace_errors", + "xmlcharrefreplace_errors", + "backslashreplace_errors", + "namereplace_errors", + "register_error", + "lookup_error", +] + +BOM32_BE: Literal[b"\xfe\xff"] +BOM32_LE: Literal[b"\xff\xfe"] +BOM64_BE: Literal[b"\x00\x00\xfe\xff"] +BOM64_LE: Literal[b"\xff\xfe\x00\x00"] + # TODO: this only satisfies the most common interface, where # bytes is the raw form and str is the cooked form. # In the long run, both should become template parameters maybe? @@ -12,10 +64,10 @@ from typing_extensions import Literal # They are much more common in Python 2 than in Python 3. class _Encoder(Protocol): - def __call__(self, input: str, errors: str = ...) -> Tuple[bytes, int]: ... # signature of Codec().encode + def __call__(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... # signature of Codec().encode class _Decoder(Protocol): - def __call__(self, input: bytes, errors: str = ...) -> Tuple[str, int]: ... # signature of Codec().decode + def __call__(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... # signature of Codec().decode class _StreamReader(Protocol): def __call__(self, stream: IO[bytes], errors: str = ...) -> StreamReader: ... @@ -30,8 +82,8 @@ class _IncrementalDecoder(Protocol): def __call__(self, errors: str = ...) -> IncrementalDecoder: ... # The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 -# mypy and pytype disagree about where the type ignore can and cannot go, so alias the long type -_BytesToBytesEncodingT = Literal[ +# https://docs.python.org/3/library/codecs.html#binary-transforms +_BytesToBytesEncoding = Literal[ "base64", "base_64", "base64_codec", @@ -49,24 +101,30 @@ _BytesToBytesEncodingT = Literal[ "zlib", "zlib_codec", ] +# https://docs.python.org/3/library/codecs.html#text-transforms +_StrToStrEncoding = Literal["rot13", "rot_13"] @overload -def encode(obj: bytes, encoding: _BytesToBytesEncodingT, errors: str = ...) -> bytes: ... 
+def encode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... @overload -def encode(obj: str, encoding: Literal["rot13", "rot_13"] = ..., errors: str = ...) -> str: ... # type: ignore +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] @overload def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... @overload -def decode(obj: bytes, encoding: _BytesToBytesEncodingT, errors: str = ...) -> bytes: ... # type: ignore +def decode(obj: bytes, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +@overload +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... + +# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str @overload -def decode(obj: str, encoding: Literal["rot13", "rot_13"] = ..., errors: str = ...) -> str: ... +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... @overload def decode(obj: bytes, encoding: str = ..., errors: str = ...) -> str: ... def lookup(__encoding: str) -> CodecInfo: ... -def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> Tuple[str, int]: ... # undocumented -def utf_16_be_encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... # undocumented +def utf_16_be_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... # undocumented +def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... # undocumented -class CodecInfo(Tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): +class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): @property def encode(self) -> _Encoder: ... @property @@ -81,7 +139,7 @@ class CodecInfo(Tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): def incrementaldecoder(self) -> _IncrementalDecoder: ... 
name: str def __new__( - cls, + cls: type[Self], encode: _Encoder, decode: _Decoder, streamreader: _StreamReader | None = ..., @@ -91,7 +149,7 @@ class CodecInfo(Tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): name: str | None = ..., *, _is_text_encoding: bool | None = ..., - ) -> CodecInfo: ... + ) -> Self: ... def getencoder(encoding: str) -> _Encoder: ... def getdecoder(encoding: str) -> _Decoder: ... @@ -110,33 +168,34 @@ def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = ...) -> G if sys.version_info >= (3, 10): def unregister(__search_function: Callable[[str], CodecInfo | None]) -> None: ... -BOM: bytes -BOM_BE: bytes -BOM_LE: bytes -BOM_UTF8: bytes -BOM_UTF16: bytes -BOM_UTF16_BE: bytes -BOM_UTF16_LE: bytes -BOM_UTF32: bytes -BOM_UTF32_BE: bytes -BOM_UTF32_LE: bytes +BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` +BOM_BE: Literal[b"\xfe\xff"] +BOM_LE: Literal[b"\xff\xfe"] +BOM_UTF8: Literal[b"\xef\xbb\xbf"] +BOM_UTF16: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` +BOM_UTF16_BE: Literal[b"\xfe\xff"] +BOM_UTF16_LE: Literal[b"\xff\xfe"] +BOM_UTF32: Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"] # depends on `sys.byteorder` +BOM_UTF32_BE: Literal[b"\x00\x00\xfe\xff"] +BOM_UTF32_LE: Literal[b"\xff\xfe\x00\x00"] # It is expected that different actions be taken depending on which of the # three subclasses of `UnicodeError` is actually ...ed. However, the Union # is still needed for at least one of the cases. -def register_error(__errors: str, __handler: Callable[[UnicodeError], Tuple[str | bytes, int]]) -> None: ... -def lookup_error(__name: str) -> Callable[[UnicodeError], Tuple[str | bytes, int]]: ... -def strict_errors(exception: UnicodeError) -> Tuple[str | bytes, int]: ... -def replace_errors(exception: UnicodeError) -> Tuple[str | bytes, int]: ... -def ignore_errors(exception: UnicodeError) -> Tuple[str | bytes, int]: ... 
-def xmlcharrefreplace_errors(exception: UnicodeError) -> Tuple[str | bytes, int]: ... -def backslashreplace_errors(exception: UnicodeError) -> Tuple[str | bytes, int]: ... +def register_error(__errors: str, __handler: Callable[[UnicodeError], tuple[str | bytes, int]]) -> None: ... +def lookup_error(__name: str) -> Callable[[UnicodeError], tuple[str | bytes, int]]: ... +def strict_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def replace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def ignore_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def xmlcharrefreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def backslashreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def namereplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... class Codec: # These are sort of @abstractmethod but sort of not. # The StreamReader and StreamWriter subclasses only implement one. - def encode(self, input: str, errors: str = ...) -> Tuple[bytes, int]: ... - def decode(self, input: bytes, errors: str = ...) -> Tuple[str, int]: ... + def encode(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... class IncrementalEncoder: errors: str @@ -154,8 +213,8 @@ class IncrementalDecoder: @abstractmethod def decode(self, input: bytes, final: bool = ...) -> str: ... def reset(self) -> None: ... - def getstate(self) -> Tuple[bytes, int]: ... - def setstate(self, state: Tuple[bytes, int]) -> None: ... + def getstate(self) -> tuple[bytes, int]: ... + def setstate(self, state: tuple[bytes, int]) -> None: ... # These are not documented but used in encodings/*.py implementations. class BufferedIncrementalEncoder(IncrementalEncoder): @@ -169,7 +228,7 @@ class BufferedIncrementalDecoder(IncrementalDecoder): buffer: bytes def __init__(self, errors: str = ...) -> None: ... 
@abstractmethod - def _buffer_decode(self, input: bytes, errors: str, final: bool) -> Tuple[str, int]: ... + def _buffer_decode(self, input: bytes, errors: str, final: bool) -> tuple[str, int]: ... def decode(self, input: bytes, final: bool = ...) -> str: ... # TODO: it is not possible to specify the requirement that all other @@ -181,10 +240,10 @@ class StreamWriter(Codec): def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, typ: Type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... -class StreamReader(Codec): +class StreamReader(Codec, Iterator[str]): errors: str def __init__(self, stream: IO[bytes], errors: str = ...) -> None: ... def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> str: ... @@ -192,12 +251,11 @@ class StreamReader(Codec): def readlines(self, sizehint: int | None = ..., keepends: bool = ...) -> list[str]: ... def reset(self) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, typ: Type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... - def __iter__(self) -> Iterator[str]: ... + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> str: ... def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... -_T = TypeVar("_T", bound=StreamReaderWriter) - # Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing # and delegates attributes to the underlying binary stream with __getattr__. 
class StreamReaderWriter(TextIO): @@ -206,15 +264,13 @@ class StreamReaderWriter(TextIO): def readline(self, size: int | None = ...) -> str: ... def readlines(self, sizehint: int | None = ...) -> list[str]: ... def __next__(self) -> str: ... - def __iter__(self: _T) -> _T: ... - # This actually returns None, but that's incompatible with the supertype - def write(self, data: str) -> int: ... + def __iter__(self: Self) -> Self: ... + def write(self, data: str) -> None: ... # type: ignore[override] def writelines(self, list: Iterable[str]) -> None: ... def reset(self) -> None: ... - # Same as write() - def seek(self, offset: int, whence: int = ...) -> int: ... + def seek(self, offset: int, whence: int = ...) -> None: ... # type: ignore[override] def __enter__(self: Self) -> Self: ... - def __exit__(self, typ: Type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... def __getattr__(self, name: str) -> Any: ... # These methods don't actually exist directly, but they are needed to satisfy the TextIO # interface. At runtime, they are delegated through __getattr__. @@ -228,8 +284,6 @@ class StreamReaderWriter(TextIO): def tell(self) -> int: ... def writable(self) -> bool: ... -_SRT = TypeVar("_SRT", bound=StreamRecoder) - class StreamRecoder(BinaryIO): def __init__( self, @@ -244,16 +298,16 @@ class StreamRecoder(BinaryIO): def readline(self, size: int | None = ...) -> bytes: ... def readlines(self, sizehint: int | None = ...) -> list[bytes]: ... def __next__(self) -> bytes: ... - def __iter__(self: _SRT) -> _SRT: ... - def write(self, data: bytes) -> int: ... - def writelines(self, list: Iterable[bytes]) -> int: ... # type: ignore # it's supposed to return None + def __iter__(self: Self) -> Self: ... + def write(self, data: bytes) -> None: ... 
# type: ignore[override] + def writelines(self, list: Iterable[bytes]) -> None: ... def reset(self) -> None: ... def __getattr__(self, name: str) -> Any: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, type: Type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def seek(self, offset: int, whence: int = ...) -> None: ... # type: ignore[override] # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO # interface. At runtime, they are delegated through __getattr__. - def seek(self, offset: int, whence: int = ...) -> int: ... def close(self) -> None: ... def fileno(self) -> int: ... def flush(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/codeop.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/codeop.pyi index 8ed5710c9891..1c00e13fd501 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/codeop.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/codeop.pyi @@ -1,5 +1,7 @@ from types import CodeType +__all__ = ["compile_command", "Compile", "CommandCompiler"] + def compile_command(source: str, filename: str = ..., symbol: str = ...) -> CodeType | None: ... 
class Compile: diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/collections/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/collections/__init__.pyi index 2cfd187eb35a..c5fe236db63b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/collections/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/collections/__init__.pyi @@ -1,15 +1,23 @@ import sys -from _typeshed import Self -from builtins import _dict_items, _dict_keys, _dict_values -from typing import Any, Dict, Generic, NoReturn, Tuple, Type, TypeVar, overload +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from typing import Any, Generic, NoReturn, TypeVar, overload +from typing_extensions import SupportsIndex, final + +if sys.version_info >= (3, 9): + from types import GenericAlias if sys.version_info >= (3, 10): from typing import Callable, Iterable, Iterator, Mapping, MutableMapping, MutableSequence, Reversible, Sequence else: from _collections_abc import * +__all__ = ["ChainMap", "Counter", "OrderedDict", "UserDict", "UserList", "UserString", "defaultdict", "deque", "namedtuple"] + _S = TypeVar("_S") _T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") _KT = TypeVar("_KT") _VT = TypeVar("_VT") _KT_co = TypeVar("_KT_co", covariant=True) @@ -24,92 +32,135 @@ if sys.version_info >= (3, 7): rename: bool = ..., module: str | None = ..., defaults: Iterable[Any] | None = ..., - ) -> Type[Tuple[Any, ...]]: ... + ) -> type[tuple[Any, ...]]: ... else: def namedtuple( typename: str, field_names: str | Iterable[str], *, verbose: bool = ..., rename: bool = ..., module: str | None = ... - ) -> Type[Tuple[Any, ...]]: ... + ) -> type[tuple[Any, ...]]: ... 
-class UserDict(MutableMapping[_KT, _VT]): +class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): data: dict[_KT, _VT] - def __init__(self, __dict: Mapping[_KT, _VT] | None = ..., **kwargs: _VT) -> None: ... + # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics + @overload + def __init__(self: UserDict[_KT, _VT], __dict: None = ...) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __dict: None = ..., **kwargs: _VT) -> None: ... + @overload + def __init__(self, __dict: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __iterable: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + @overload + def __init__(self: UserDict[str, str], __iterable: Iterable[list[str]]) -> None: ... def __len__(self) -> int: ... def __getitem__(self, key: _KT) -> _VT: ... def __setitem__(self, key: _KT, item: _VT) -> None: ... def __delitem__(self, key: _KT) -> None: ... def __iter__(self) -> Iterator[_KT]: ... def __contains__(self, key: object) -> bool: ... - def copy(self: _S) -> _S: ... + def copy(self: Self) -> Self: ... + if sys.version_info >= (3, 7): + def __copy__(self: Self) -> Self: ... + + # `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`. + # TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> UserDict[_T, Any | None]: ... @classmethod - def fromkeys(cls: Type[_S], iterable: Iterable[_KT], value: _VT | None = ...) -> _S: ... + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... + if sys.version_info >= (3, 9): + def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... 
+ def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... class UserList(MutableSequence[_T]): data: list[_T] def __init__(self, initlist: Iterable[_T] | None = ...) -> None: ... - def __lt__(self, other: object) -> bool: ... - def __le__(self, other: object) -> bool: ... - def __gt__(self, other: object) -> bool: ... - def __ge__(self, other: object) -> bool: ... + def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __le__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __ge__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __eq__(self, other: object) -> bool: ... def __contains__(self, item: object) -> bool: ... def __len__(self) -> int: ... @overload - def __getitem__(self, i: int) -> _T: ... + def __getitem__(self, i: SupportsIndex) -> _T: ... @overload def __getitem__(self: Self, i: slice) -> Self: ... @overload - def __setitem__(self, i: int, o: _T) -> None: ... + def __setitem__(self, i: SupportsIndex, o: _T) -> None: ... @overload def __setitem__(self, i: slice, o: Iterable[_T]) -> None: ... - def __delitem__(self, i: int | slice) -> None: ... - def __add__(self: _S, other: Iterable[_T]) -> _S: ... - def __iadd__(self: _S, other: Iterable[_T]) -> _S: ... - def __mul__(self: _S, n: int) -> _S: ... - def __imul__(self: _S, n: int) -> _S: ... + def __delitem__(self, i: SupportsIndex | slice) -> None: ... + def __add__(self: Self, other: Iterable[_T]) -> Self: ... + def __radd__(self: Self, other: Iterable[_T]) -> Self: ... + def __iadd__(self: Self, other: Iterable[_T]) -> Self: ... 
+ def __mul__(self: Self, n: int) -> Self: ... + def __rmul__(self: Self, n: int) -> Self: ... + def __imul__(self: Self, n: int) -> Self: ... def append(self, item: _T) -> None: ... def insert(self, i: int, item: _T) -> None: ... def pop(self, i: int = ...) -> _T: ... def remove(self, item: _T) -> None: ... - def clear(self) -> None: ... - def copy(self: _S) -> _S: ... + def copy(self: Self) -> Self: ... + if sys.version_info >= (3, 7): + def __copy__(self: Self) -> Self: ... + def count(self, item: _T) -> int: ... - def index(self, item: _T, *args: Any) -> int: ... - def reverse(self) -> None: ... - def sort(self, *args: Any, **kwds: Any) -> None: ... + # All arguments are passed to `list.index` at runtime, so the signature should be kept in line with `list.index`. + def index(self, item: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + # All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`. + @overload + def sort(self: UserList[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... def extend(self, other: Iterable[_T]) -> None: ... -_UserStringT = TypeVar("_UserStringT", bound=UserString) - -class UserString(Sequence[str]): +class UserString(Sequence[UserString]): data: str def __init__(self, seq: object) -> None: ... def __int__(self) -> int: ... def __float__(self) -> float: ... def __complex__(self) -> complex: ... - def __getnewargs__(self) -> Tuple[str]: ... + def __getnewargs__(self) -> tuple[str]: ... def __lt__(self, string: str | UserString) -> bool: ... def __le__(self, string: str | UserString) -> bool: ... def __gt__(self, string: str | UserString) -> bool: ... def __ge__(self, string: str | UserString) -> bool: ... + def __eq__(self, string: object) -> bool: ... def __contains__(self, char: object) -> bool: ... 
def __len__(self) -> int: ... - # It should return a str to implement Sequence correctly, but it doesn't. - def __getitem__(self: _UserStringT, i: int | slice) -> _UserStringT: ... # type: ignore - def __iter__(self: _UserStringT) -> Iterator[_UserStringT]: ... # type: ignore - def __reversed__(self: _UserStringT) -> Iterator[_UserStringT]: ... # type: ignore - def __add__(self: _UserStringT, other: object) -> _UserStringT: ... - def __mul__(self: _UserStringT, n: int) -> _UserStringT: ... - def __mod__(self: _UserStringT, args: Any) -> _UserStringT: ... - def capitalize(self: _UserStringT) -> _UserStringT: ... - def casefold(self: _UserStringT) -> _UserStringT: ... - def center(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... + def __getitem__(self: Self, i: SupportsIndex | slice) -> Self: ... + def __iter__(self: Self) -> Iterator[Self]: ... + def __reversed__(self: Self) -> Iterator[Self]: ... + def __add__(self: Self, other: object) -> Self: ... + def __radd__(self: Self, other: object) -> Self: ... + def __mul__(self: Self, n: int) -> Self: ... + def __rmul__(self: Self, n: int) -> Self: ... + def __mod__(self: Self, args: Any) -> Self: ... + if sys.version_info >= (3, 8): + def __rmod__(self: Self, template: object) -> Self: ... + else: + def __rmod__(self: Self, format: Any) -> Self: ... + + def capitalize(self: Self) -> Self: ... + def casefold(self: Self) -> Self: ... + def center(self: Self, width: int, *args: Any) -> Self: ... def count(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... if sys.version_info >= (3, 8): def encode(self: UserString, encoding: str | None = ..., errors: str | None = ...) -> bytes: ... else: - def encode(self: _UserStringT, encoding: str | None = ..., errors: str | None = ...) -> _UserStringT: ... - def endswith(self, suffix: str | Tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... - def expandtabs(self: _UserStringT, tabsize: int = ...) -> _UserStringT: ... 
+ def encode(self: Self, encoding: str | None = ..., errors: str | None = ...) -> Self: ... + + def endswith(self, suffix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... + def expandtabs(self: Self, tabsize: int = ...) -> Self: ... def find(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... def format(self, *args: Any, **kwds: Any) -> str: ... def format_map(self, mapping: Mapping[str, Any]) -> str: ... @@ -125,91 +176,90 @@ class UserString(Sequence[str]): def isspace(self) -> bool: ... def istitle(self) -> bool: ... def isupper(self) -> bool: ... + if sys.version_info >= (3, 7): + def isascii(self) -> bool: ... + def join(self, seq: Iterable[str]) -> str: ... - def ljust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... - def lower(self: _UserStringT) -> _UserStringT: ... - def lstrip(self: _UserStringT, chars: str | None = ...) -> _UserStringT: ... + def ljust(self: Self, width: int, *args: Any) -> Self: ... + def lower(self: Self) -> Self: ... + def lstrip(self: Self, chars: str | None = ...) -> Self: ... @staticmethod @overload def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... @staticmethod @overload def maketrans(x: str, y: str, z: str = ...) -> dict[int, int | None]: ... - def partition(self, sep: str) -> Tuple[str, str, str]: ... + def partition(self, sep: str) -> tuple[str, str, str]: ... if sys.version_info >= (3, 9): - def removeprefix(self: _UserStringT, __prefix: str | UserString) -> _UserStringT: ... - def removesuffix(self: _UserStringT, __suffix: str | UserString) -> _UserStringT: ... - def replace(self: _UserStringT, old: str | UserString, new: str | UserString, maxsplit: int = ...) -> _UserStringT: ... + def removeprefix(self: Self, __prefix: str | UserString) -> Self: ... + def removesuffix(self: Self, __suffix: str | UserString) -> Self: ... 
+ + def replace(self: Self, old: str | UserString, new: str | UserString, maxsplit: int = ...) -> Self: ... def rfind(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... def rindex(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... - def rjust(self: _UserStringT, width: int, *args: Any) -> _UserStringT: ... - def rpartition(self, sep: str) -> Tuple[str, str, str]: ... - def rstrip(self: _UserStringT, chars: str | None = ...) -> _UserStringT: ... + def rjust(self: Self, width: int, *args: Any) -> Self: ... + def rpartition(self, sep: str) -> tuple[str, str, str]: ... + def rstrip(self: Self, chars: str | None = ...) -> Self: ... def split(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... def rsplit(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... def splitlines(self, keepends: bool = ...) -> list[str]: ... - def startswith(self, prefix: str | Tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... - def strip(self: _UserStringT, chars: str | None = ...) -> _UserStringT: ... - def swapcase(self: _UserStringT) -> _UserStringT: ... - def title(self: _UserStringT) -> _UserStringT: ... - def translate(self: _UserStringT, *args: Any) -> _UserStringT: ... - def upper(self: _UserStringT) -> _UserStringT: ... - def zfill(self: _UserStringT, width: int) -> _UserStringT: ... + def startswith(self, prefix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... + def strip(self: Self, chars: str | None = ...) -> Self: ... + def swapcase(self: Self) -> Self: ... + def title(self: Self) -> Self: ... + def translate(self: Self, *args: Any) -> Self: ... + def upper(self: Self) -> Self: ... + def zfill(self: Self, width: int) -> Self: ... class deque(MutableSequence[_T], Generic[_T]): @property def maxlen(self) -> int | None: ... def __init__(self, iterable: Iterable[_T] = ..., maxlen: int | None = ...) -> None: ... 
- def append(self, x: _T) -> None: ... - def appendleft(self, x: _T) -> None: ... - def clear(self) -> None: ... - def copy(self) -> deque[_T]: ... - def count(self, x: _T) -> int: ... - def extend(self, iterable: Iterable[_T]) -> None: ... - def extendleft(self, iterable: Iterable[_T]) -> None: ... - def insert(self, i: int, x: _T) -> None: ... - def index(self, x: _T, start: int = ..., stop: int = ...) -> int: ... - def pop(self) -> _T: ... # type: ignore + def append(self, __x: _T) -> None: ... + def appendleft(self, __x: _T) -> None: ... + def copy(self: Self) -> Self: ... + def count(self, __x: _T) -> int: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def extendleft(self, __iterable: Iterable[_T]) -> None: ... + def insert(self, __i: int, __x: _T) -> None: ... + def index(self, __x: _T, __start: int = ..., __stop: int = ...) -> int: ... + def pop(self) -> _T: ... # type: ignore[override] def popleft(self) -> _T: ... - def remove(self, value: _T) -> None: ... - def reverse(self) -> None: ... - def rotate(self, n: int = ...) -> None: ... + def remove(self, __value: _T) -> None: ... + def rotate(self, __n: int = ...) -> None: ... + def __copy__(self: Self) -> Self: ... def __len__(self) -> int: ... - def __iter__(self) -> Iterator[_T]: ... - def __str__(self) -> str: ... - # These methods of deque don't really take slices, but we need to - # define them as taking a slice to satisfy MutableSequence. - @overload - def __getitem__(self, index: int) -> _T: ... - @overload - def __getitem__(self, s: slice) -> MutableSequence[_T]: ... - @overload - def __setitem__(self, i: int, x: _T) -> None: ... - @overload - def __setitem__(self, s: slice, o: Iterable[_T]) -> None: ... - @overload - def __delitem__(self, i: int) -> None: ... - @overload - def __delitem__(self, s: slice) -> None: ... - def __contains__(self, o: object) -> bool: ... - def __reversed__(self) -> Iterator[_T]: ... - def __iadd__(self: _S, iterable: Iterable[_T]) -> _S: ... 
- def __add__(self, other: deque[_T]) -> deque[_T]: ... - def __mul__(self, other: int) -> deque[_T]: ... - def __imul__(self, other: int) -> None: ... + # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores + def __getitem__(self, __index: SupportsIndex) -> _T: ... # type: ignore[override] + def __setitem__(self, __i: SupportsIndex, __x: _T) -> None: ... # type: ignore[override] + def __delitem__(self, __i: SupportsIndex) -> None: ... # type: ignore[override] + def __contains__(self, __o: object) -> bool: ... + def __reduce__(self: Self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self: Self, __iterable: Iterable[_T]) -> Self: ... + def __add__(self: Self, __other: Self) -> Self: ... + def __mul__(self: Self, __other: int) -> Self: ... + def __imul__(self: Self, __other: int) -> Self: ... + def __lt__(self, __other: deque[_T]) -> bool: ... + def __le__(self, __other: deque[_T]) -> bool: ... + def __gt__(self, __other: deque[_T]) -> bool: ... + def __ge__(self, __other: deque[_T]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... -class Counter(Dict[_T, int], Generic[_T]): +class Counter(dict[_T, int], Generic[_T]): @overload - def __init__(self, __iterable: None = ..., **kwargs: int) -> None: ... + def __init__(self: Counter[_T], __iterable: None = ...) -> None: ... @overload - def __init__(self, __mapping: Mapping[_T, int]) -> None: ... + def __init__(self: Counter[str], __iterable: None = ..., **kwargs: int) -> None: ... + @overload + def __init__(self, __mapping: SupportsKeysAndGetItem[_T, int]) -> None: ... @overload def __init__(self, __iterable: Iterable[_T]) -> None: ... - def copy(self: _S) -> _S: ... + def copy(self: Self) -> Self: ... def elements(self) -> Iterator[_T]: ... - def most_common(self, n: int | None = ...) -> list[Tuple[_T, int]]: ... + def most_common(self, n: int | None = ...) -> list[tuple[_T, int]]: ... 
@classmethod - def fromkeys(cls, iterable: Any, v: int | None = ...) -> NoReturn: ... # type: ignore + def fromkeys(cls, iterable: Any, v: int | None = ...) -> NoReturn: ... # type: ignore[override] @overload def subtract(self, __iterable: None = ...) -> None: ... @overload @@ -222,66 +272,105 @@ class Counter(Dict[_T, int], Generic[_T]): # Dict.update. Not sure if we should use '# type: ignore' instead # and omit the type from the union. @overload - def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... + def update(self, __m: SupportsKeysAndGetItem[_T, int], **kwargs: int) -> None: ... @overload - def update(self, __m: Iterable[_T] | Iterable[Tuple[_T, int]], **kwargs: int) -> None: ... + def update(self, __m: Iterable[_T] | Iterable[tuple[_T, int]], **kwargs: int) -> None: ... @overload def update(self, __m: None = ..., **kwargs: int) -> None: ... + def __missing__(self, key: _T) -> int: ... + def __delitem__(self, elem: object) -> None: ... + if sys.version_info >= (3, 10): + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __add__(self, other: Counter[_T]) -> Counter[_T]: ... def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... def __and__(self, other: Counter[_T]) -> Counter[_T]: ... - def __or__(self, other: Counter[_T]) -> Counter[_T]: ... # type: ignore + def __or__(self, other: Counter[_T]) -> Counter[_T]: ... # type: ignore[override] def __pos__(self) -> Counter[_T]: ... def __neg__(self) -> Counter[_T]: ... - def __iadd__(self, other: Counter[_T]) -> Counter[_T]: ... - def __isub__(self, other: Counter[_T]) -> Counter[_T]: ... - def __iand__(self, other: Counter[_T]) -> Counter[_T]: ... - def __ior__(self, other: Counter[_T]) -> Counter[_T]: ... # type: ignore + def __iadd__(self: Self, other: Counter[_T]) -> Self: ... + def __isub__(self: Self, other: Counter[_T]) -> Self: ... + def __iand__(self: Self, other: Counter[_T]) -> Self: ... 
+ def __ior__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[override] + if sys.version_info >= (3, 10): + def total(self) -> int: ... + def __le__(self, other: Counter[object]) -> bool: ... + def __lt__(self, other: Counter[object]) -> bool: ... + def __ge__(self, other: Counter[object]) -> bool: ... + def __gt__(self, other: Counter[object]) -> bool: ... -class _OrderedDictKeysView(_dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): +@final +class _OrderedDictKeysView(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] def __reversed__(self) -> Iterator[_KT_co]: ... -class _OrderedDictItemsView(_dict_items[_KT_co, _VT_co], Reversible[Tuple[_KT_co, _VT_co]]): - def __reversed__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... +@final +class _OrderedDictItemsView(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... -# The generics are the wrong way around because of a mypy limitation -# https://github.com/python/mypy/issues/11138 -class _OrderedDictValuesView(_dict_values[_VT_co, _KT_co], Reversible[_VT_co], Generic[_VT_co, _KT_co]): +@final +class _OrderedDictValuesView(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] def __reversed__(self) -> Iterator[_VT_co]: ... -class OrderedDict(Dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): - def popitem(self, last: bool = ...) -> Tuple[_KT, _VT]: ... +class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): + def popitem(self, last: bool = ...) -> tuple[_KT, _VT]: ... def move_to_end(self, key: _KT, last: bool = ...) -> None: ... - def copy(self: _S) -> _S: ... + def copy(self: Self) -> Self: ... def __reversed__(self) -> Iterator[_KT]: ... def keys(self) -> _OrderedDictKeysView[_KT, _VT]: ... def items(self) -> _OrderedDictItemsView[_KT, _VT]: ... - def values(self) -> _OrderedDictValuesView[_VT, _KT]: ... 
+ def values(self) -> _OrderedDictValuesView[_KT, _VT]: ... + # The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences. + # Like dict.fromkeys, its true signature is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> OrderedDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... + # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... -class defaultdict(Dict[_KT, _VT], Generic[_KT, _VT]): +class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): default_factory: Callable[[], _VT] | None @overload - def __init__(self, **kwargs: _VT) -> None: ... + def __init__(self: defaultdict[_KT, _VT]) -> None: ... + @overload + def __init__(self: defaultdict[str, _VT], **kwargs: _VT) -> None: ... @overload - def __init__(self, default_factory: Callable[[], _VT] | None) -> None: ... + def __init__(self, __default_factory: Callable[[], _VT] | None) -> None: ... @overload - def __init__(self, default_factory: Callable[[], _VT] | None, **kwargs: _VT) -> None: ... + def __init__(self: defaultdict[str, _VT], __default_factory: Callable[[], _VT] | None, **kwargs: _VT) -> None: ... @overload - def __init__(self, default_factory: Callable[[], _VT] | None, map: Mapping[_KT, _VT]) -> None: ... + def __init__(self, __default_factory: Callable[[], _VT] | None, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... @overload - def __init__(self, default_factory: Callable[[], _VT] | None, map: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... 
+ def __init__( + self: defaultdict[str, _VT], + __default_factory: Callable[[], _VT] | None, + __map: SupportsKeysAndGetItem[str, _VT], + **kwargs: _VT, + ) -> None: ... @overload - def __init__(self, default_factory: Callable[[], _VT] | None, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... + def __init__(self, __default_factory: Callable[[], _VT] | None, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... @overload - def __init__(self, default_factory: Callable[[], _VT] | None, iterable: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... - def __missing__(self, key: _KT) -> _VT: ... - # TODO __reversed__ - def copy(self: _S) -> _S: ... + def __init__( + self: defaultdict[str, _VT], + __default_factory: Callable[[], _VT] | None, + __iterable: Iterable[tuple[str, _VT]], + **kwargs: _VT, + ) -> None: ... + def __missing__(self, __key: _KT) -> _VT: ... + def __copy__(self: Self) -> Self: ... + def copy(self: Self) -> Self: ... class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): - maps: list[Mapping[_KT, _VT]] - def __init__(self, *maps: Mapping[_KT, _VT]) -> None: ... - def new_child(self: Self, m: Mapping[_KT, _VT] | None = ...) -> Self: ... + maps: list[MutableMapping[_KT, _VT]] + def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... + def new_child(self: Self, m: MutableMapping[_KT, _VT] | None = ...) -> Self: ... @property def parents(self: Self) -> Self: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... @@ -289,4 +378,27 @@ class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): def __getitem__(self, k: _KT) -> _VT: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... + def __contains__(self, key: object) -> bool: ... def __missing__(self, key: _KT) -> _VT: ... # undocumented + def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + def copy(self: Self) -> Self: ... 
+ __copy__ = copy + # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], __value: None = ...) -> ChainMap[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/colorsys.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/colorsys.pyi index 8db2e2c9ab3a..443ee828ebfe 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/colorsys.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/colorsys.pyi @@ -1,11 +1,11 @@ -from typing import Tuple +__all__ = ["rgb_to_yiq", "yiq_to_rgb", "rgb_to_hls", "hls_to_rgb", "rgb_to_hsv", "hsv_to_rgb"] -def rgb_to_yiq(r: float, g: float, b: float) -> Tuple[float, float, float]: ... -def yiq_to_rgb(y: float, i: float, q: float) -> Tuple[float, float, float]: ... -def rgb_to_hls(r: float, g: float, b: float) -> Tuple[float, float, float]: ... -def hls_to_rgb(h: float, l: float, s: float) -> Tuple[float, float, float]: ... -def rgb_to_hsv(r: float, g: float, b: float) -> Tuple[float, float, float]: ... -def hsv_to_rgb(h: float, s: float, v: float) -> Tuple[float, float, float]: ... +def rgb_to_yiq(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def yiq_to_rgb(y: float, i: float, q: float) -> tuple[float, float, float]: ... 
+def rgb_to_hls(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def hls_to_rgb(h: float, l: float, s: float) -> tuple[float, float, float]: ... +def rgb_to_hsv(r: float, g: float, b: float) -> tuple[float, float, float]: ... +def hsv_to_rgb(h: float, s: float, v: float) -> tuple[float, float, float]: ... # TODO undocumented ONE_SIXTH: float diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/compileall.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/compileall.pyi index 64b4dc2bc5f7..7101fd05f717 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/compileall.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/compileall.pyi @@ -5,6 +5,8 @@ from typing import Any, Protocol if sys.version_info >= (3, 7): from py_compile import PycInvalidationMode +__all__ = ["compile_dir", "compile_file", "compile_path"] + class _SupportsSearch(Protocol): def search(self, string: str) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi index f3b54e54228c..dbf8ea3df857 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/__init__.pyi @@ -1,5 +1,21 @@ import sys +if sys.version_info >= (3, 7): + __all__ = ( + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "CancelledError", + "TimeoutError", + "BrokenExecutor", + "Future", + "Executor", + "wait", + "as_completed", + "ProcessPoolExecutor", + "ThreadPoolExecutor", + ) + from ._base import ( ALL_COMPLETED as ALL_COMPLETED, FIRST_COMPLETED as FIRST_COMPLETED, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/_base.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/_base.pyi index fd7333420b39..7edcef223e91 100644 --- 
a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/_base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/_base.pyi @@ -4,19 +4,20 @@ from _typeshed import Self from abc import abstractmethod from collections.abc import Container, Iterable, Iterator, Sequence from logging import Logger -from typing import Any, Callable, Generic, Protocol, Set, TypeVar, overload +from typing import Any, Callable, Generic, Protocol, TypeVar, overload +from typing_extensions import Literal, ParamSpec, SupportsIndex if sys.version_info >= (3, 9): from types import GenericAlias -FIRST_COMPLETED: str -FIRST_EXCEPTION: str -ALL_COMPLETED: str -PENDING: str -RUNNING: str -CANCELLED: str -CANCELLED_AND_NOTIFIED: str -FINISHED: str +FIRST_COMPLETED: Literal["FIRST_COMPLETED"] +FIRST_EXCEPTION: Literal["FIRST_EXCEPTION"] +ALL_COMPLETED: Literal["ALL_COMPLETED"] +PENDING: Literal["PENDING"] +RUNNING: Literal["RUNNING"] +CANCELLED: Literal["CANCELLED"] +CANCELLED_AND_NOTIFIED: Literal["CANCELLED_AND_NOTIFIED"] +FINISHED: Literal["FINISHED"] _FUTURE_STATES: list[str] _STATE_TO_DESCRIPTION_MAP: dict[str, str] LOGGER: Logger @@ -32,8 +33,8 @@ if sys.version_info >= (3, 7): class BrokenExecutor(RuntimeError): ... _T = TypeVar("_T") - _T_co = TypeVar("_T_co", covariant=True) +_P = ParamSpec("_P") # Copied over Collection implementation as it does not exist in Python 2 and <3.6. # Also to solve pytype issues with _Collection. @@ -59,9 +60,10 @@ class Future(Generic[_T]): class Executor: if sys.version_info >= (3, 9): - def submit(self, __fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ... + def submit(self, __fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... else: - def submit(self, fn: Callable[..., _T], *args: Any, **kwargs: Any) -> Future[_T]: ... + def submit(self, fn: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> Future[_T]: ... 
+ def map( self, fn: Callable[..., _T], *iterables: Iterable[Any], timeout: float | None = ..., chunksize: int = ... ) -> Iterator[_T]: ... @@ -69,21 +71,22 @@ class Executor: def shutdown(self, wait: bool = ..., *, cancel_futures: bool = ...) -> None: ... else: def shutdown(self, wait: bool = ...) -> None: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> bool | None: ... def as_completed(fs: Iterable[Future[_T]], timeout: float | None = ...) -> Iterator[Future[_T]]: ... # Ideally this would be a namedtuple, but mypy doesn't support generic tuple types. See #1976 -class DoneAndNotDoneFutures(Sequence[Set[Future[_T]]]): +class DoneAndNotDoneFutures(Sequence[set[Future[_T]]]): done: set[Future[_T]] not_done: set[Future[_T]] def __new__(_cls, done: set[Future[_T]], not_done: set[Future[_T]]) -> DoneAndNotDoneFutures[_T]: ... def __len__(self) -> int: ... @overload - def __getitem__(self, i: int) -> set[Future[_T]]: ... + def __getitem__(self, __i: SupportsIndex) -> set[Future[_T]]: ... @overload - def __getitem__(self, s: slice) -> DoneAndNotDoneFutures[_T]: ... + def __getitem__(self, __s: slice) -> DoneAndNotDoneFutures[_T]: ... def wait(fs: Iterable[Future[_T]], timeout: float | None = ..., return_when: str = ...) -> DoneAndNotDoneFutures[_T]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/process.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/process.pyi index 4ae791361bb1..0c6c403949ad 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/process.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/process.pyi @@ -5,7 +5,7 @@ from multiprocessing.context import BaseContext, Process from multiprocessing.queues import Queue, SimpleQueue from threading import Lock, Semaphore, Thread from types import TracebackType -from typing import Any, Callable, Generic, Tuple, TypeVar +from typing import Any, Callable, Generic, TypeVar from weakref import ref from ._base import Executor, Future @@ -31,13 +31,12 @@ _MAX_WINDOWS_WORKERS: int class _RemoteTraceback(Exception): tb: str def __init__(self, tb: TracebackType) -> None: ... - def __str__(self) -> str: ... class _ExceptionWithTraceback: exc: BaseException tb: TracebackType def __init__(self, exc: BaseException, tb: TracebackType) -> None: ... - def __reduce__(self) -> str | Tuple[Any, ...]: ... + def __reduce__(self) -> str | tuple[Any, ...]: ... def _rebuild_exc(exc: Exception, tb: str) -> Exception: ... @@ -82,10 +81,11 @@ if sys.version_info >= (3, 7): def __init__( self, max_size: int | None = ..., *, ctx: BaseContext, pending_work_items: dict[int, _WorkItem[Any]] ) -> None: ... + def _on_queue_feeder_error(self, e: Exception, obj: _CallItem) -> None: ... -def _get_chunks(*iterables: Any, chunksize: int) -> Generator[Tuple[Any, ...], None, None]: ... -def _process_chunk(fn: Callable[..., Any], chunk: Tuple[Any, None, None]) -> Generator[Any, None, None]: ... +def _get_chunks(*iterables: Any, chunksize: int) -> Generator[tuple[Any, ...], None, None]: ... +def _process_chunk(fn: Callable[..., Any], chunk: tuple[Any, None, None]) -> Generator[Any, None, None]: ... 
def _sendback_result( result_queue: SimpleQueue[_WorkItem[Any]], work_id: int, result: Any | None = ..., exception: Exception | None = ... ) -> None: ... @@ -95,7 +95,7 @@ if sys.version_info >= (3, 7): call_queue: Queue[_CallItem], result_queue: SimpleQueue[_ResultItem], initializer: Callable[..., None] | None, - initargs: Tuple[Any, ...], + initargs: tuple[Any, ...], ) -> None: ... else: @@ -131,6 +131,7 @@ def _chain_from_iterable_of_lists(iterable: Iterable[MutableSequence[Any]]) -> A if sys.version_info >= (3, 7): from ._base import BrokenExecutor + class BrokenProcessPool(BrokenExecutor): ... else: @@ -139,7 +140,7 @@ else: class ProcessPoolExecutor(Executor): _mp_context: BaseContext | None = ... _initializer: Callable[..., None] | None = ... - _initargs: Tuple[Any, ...] = ... + _initargs: tuple[Any, ...] = ... _executor_manager_thread: _ThreadWakeup _processes: MutableMapping[int, Process] _shutdown_thread: bool @@ -158,10 +159,11 @@ class ProcessPoolExecutor(Executor): max_workers: int | None = ..., mp_context: BaseContext | None = ..., initializer: Callable[..., None] | None = ..., - initargs: Tuple[Any, ...] = ..., + initargs: tuple[Any, ...] = ..., ) -> None: ... else: def __init__(self, max_workers: int | None = ...) -> None: ... if sys.version_info >= (3, 9): def _start_executor_manager_thread(self) -> None: ... + def _adjust_process_count(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/thread.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/thread.pyi index 7a35bfc6ed77..46ca681c54fc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/thread.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/concurrent/futures/thread.pyi @@ -1,8 +1,8 @@ import queue import sys -from collections.abc import Iterable, Mapping, Set +from collections.abc import Iterable, Mapping, Set as AbstractSet from threading import Lock, Semaphore, Thread -from typing import Any, Callable, Generic, Tuple, TypeVar +from typing import Any, Callable, Generic, TypeVar from weakref import ref from ._base import Executor, Future @@ -33,7 +33,7 @@ if sys.version_info >= (3, 7): executor_reference: ref[Any], work_queue: queue.SimpleQueue[Any], initializer: Callable[..., None], - initargs: Tuple[Any, ...], + initargs: tuple[Any, ...], ) -> None: ... else: @@ -41,18 +41,19 @@ else: if sys.version_info >= (3, 7): from ._base import BrokenExecutor + class BrokenThreadPool(BrokenExecutor): ... class ThreadPoolExecutor(Executor): _max_workers: int _idle_semaphore: Semaphore - _threads: Set[Thread] + _threads: AbstractSet[Thread] _broken: bool _shutdown: bool _shutdown_lock: Lock _thread_name_prefix: str | None = ... _initializer: Callable[..., None] | None = ... - _initargs: Tuple[Any, ...] = ... + _initargs: tuple[Any, ...] = ... if sys.version_info >= (3, 7): _work_queue: queue.SimpleQueue[_WorkItem[Any]] else: @@ -63,10 +64,11 @@ class ThreadPoolExecutor(Executor): max_workers: int | None = ..., thread_name_prefix: str = ..., initializer: Callable[..., None] | None = ..., - initargs: Tuple[Any, ...] = ..., + initargs: tuple[Any, ...] = ..., ) -> None: ... else: def __init__(self, max_workers: int | None = ..., thread_name_prefix: str = ...) -> None: ... + def _adjust_thread_count(self) -> None: ... 
if sys.version_info >= (3, 7): def _initializer_failed(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/configparser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/configparser.pyi index c278b82a6823..edcdc99ca184 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/configparser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/configparser.pyi @@ -1,14 +1,38 @@ import sys from _typeshed import StrOrBytesPath, StrPath, SupportsWrite from collections.abc import Callable, ItemsView, Iterable, Iterator, Mapping, MutableMapping, Sequence -from typing import Any, ClassVar, Dict, Optional, Pattern, Tuple, Type, TypeVar, overload +from typing import Any, ClassVar, Optional, Pattern, TypeVar, overload from typing_extensions import Literal +__all__ = [ + "NoSectionError", + "DuplicateOptionError", + "DuplicateSectionError", + "NoOptionError", + "InterpolationError", + "InterpolationDepthError", + "InterpolationMissingOptionError", + "InterpolationSyntaxError", + "ParsingError", + "MissingSectionHeaderError", + "ConfigParser", + "SafeConfigParser", + "RawConfigParser", + "Interpolation", + "BasicInterpolation", + "ExtendedInterpolation", + "LegacyInterpolation", + "SectionProxy", + "ConverterMapping", + "DEFAULTSECT", + "MAX_INTERPOLATION_DEPTH", +] + # Internal type aliases _section = Mapping[str, str] _parser = MutableMapping[str, _section] _converter = Callable[[str], Any] -_converters = Dict[str, _converter] +_converters = dict[str, _converter] _T = TypeVar("_T") if sys.version_info >= (3, 7): @@ -16,8 +40,8 @@ if sys.version_info >= (3, 7): else: _Path = StrPath -DEFAULTSECT: str -MAX_INTERPOLATION_DEPTH: int +DEFAULTSECT: Literal["DEFAULT"] +MAX_INTERPOLATION_DEPTH: Literal[10] class Interpolation: def before_get(self, parser: _parser, section: str, option: str, value: str, defaults: _section) -> str: ... 
@@ -47,7 +71,7 @@ class RawConfigParser(_parser): def __init__( self, defaults: Mapping[str, str | None] | None = ..., - dict_type: Type[Mapping[str, str]] = ..., + dict_type: type[Mapping[str, str]] = ..., allow_no_value: Literal[True] = ..., *, delimiters: Sequence[str] = ..., @@ -63,7 +87,7 @@ class RawConfigParser(_parser): def __init__( self, defaults: _section | None = ..., - dict_type: Type[Mapping[str, str]] = ..., + dict_type: type[Mapping[str, str]] = ..., allow_no_value: bool = ..., *, delimiters: Sequence[str] = ..., @@ -80,6 +104,7 @@ class RawConfigParser(_parser): def __setitem__(self, section: str, options: _section) -> None: ... def __delitem__(self, section: str) -> None: ... def __iter__(self) -> Iterator[str]: ... + def __contains__(self, key: object) -> bool: ... def defaults(self) -> _section: ... def sections(self) -> list[str]: ... def add_section(self, section: str) -> None: ... @@ -122,7 +147,7 @@ class RawConfigParser(_parser): fallback: _T = ..., ) -> _T: ... # This is incompatible with MutableMapping so we ignore the type - @overload # type: ignore + @overload # type: ignore[override] def get(self, section: str, option: str, *, raw: bool = ..., vars: _section | None = ...) -> str: ... @overload def get(self, section: str, option: str, *, raw: bool = ..., vars: _section | None = ..., fallback: _T) -> str | _T: ... @@ -137,7 +162,9 @@ class RawConfigParser(_parser): def optionxform(self, optionstr: str) -> str: ... class ConfigParser(RawConfigParser): ... -class SafeConfigParser(ConfigParser): ... + +if sys.version_info < (3, 12): + class SafeConfigParser(ConfigParser): ... # deprecated alias class SectionProxy(MutableMapping[str, str]): def __init__(self, parser: RawConfigParser, name: str) -> None: ... @@ -151,7 +178,16 @@ class SectionProxy(MutableMapping[str, str]): def parser(self) -> RawConfigParser: ... @property def name(self) -> str: ... 
- def get(self, option: str, fallback: str | None = ..., *, raw: bool = ..., vars: _section | None = ..., _impl: Any | None = ..., **kwargs: Any) -> str: ... # type: ignore + def get( # type: ignore[override] + self, + option: str, + fallback: str | None = ..., + *, + raw: bool = ..., + vars: _section | None = ..., + _impl: Any | None = ..., + **kwargs: Any, + ) -> str: ... # These are partially-applied version of the methods with the same names in # RawConfigParser; the stubs should be kept updated together @overload @@ -220,7 +256,7 @@ class InterpolationSyntaxError(InterpolationError): ... class ParsingError(Error): source: str - errors: list[Tuple[int, str]] + errors: list[tuple[int, str]] def __init__(self, source: str | None = ..., filename: str | None = ...) -> None: ... def append(self, lineno: int, line: str) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi index 69f62575633c..4795aac67c23 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/contextlib.pyi @@ -1,24 +1,88 @@ import sys -from _typeshed import Self +from _typeshed import Self, StrOrBytesPath from types import TracebackType -from typing import ( +from typing import ( # noqa Y027 IO, Any, - AsyncContextManager, + AsyncGenerator, AsyncIterator, Awaitable, Callable, ContextManager, + Generator, + Generic, Iterator, Optional, - Type, + Protocol, TypeVar, overload, ) -from typing_extensions import ParamSpec, Protocol +from typing_extensions import ParamSpec + +if sys.version_info >= (3, 11): + __all__ = [ + "asynccontextmanager", + "contextmanager", + "closing", + "nullcontext", + "AbstractContextManager", + "AbstractAsyncContextManager", + "AsyncExitStack", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + "aclosing", + "chdir", + ] +elif sys.version_info >= (3, 
10): + __all__ = [ + "asynccontextmanager", + "contextmanager", + "closing", + "nullcontext", + "AbstractContextManager", + "AbstractAsyncContextManager", + "AsyncExitStack", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + "aclosing", + ] +elif sys.version_info >= (3, 7): + __all__ = [ + "asynccontextmanager", + "contextmanager", + "closing", + "nullcontext", + "AbstractContextManager", + "AbstractAsyncContextManager", + "AsyncExitStack", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + ] +else: + __all__ = [ + "contextmanager", + "closing", + "AbstractContextManager", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + ] AbstractContextManager = ContextManager if sys.version_info >= (3, 7): + from typing import AsyncContextManager # noqa Y022 + AbstractAsyncContextManager = AsyncContextManager _T = TypeVar("_T") @@ -27,81 +91,116 @@ _T_io = TypeVar("_T_io", bound=Optional[IO[str]]) _F = TypeVar("_F", bound=Callable[..., Any]) _P = ParamSpec("_P") -_ExitFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], bool] -_CM_EF = TypeVar("_CM_EF", ContextManager[Any], _ExitFunc) +_ExitFunc = Callable[[Optional[type[BaseException]], Optional[BaseException], Optional[TracebackType]], Optional[bool]] +_CM_EF = TypeVar("_CM_EF", AbstractContextManager[Any], _ExitFunc) -class _GeneratorContextManager(ContextManager[_T_co]): +class ContextDecorator: def __call__(self, func: _F) -> _F: ... -# type ignore to deal with incomplete ParamSpec support in mypy -def contextmanager(func: Callable[_P, Iterator[_T]]) -> Callable[_P, _GeneratorContextManager[_T]]: ... # type: ignore +class _GeneratorContextManager(AbstractContextManager[_T_co], ContextDecorator, Generic[_T_co]): + # In Python <= 3.6, __init__ and all instance attributes are defined directly on this class. 
+ # In Python >= 3.7, __init__ and all instance attributes are inherited from _GeneratorContextManagerBase + # _GeneratorContextManagerBase is more trouble than it's worth to include in the stub; see #6676 + def __init__(self, func: Callable[..., Iterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: Generator[_T_co, Any, Any] + func: Callable[..., Generator[_T_co, Any, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + def __exit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... + +if sys.version_info >= (3, 10): + _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) + + class AsyncContextDecorator: + def __call__(self, func: _AF) -> _AF: ... + + class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], AsyncContextDecorator, Generic[_T_co]): + # __init__ and these attributes are actually defined in the base class _GeneratorContextManagerBase, + # which is more trouble than it's worth to include in the stub (see #6676) + def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: AsyncGenerator[_T_co, Any] + func: Callable[..., AsyncGenerator[_T_co, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +elif sys.version_info >= (3, 7): + class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], Generic[_T_co]): + def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: AsyncGenerator[_T_co, Any] + func: Callable[..., AsyncGenerator[_T_co, Any]] + args: tuple[Any, ...] 
+ kwds: dict[str, Any] if sys.version_info >= (3, 7): - def asynccontextmanager(func: Callable[_P, AsyncIterator[_T]]) -> Callable[_P, AsyncContextManager[_T]]: ... # type: ignore + def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: ... class _SupportsClose(Protocol): def close(self) -> object: ... _SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) -class closing(ContextManager[_SupportsCloseT]): +class closing(AbstractContextManager[_SupportsCloseT]): def __init__(self, thing: _SupportsCloseT) -> None: ... if sys.version_info >= (3, 10): class _SupportsAclose(Protocol): - def aclose(self) -> Awaitable[object]: ... + async def aclose(self) -> object: ... _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) - class aclosing(AsyncContextManager[_SupportsAcloseT]): + + class aclosing(AbstractAsyncContextManager[_SupportsAcloseT]): def __init__(self, thing: _SupportsAcloseT) -> None: ... - _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) - class AsyncContextDecorator: - def __call__(self, func: _AF) -> _AF: ... -class suppress(ContextManager[None]): - def __init__(self, *exceptions: Type[BaseException]) -> None: ... +class suppress(AbstractContextManager[None]): + def __init__(self, *exceptions: type[BaseException]) -> None: ... def __exit__( - self, exctype: Type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None ) -> bool: ... -class redirect_stdout(ContextManager[_T_io]): +class redirect_stdout(AbstractContextManager[_T_io]): def __init__(self, new_target: _T_io) -> None: ... -class redirect_stderr(ContextManager[_T_io]): +class redirect_stderr(AbstractContextManager[_T_io]): def __init__(self, new_target: _T_io) -> None: ... -class ContextDecorator: - def __call__(self, func: _F) -> _F: ... 
- -class ExitStack(ContextManager[ExitStack]): +class ExitStack(AbstractContextManager[ExitStack]): def __init__(self) -> None: ... - def enter_context(self, cm: ContextManager[_T]) -> _T: ... + def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... - def callback(self, callback: Callable[..., Any], *args: Any, **kwds: Any) -> Callable[..., Any]: ... + def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... def pop_all(self: Self) -> Self: ... def close(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, __exc_type: Type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None ) -> bool: ... if sys.version_info >= (3, 7): - _ExitCoroFunc = Callable[[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]], Awaitable[bool]] - _CallbackCoroFunc = Callable[..., Awaitable[Any]] - _ACM_EF = TypeVar("_ACM_EF", AsyncContextManager[Any], _ExitCoroFunc) - class AsyncExitStack(AsyncContextManager[AsyncExitStack]): + _ExitCoroFunc = Callable[[Optional[type[BaseException]], Optional[BaseException], Optional[TracebackType]], Awaitable[bool]] + _ACM_EF = TypeVar("_ACM_EF", AbstractAsyncContextManager[Any], _ExitCoroFunc) + + class AsyncExitStack(AbstractAsyncContextManager[AsyncExitStack]): def __init__(self) -> None: ... - def enter_context(self, cm: ContextManager[_T]) -> _T: ... - def enter_async_context(self, cm: AsyncContextManager[_T]) -> Awaitable[_T]: ... + def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... def push(self, exit: _CM_EF) -> _CM_EF: ... def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... 
- def callback(self, callback: Callable[..., Any], *args: Any, **kwds: Any) -> Callable[..., Any]: ... - def push_async_callback(self, callback: _CallbackCoroFunc, *args: Any, **kwds: Any) -> _CallbackCoroFunc: ... + def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... + def push_async_callback( + self, __callback: Callable[_P, Awaitable[_T]], *args: _P.args, **kwds: _P.kwargs + ) -> Callable[_P, Awaitable[_T]]: ... def pop_all(self: Self) -> Self: ... - def aclose(self) -> Awaitable[None]: ... - def __aenter__(self: Self) -> Awaitable[Self]: ... - def __aexit__( - self, __exc_type: Type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None - ) -> Awaitable[bool]: ... + async def aclose(self) -> None: ... + async def __aenter__(self: Self) -> Self: ... + async def __aexit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool: ... if sys.version_info >= (3, 10): class nullcontext(AbstractContextManager[_T], AbstractAsyncContextManager[_T]): @@ -124,3 +223,12 @@ elif sys.version_info >= (3, 7): def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... def __enter__(self) -> _T: ... def __exit__(self, *exctype: Any) -> None: ... + +if sys.version_info >= (3, 11): + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=int | StrOrBytesPath) + + class chdir(AbstractContextManager[None], Generic[_T_fd_or_any_path]): + path: _T_fd_or_any_path + def __init__(self, path: _T_fd_or_any_path) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *excinfo: object) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/contextvars.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/contextvars.pyi index e97f62188dcb..6b5661dd69eb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/contextvars.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/contextvars.pyi @@ -1,12 +1,17 @@ import sys from typing import Any, Callable, ClassVar, Generic, Iterator, Mapping, TypeVar, overload +from typing_extensions import ParamSpec, final if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = ("Context", "ContextVar", "Token", "copy_context") + _T = TypeVar("_T") _D = TypeVar("_D") +_P = ParamSpec("_P") +@final class ContextVar(Generic[_T]): def __init__(self, name: str, *, default: _T = ...) -> None: ... @property @@ -15,11 +20,12 @@ class ContextVar(Generic[_T]): def get(self) -> _T: ... @overload def get(self, default: _D | _T) -> _D | _T: ... - def set(self, value: _T) -> Token[_T]: ... - def reset(self, token: Token[_T]) -> None: ... + def set(self, __value: _T) -> Token[_T]: ... + def reset(self, __token: Token[_T]) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... +@final class Token(Generic[_T]): @property def var(self) -> ContextVar[_T]: ... @@ -33,14 +39,15 @@ def copy_context() -> Context: ... # It doesn't make sense to make this generic, because for most Contexts each ContextVar will have # a different value. +@final class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... @overload - def get(self, __key: ContextVar[Any]) -> Any | None: ... + def get(self, __key: ContextVar[_T]) -> _T | None: ... @overload - def get(self, __key: ContextVar[Any], __default: Any | None) -> Any: ... - def run(self, callable: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... + def get(self, __key: ContextVar[_T], __default: _D) -> _T | _D: ... 
+ def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... def copy(self) -> Context: ... - def __getitem__(self, key: ContextVar[Any]) -> Any: ... + def __getitem__(self, __key: ContextVar[_T]) -> _T: ... def __iter__(self) -> Iterator[ContextVar[Any]]: ... def __len__(self) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/copy.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/copy.pyi index a5f9420e3811..b53f418b3930 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/copy.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/copy.pyi @@ -1,5 +1,7 @@ from typing import Any, TypeVar +__all__ = ["Error", "copy", "deepcopy"] + _T = TypeVar("_T") # None in CPython but non-None in Jython diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/copyreg.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/copyreg.pyi index 320097b3a204..d9d7a7cef935 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/copyreg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/copyreg.pyi @@ -1,9 +1,9 @@ -from typing import Any, Callable, Hashable, Optional, SupportsInt, Tuple, TypeVar, Union +from typing import Any, Callable, Hashable, Optional, SupportsInt, TypeVar, Union _TypeT = TypeVar("_TypeT", bound=type) -_Reduce = Union[Tuple[Callable[..., _TypeT], Tuple[Any, ...]], Tuple[Callable[..., _TypeT], Tuple[Any, ...], Optional[Any]]] +_Reduce = Union[tuple[Callable[..., _TypeT], tuple[Any, ...]], tuple[Callable[..., _TypeT], tuple[Any, ...], Optional[Any]]] -__all__: list[str] +__all__ = ["pickle", "constructor", "add_extension", "remove_extension", "clear_extension_cache"] def pickle( ob_type: _TypeT, @@ -15,4 +15,5 @@ def add_extension(module: Hashable, name: Hashable, code: SupportsInt) -> None: def remove_extension(module: Hashable, name: Hashable, code: int) -> None: ... def clear_extension_cache() -> None: ... 
-dispatch_table: dict[type, Callable[[type], str | _Reduce[type]]] # undocumented +_DispatchTableType = dict[type, Callable[[type], str | _Reduce[type]]] # imported by multiprocessing.reduction +dispatch_table: _DispatchTableType # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/crypt.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/crypt.pyi index 27e30433f702..5083f1eebeed 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/crypt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/crypt.pyi @@ -1,20 +1,19 @@ import sys -class _Method: ... +if sys.platform != "win32": + class _Method: ... + METHOD_CRYPT: _Method + METHOD_MD5: _Method + METHOD_SHA256: _Method + METHOD_SHA512: _Method + if sys.version_info >= (3, 7): + METHOD_BLOWFISH: _Method -METHOD_CRYPT: _Method -METHOD_MD5: _Method -METHOD_SHA256: _Method -METHOD_SHA512: _Method -if sys.version_info >= (3, 7): - METHOD_BLOWFISH: _Method + methods: list[_Method] -methods: list[_Method] + if sys.version_info >= (3, 7): + def mksalt(method: _Method | None = ..., *, rounds: int | None = ...) -> str: ... + else: + def mksalt(method: _Method | None = ...) -> str: ... -if sys.version_info >= (3, 7): - def mksalt(method: _Method | None = ..., *, rounds: int | None = ...) -> str: ... - -else: - def mksalt(method: _Method | None = ...) -> str: ... - -def crypt(word: str, salt: str | _Method | None = ...) -> str: ... + def crypt(word: str, salt: str | _Method | None = ...) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/csv.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/csv.pyi index 0b69cb2272d3..dcb3f19bebe1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/csv.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/csv.pyi @@ -6,6 +6,7 @@ from _csv import ( QUOTE_NONNUMERIC as QUOTE_NONNUMERIC, Dialect as Dialect, Error as Error, + __version__ as __version__, _DialectLike, _reader, _writer, @@ -17,14 +18,39 @@ from _csv import ( unregister_dialect as unregister_dialect, writer as writer, ) +from _typeshed import Self from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence -from typing import Any, Generic, Type, TypeVar, overload +from typing import Any, Generic, TypeVar, overload if sys.version_info >= (3, 8): - from typing import Dict as _DictReadMapping + from builtins import dict as _DictReadMapping else: from collections import OrderedDict as _DictReadMapping +__all__ = [ + "QUOTE_MINIMAL", + "QUOTE_ALL", + "QUOTE_NONNUMERIC", + "QUOTE_NONE", + "Error", + "Dialect", + "__doc__", + "excel", + "excel_tab", + "field_size_limit", + "reader", + "writer", + "register_dialect", + "get_dialect", + "list_dialects", + "Sniffer", + "unregister_dialect", + "__version__", + "DictReader", + "DictWriter", + "unix_dialect", +] + _T = TypeVar("_T") class excel(Dialect): @@ -75,7 +101,7 @@ class DictReader(Generic[_T], Iterator[_DictReadMapping[_T, str]]): *args: Any, **kwds: Any, ) -> None: ... - def __iter__(self) -> DictReader[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _DictReadMapping[_T, str]: ... class DictWriter(Generic[_T]): @@ -97,11 +123,12 @@ class DictWriter(Generic[_T]): def writeheader(self) -> Any: ... else: def writeheader(self) -> None: ... + def writerow(self, rowdict: Mapping[_T, Any]) -> Any: ... def writerows(self, rowdicts: Iterable[Mapping[_T, Any]]) -> None: ... 
-class Sniffer(object): +class Sniffer: preferred: list[str] def __init__(self) -> None: ... - def sniff(self, sample: str, delimiters: str | None = ...) -> Type[Dialect]: ... + def sniff(self, sample: str, delimiters: str | None = ...) -> type[Dialect]: ... def has_header(self, sample: str) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi index 03e9affd5267..88a9567e1070 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ctypes/__init__.pyi @@ -1,5 +1,6 @@ import sys -from array import array +from _typeshed import ReadableBuffer, Self, WriteableBuffer +from abc import abstractmethod from typing import ( Any, Callable, @@ -10,8 +11,6 @@ from typing import ( Mapping, Optional, Sequence, - Tuple, - Type, TypeVar, Union as _UnionT, overload, @@ -28,12 +27,12 @@ RTLD_GLOBAL: int RTLD_LOCAL: int DEFAULT_MODE: int -class CDLL(object): +class CDLL: _func_flags_: ClassVar[int] _func_restype_: ClassVar[_CData] _name: str _handle: int - _FuncPtr: Type[_FuncPointer] + _FuncPtr: type[_FuncPointer] if sys.version_info >= (3, 8): def __init__( self, @@ -48,6 +47,7 @@ class CDLL(object): def __init__( self, name: str | None, mode: int = ..., handle: int | None = ..., use_errno: bool = ..., use_last_error: bool = ... ) -> None: ... + def __getattr__(self, name: str) -> _NamedFuncPointer: ... def __getitem__(self, name: str) -> _NamedFuncPointer: ... @@ -58,7 +58,7 @@ if sys.platform == "win32": class PyDLL(CDLL): ... class LibraryLoader(Generic[_DLLT]): - def __init__(self, dlltype: Type[_DLLT]) -> None: ... + def __init__(self, dlltype: type[_DLLT]) -> None: ... def __getattr__(self, name: str) -> _DLLT: ... def __getitem__(self, name: str) -> _DLLT: ... def LoadLibrary(self, name: str) -> _DLLT: ... 
@@ -72,53 +72,46 @@ if sys.platform == "win32": pydll: LibraryLoader[PyDLL] pythonapi: PyDLL -# Anything that implements the read-write buffer interface. -# The buffer interface is defined purely on the C level, so we cannot define a normal Protocol -# for it. Instead we have to list the most common stdlib buffer classes in a Union. -_WritableBuffer = _UnionT[bytearray, memoryview, array[Any], _CData] -# Same as _WritableBuffer, but also includes read-only buffer types (like bytes). -_ReadOnlyBuffer = _UnionT[_WritableBuffer, bytes] - class _CDataMeta(type): # By default mypy complains about the following two methods, because strictly speaking cls # might not be a Type[_CT]. However this can never actually happen, because the only class that # uses _CDataMeta as its metaclass is _CData. So it's safe to ignore the errors here. - def __mul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore - def __rmul__(cls: Type[_CT], other: int) -> Type[Array[_CT]]: ... # type: ignore + def __mul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] + def __rmul__(cls: type[_CT], other: int) -> type[Array[_CT]]: ... # type: ignore[misc] class _CData(metaclass=_CDataMeta): _b_base: int _b_needsfree_: bool _objects: Mapping[Any, int] | None @classmethod - def from_buffer(cls: Type[_CT], source: _WritableBuffer, offset: int = ...) -> _CT: ... + def from_buffer(cls: type[Self], source: WriteableBuffer, offset: int = ...) -> Self: ... @classmethod - def from_buffer_copy(cls: Type[_CT], source: _ReadOnlyBuffer, offset: int = ...) -> _CT: ... + def from_buffer_copy(cls: type[Self], source: ReadableBuffer, offset: int = ...) -> Self: ... @classmethod - def from_address(cls: Type[_CT], address: int) -> _CT: ... + def from_address(cls: type[Self], address: int) -> Self: ... @classmethod - def from_param(cls: Type[_CT], obj: Any) -> _CT | _CArgObject: ... + def from_param(cls: type[Self], obj: Any) -> Self | _CArgObject: ... 
@classmethod - def in_dll(cls: Type[_CT], library: CDLL, name: str) -> _CT: ... + def in_dll(cls: type[Self], library: CDLL, name: str) -> Self: ... class _CanCastTo(_CData): ... class _PointerLike(_CanCastTo): ... -_ECT = Callable[[Optional[Type[_CData]], _FuncPointer, Tuple[_CData, ...]], _CData] -_PF = _UnionT[Tuple[int], Tuple[int, str], Tuple[int, str, Any]] +_ECT = Callable[[Optional[type[_CData]], _FuncPointer, tuple[_CData, ...]], _CData] +_PF = _UnionT[tuple[int], tuple[int, str], tuple[int, str, Any]] class _FuncPointer(_PointerLike, _CData): - restype: Type[_CData] | Callable[[int], Any] | None - argtypes: Sequence[Type[_CData]] + restype: type[_CData] | Callable[[int], Any] | None + argtypes: Sequence[type[_CData]] errcheck: _ECT @overload def __init__(self, address: int) -> None: ... @overload def __init__(self, callable: Callable[..., Any]) -> None: ... @overload - def __init__(self, func_spec: Tuple[str | int, CDLL], paramflags: Tuple[_PF, ...] = ...) -> None: ... + def __init__(self, func_spec: tuple[str | int, CDLL], paramflags: tuple[_PF, ...] = ...) -> None: ... @overload - def __init__(self, vtlb_index: int, name: str, paramflags: Tuple[_PF, ...] = ..., iid: pointer[c_int] = ...) -> None: ... + def __init__(self, vtlb_index: int, name: str, paramflags: tuple[_PF, ...] = ..., iid: pointer[c_int] = ...) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... class _NamedFuncPointer(_FuncPointer): @@ -127,15 +120,15 @@ class _NamedFuncPointer(_FuncPointer): class ArgumentError(Exception): ... def CFUNCTYPE( - restype: Type[_CData] | None, *argtypes: Type[_CData], use_errno: bool = ..., use_last_error: bool = ... -) -> Type[_FuncPointer]: ... + restype: type[_CData] | None, *argtypes: type[_CData], use_errno: bool = ..., use_last_error: bool = ... +) -> type[_FuncPointer]: ... if sys.platform == "win32": def WINFUNCTYPE( - restype: Type[_CData] | None, *argtypes: Type[_CData], use_errno: bool = ..., use_last_error: bool = ... 
- ) -> Type[_FuncPointer]: ... + restype: type[_CData] | None, *argtypes: type[_CData], use_errno: bool = ..., use_last_error: bool = ... + ) -> type[_FuncPointer]: ... -def PYFUNCTYPE(restype: Type[_CData] | None, *argtypes: Type[_CData]) -> Type[_FuncPointer]: ... +def PYFUNCTYPE(restype: type[_CData] | None, *argtypes: type[_CData]) -> type[_FuncPointer]: ... class _CArgObject: ... @@ -149,12 +142,12 @@ _CVoidPLike = _UnionT[_PointerLike, Array[Any], _CArgObject, int] _CVoidConstPLike = _UnionT[_CVoidPLike, bytes] def addressof(obj: _CData) -> int: ... -def alignment(obj_or_type: _CData | Type[_CData]) -> int: ... +def alignment(obj_or_type: _CData | type[_CData]) -> int: ... def byref(obj: _CData, offset: int = ...) -> _CArgObject: ... _CastT = TypeVar("_CastT", bound=_CanCastTo) -def cast(obj: _CData | _CArgObject | int, typ: Type[_CastT]) -> _CastT: ... +def cast(obj: _CData | _CArgObject | int, typ: type[_CastT]) -> _CastT: ... def create_string_buffer(init: int | bytes, size: int | None = ...) -> Array[c_char]: ... c_buffer = create_string_buffer @@ -164,7 +157,7 @@ def create_unicode_buffer(init: int | str, size: int | None = ...) -> Array[c_wc if sys.platform == "win32": def DllCanUnloadNow() -> int: ... def DllGetClassObject(rclsid: Any, riid: Any, ppv: Any) -> int: ... # TODO not documented - def FormatError(code: int) -> str: ... + def FormatError(code: int = ...) -> str: ... def GetLastError() -> int: ... def get_errno() -> int: ... @@ -174,23 +167,23 @@ if sys.platform == "win32": def memmove(dst: _CVoidPLike, src: _CVoidConstPLike, count: int) -> None: ... def memset(dst: _CVoidPLike, c: int, count: int) -> None: ... -def POINTER(type: Type[_CT]) -> Type[pointer[_CT]]: ... +def POINTER(type: type[_CT]) -> type[pointer[_CT]]: ... # The real ctypes.pointer is a function, not a class. The stub version of pointer behaves like # ctypes._Pointer in that it is the base class for all pointer types. 
Unlike the real _Pointer, # it can be instantiated directly (to mimic the behavior of the real pointer function). class pointer(Generic[_CT], _PointerLike, _CData): - _type_: ClassVar[Type[_CT]] + _type_: type[_CT] contents: _CT def __init__(self, arg: _CT = ...) -> None: ... @overload - def __getitem__(self, i: int) -> _CT: ... + def __getitem__(self, __i: int) -> _CT: ... @overload - def __getitem__(self, s: slice) -> list[_CT]: ... + def __getitem__(self, __s: slice) -> list[_CT]: ... @overload - def __setitem__(self, i: int, o: _CT) -> None: ... + def __setitem__(self, __i: int, __o: _CT) -> None: ... @overload - def __setitem__(self, s: slice, o: Iterable[_CT]) -> None: ... + def __setitem__(self, __s: slice, __o: Iterable[_CT]) -> None: ... def resize(obj: _CData, size: int) -> None: ... def set_errno(value: int) -> int: ... @@ -198,7 +191,7 @@ def set_errno(value: int) -> int: ... if sys.platform == "win32": def set_last_error(value: int) -> int: ... -def sizeof(obj_or_type: _CData | Type[_CData]) -> int: ... +def sizeof(obj_or_type: _CData | type[_CData]) -> int: ... def string_at(address: _CVoidConstPLike, size: int = ...) -> bytes: ... if sys.platform == "win32": @@ -259,7 +252,7 @@ class _CField: size: int class _StructUnionMeta(_CDataMeta): - _fields_: Sequence[Tuple[str, Type[_CData]] | Tuple[str, Type[_CData], int]] + _fields_: Sequence[tuple[str, type[_CData]] | tuple[str, type[_CData], int]] _pack_: int _anonymous_: Sequence[str] def __getattr__(self, name: str) -> _CField: ... @@ -275,8 +268,16 @@ class BigEndianStructure(Structure): ... class LittleEndianStructure(Structure): ... class Array(Generic[_CT], _CData): - _length_: ClassVar[int] - _type_: ClassVar[Type[_CT]] + @property + @abstractmethod + def _length_(self) -> int: ... + @_length_.setter + def _length_(self, value: int) -> None: ... + @property + @abstractmethod + def _type_(self) -> type[_CT]: ... + @_type_.setter + def _type_(self, value: type[_CT]) -> None: ... 
raw: bytes # Note: only available if _CT == c_char value: Any # Note: bytes if _CT == c_char, str if _CT == c_wchar, unavailable otherwise # TODO These methods cannot be annotated correctly at the moment. @@ -293,13 +294,13 @@ class Array(Generic[_CT], _CData): # the array element type would belong are annotated with Any instead. def __init__(self, *args: Any) -> None: ... @overload - def __getitem__(self, i: int) -> Any: ... + def __getitem__(self, __i: int) -> Any: ... @overload - def __getitem__(self, s: slice) -> list[Any]: ... + def __getitem__(self, __s: slice) -> list[Any]: ... @overload - def __setitem__(self, i: int, o: Any) -> None: ... + def __setitem__(self, __i: int, __o: Any) -> None: ... @overload - def __setitem__(self, s: slice, o: Iterable[Any]) -> None: ... + def __setitem__(self, __s: slice, __o: Iterable[Any]) -> None: ... def __iter__(self) -> Iterator[Any]: ... # Can't inherit from Sized because the metaclass conflict between # Sized and _CData prevents using _CDataMeta. diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/curses/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/curses/__init__.pyi index 73e84fba3763..ee74c13b6b50 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/curses/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/curses/__init__.pyi @@ -1,15 +1,19 @@ -from _curses import * # noqa: F403 -from _curses import _CursesWindow as _CursesWindow +import sys from typing import Any, Callable, TypeVar -_T = TypeVar("_T") +if sys.platform != "win32": + from _curses import * + from _curses import _CursesWindow as _CursesWindow -# available after calling `curses.initscr()` -LINES: int -COLS: int + _T = TypeVar("_T") -# available after calling `curses.start_color()` -COLORS: int -COLOR_PAIRS: int + # available after calling `curses.initscr()` + LINES: int + COLS: int -def wrapper(__func: Callable[..., _T], *arg: Any, **kwds: Any) -> _T: ... 
+ # available after calling `curses.start_color()` + COLORS: int + COLOR_PAIRS: int + # TODO: wait for `Concatenate` support + # def wrapper(__func: Callable[Concatenate[_CursesWindow, _P], _T], *arg: _P.args, **kwds: _P.kwargs) -> _T: ... + def wrapper(__func: Callable[..., _T], *arg: Any, **kwds: Any) -> _T: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/curses/ascii.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/curses/ascii.pyi index 66efbe36a7df..25de8f605bda 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/curses/ascii.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/curses/ascii.pyi @@ -1,62 +1,63 @@ +import sys from typing import TypeVar -_CharT = TypeVar("_CharT", str, int) +if sys.platform != "win32": + _CharT = TypeVar("_CharT", str, int) -NUL: int -SOH: int -STX: int -ETX: int -EOT: int -ENQ: int -ACK: int -BEL: int -BS: int -TAB: int -HT: int -LF: int -NL: int -VT: int -FF: int -CR: int -SO: int -SI: int -DLE: int -DC1: int -DC2: int -DC3: int -DC4: int -NAK: int -SYN: int -ETB: int -CAN: int -EM: int -SUB: int -ESC: int -FS: int -GS: int -RS: int -US: int -SP: int -DEL: int + NUL: int + SOH: int + STX: int + ETX: int + EOT: int + ENQ: int + ACK: int + BEL: int + BS: int + TAB: int + HT: int + LF: int + NL: int + VT: int + FF: int + CR: int + SO: int + SI: int + DLE: int + DC1: int + DC2: int + DC3: int + DC4: int + NAK: int + SYN: int + ETB: int + CAN: int + EM: int + SUB: int + ESC: int + FS: int + GS: int + RS: int + US: int + SP: int + DEL: int -controlnames: list[int] - -def isalnum(c: str | int) -> bool: ... -def isalpha(c: str | int) -> bool: ... -def isascii(c: str | int) -> bool: ... -def isblank(c: str | int) -> bool: ... -def iscntrl(c: str | int) -> bool: ... -def isdigit(c: str | int) -> bool: ... -def isgraph(c: str | int) -> bool: ... -def islower(c: str | int) -> bool: ... -def isprint(c: str | int) -> bool: ... -def ispunct(c: str | int) -> bool: ... 
-def isspace(c: str | int) -> bool: ... -def isupper(c: str | int) -> bool: ... -def isxdigit(c: str | int) -> bool: ... -def isctrl(c: str | int) -> bool: ... -def ismeta(c: str | int) -> bool: ... -def ascii(c: _CharT) -> _CharT: ... -def ctrl(c: _CharT) -> _CharT: ... -def alt(c: _CharT) -> _CharT: ... -def unctrl(c: str | int) -> str: ... + controlnames: list[int] + def isalnum(c: str | int) -> bool: ... + def isalpha(c: str | int) -> bool: ... + def isascii(c: str | int) -> bool: ... + def isblank(c: str | int) -> bool: ... + def iscntrl(c: str | int) -> bool: ... + def isdigit(c: str | int) -> bool: ... + def isgraph(c: str | int) -> bool: ... + def islower(c: str | int) -> bool: ... + def isprint(c: str | int) -> bool: ... + def ispunct(c: str | int) -> bool: ... + def isspace(c: str | int) -> bool: ... + def isupper(c: str | int) -> bool: ... + def isxdigit(c: str | int) -> bool: ... + def isctrl(c: str | int) -> bool: ... + def ismeta(c: str | int) -> bool: ... + def ascii(c: _CharT) -> _CharT: ... + def ctrl(c: _CharT) -> _CharT: ... + def alt(c: _CharT) -> _CharT: ... + def unctrl(c: str | int) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/curses/panel.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/curses/panel.pyi index 138e4a9f727e..30803791f039 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/curses/panel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/curses/panel.pyi @@ -1,20 +1,25 @@ -from _curses import _CursesWindow +import sys -class _Curses_Panel: # type is (note the space in the class name) - def above(self) -> _Curses_Panel: ... - def below(self) -> _Curses_Panel: ... - def bottom(self) -> None: ... - def hidden(self) -> bool: ... - def hide(self) -> None: ... - def move(self, y: int, x: int) -> None: ... - def replace(self, win: _CursesWindow) -> None: ... - def set_userptr(self, obj: object) -> None: ... - def show(self) -> None: ... - def top(self) -> None: ... 
- def userptr(self) -> object: ... - def window(self) -> _CursesWindow: ... +if sys.platform != "win32": + from _curses import _CursesWindow -def bottom_panel() -> _Curses_Panel: ... -def new_panel(__win: _CursesWindow) -> _Curses_Panel: ... -def top_panel() -> _Curses_Panel: ... -def update_panels() -> _Curses_Panel: ... + version: str + + class _Curses_Panel: # type is (note the space in the class name) + def above(self) -> _Curses_Panel: ... + def below(self) -> _Curses_Panel: ... + def bottom(self) -> None: ... + def hidden(self) -> bool: ... + def hide(self) -> None: ... + def move(self, y: int, x: int) -> None: ... + def replace(self, win: _CursesWindow) -> None: ... + def set_userptr(self, obj: object) -> None: ... + def show(self) -> None: ... + def top(self) -> None: ... + def userptr(self) -> object: ... + def window(self) -> _CursesWindow: ... + + def bottom_panel() -> _Curses_Panel: ... + def new_panel(__win: _CursesWindow) -> _Curses_Panel: ... + def top_panel() -> _Curses_Panel: ... + def update_panels() -> _Curses_Panel: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/curses/textpad.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/curses/textpad.pyi index 578a579fda38..b8a9c843f402 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/curses/textpad.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/curses/textpad.pyi @@ -1,11 +1,13 @@ -from _curses import _CursesWindow +import sys from typing import Callable -def rectangle(win: _CursesWindow, uly: int, ulx: int, lry: int, lrx: int) -> None: ... +if sys.platform != "win32": + from _curses import _CursesWindow + def rectangle(win: _CursesWindow, uly: int, ulx: int, lry: int, lrx: int) -> None: ... -class Textbox: - stripspaces: bool - def __init__(self, win: _CursesWindow, insert_mode: bool = ...) -> None: ... - def edit(self, validate: Callable[[int], int] | None = ...) -> str: ... - def do_command(self, ch: str | int) -> None: ... 
- def gather(self) -> str: ... + class Textbox: + stripspaces: bool + def __init__(self, win: _CursesWindow, insert_mode: bool = ...) -> None: ... + def edit(self, validate: Callable[[int], int] | None = ...) -> str: ... + def do_command(self, ch: str | int) -> None: ... + def gather(self) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/dataclasses.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/dataclasses.pyi index b6b76af979f9..f8e4044932ed 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/dataclasses.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/dataclasses.pyi @@ -1,7 +1,9 @@ +import enum import sys import types -from typing import Any, Callable, Generic, Iterable, Mapping, Tuple, Type, TypeVar, overload -from typing_extensions import Protocol +from builtins import type as Type # alias to avoid name clashes with fields named "type" +from typing import Any, Callable, Generic, Iterable, Mapping, Protocol, TypeVar, overload +from typing_extensions import Literal if sys.version_info >= (3, 9): from types import GenericAlias @@ -9,9 +11,47 @@ if sys.version_info >= (3, 9): _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) -class _MISSING_TYPE: ... 
+if sys.version_info >= (3, 10): + __all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "KW_ONLY", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", + ] +else: + __all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", + ] + +# define _MISSING_TYPE as an enum within the type stubs, +# even though that is not really its type at runtime +# this allows us to use Literal[_MISSING_TYPE.MISSING] +# for background, see: +# https://github.com/python/typeshed/pull/5900#issuecomment-895513797 +class _MISSING_TYPE(enum.Enum): + MISSING = enum.auto() -MISSING: _MISSING_TYPE +MISSING = _MISSING_TYPE.MISSING if sys.version_info >= (3, 10): class KW_ONLY: ... @@ -19,17 +59,17 @@ if sys.version_info >= (3, 10): @overload def asdict(obj: Any) -> dict[str, Any]: ... @overload -def asdict(obj: Any, *, dict_factory: Callable[[list[Tuple[str, Any]]], _T]) -> _T: ... +def asdict(obj: Any, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload -def astuple(obj: Any) -> Tuple[Any, ...]: ... +def astuple(obj: Any) -> tuple[Any, ...]: ... @overload def astuple(obj: Any, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... if sys.version_info >= (3, 10): @overload - def dataclass(__cls: Type[_T]) -> Type[_T]: ... + def dataclass(__cls: type[_T]) -> type[_T]: ... @overload - def dataclass(__cls: None) -> Callable[[Type[_T]], Type[_T]]: ... + def dataclass(__cls: None) -> Callable[[type[_T]], type[_T]]: ... @overload def dataclass( *, @@ -42,28 +82,28 @@ if sys.version_info >= (3, 10): match_args: bool = ..., kw_only: bool = ..., slots: bool = ..., - ) -> Callable[[Type[_T]], Type[_T]]: ... + ) -> Callable[[type[_T]], type[_T]]: ... 
elif sys.version_info >= (3, 8): # cls argument is now positional-only @overload - def dataclass(__cls: Type[_T]) -> Type[_T]: ... + def dataclass(__cls: type[_T]) -> type[_T]: ... @overload - def dataclass(__cls: None) -> Callable[[Type[_T]], Type[_T]]: ... + def dataclass(__cls: None) -> Callable[[type[_T]], type[_T]]: ... @overload def dataclass( *, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ... - ) -> Callable[[Type[_T]], Type[_T]]: ... + ) -> Callable[[type[_T]], type[_T]]: ... else: @overload - def dataclass(_cls: Type[_T]) -> Type[_T]: ... + def dataclass(_cls: type[_T]) -> type[_T]: ... @overload - def dataclass(_cls: None) -> Callable[[Type[_T]], Type[_T]]: ... + def dataclass(_cls: None) -> Callable[[type[_T]], type[_T]]: ... @overload def dataclass( *, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ... - ) -> Callable[[Type[_T]], Type[_T]]: ... + ) -> Callable[[type[_T]], type[_T]]: ... # See https://github.com/python/mypy/issues/10750 class _DefaultFactory(Protocol[_T_co]): @@ -72,15 +112,15 @@ class _DefaultFactory(Protocol[_T_co]): class Field(Generic[_T]): name: str type: Type[_T] - default: _T - default_factory: _DefaultFactory[_T] + default: _T | Literal[_MISSING_TYPE.MISSING] + default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING] repr: bool hash: bool | None init: bool compare: bool metadata: types.MappingProxyType[Any, Any] if sys.version_info >= (3, 10): - kw_only: bool + kw_only: bool | Literal[_MISSING_TYPE.MISSING] def __init__( self, default: _T, @@ -103,6 +143,8 @@ class Field(Generic[_T]): compare: bool, metadata: Mapping[Any, Any], ) -> None: ... + + def __set_name__(self, owner: Type[Any], name: str) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -173,7 +215,7 @@ else: metadata: Mapping[Any, Any] | None = ..., ) -> Any: ... 
-def fields(class_or_instance: Any) -> Tuple[Field[Any], ...]: ... +def fields(class_or_instance: Any) -> tuple[Field[Any], ...]: ... def is_dataclass(obj: Any) -> bool: ... class FrozenInstanceError(AttributeError): ... @@ -190,9 +232,9 @@ class InitVar(Generic[_T]): if sys.version_info >= (3, 10): def make_dataclass( cls_name: str, - fields: Iterable[str | Tuple[str, type] | Tuple[str, type, Field[Any]]], + fields: Iterable[str | tuple[str, type] | tuple[str, type, Field[Any]]], *, - bases: Tuple[type, ...] = ..., + bases: tuple[type, ...] = ..., namespace: dict[str, Any] | None = ..., init: bool = ..., repr: bool = ..., @@ -201,15 +243,16 @@ if sys.version_info >= (3, 10): unsafe_hash: bool = ..., frozen: bool = ..., match_args: bool = ..., + kw_only: bool = ..., slots: bool = ..., ) -> type: ... else: def make_dataclass( cls_name: str, - fields: Iterable[str | Tuple[str, type] | Tuple[str, type, Field[Any]]], + fields: Iterable[str | tuple[str, type] | tuple[str, type, Field[Any]]], *, - bases: Tuple[type, ...] = ..., + bases: tuple[type, ...] 
= ..., namespace: dict[str, Any] | None = ..., init: bool = ..., repr: bool = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/datetime.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/datetime.pyi index 22782d9d46e4..df33fb88f37e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/datetime.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/datetime.pyi @@ -1,21 +1,27 @@ import sys +from _typeshed import Self from time import struct_time -from typing import ClassVar, NamedTuple, SupportsAbs, Tuple, Type, TypeVar, overload +from typing import ClassVar, NamedTuple, NoReturn, SupportsAbs, TypeVar, overload +from typing_extensions import Literal, final -_S = TypeVar("_S") +if sys.version_info >= (3, 9): + __all__ = ("date", "datetime", "time", "timedelta", "timezone", "tzinfo", "MINYEAR", "MAXYEAR") + +_D = TypeVar("_D", bound=date) -MINYEAR: int -MAXYEAR: int +MINYEAR: Literal[1] +MAXYEAR: Literal[9999] class tzinfo: - def tzname(self, dt: datetime | None) -> str | None: ... - def utcoffset(self, dt: datetime | None) -> timedelta | None: ... - def dst(self, dt: datetime | None) -> timedelta | None: ... - def fromutc(self, dt: datetime) -> datetime: ... + def tzname(self, __dt: datetime | None) -> str | None: ... + def utcoffset(self, __dt: datetime | None) -> timedelta | None: ... + def dst(self, __dt: datetime | None) -> timedelta | None: ... + def fromutc(self, __dt: datetime) -> datetime: ... # Alias required to avoid name conflicts with date(time).tzinfo. _tzinfo = tzinfo +@final class timezone(tzinfo): utc: ClassVar[timezone] min: ClassVar[timezone] @@ -33,19 +39,20 @@ class date: min: ClassVar[date] max: ClassVar[date] resolution: ClassVar[timedelta] - def __new__(cls: Type[_S], year: int, month: int, day: int) -> _S: ... + def __new__(cls: type[Self], year: int, month: int, day: int) -> Self: ... @classmethod - def fromtimestamp(cls: Type[_S], __timestamp: float) -> _S: ... 
+ def fromtimestamp(cls: type[Self], __timestamp: float) -> Self: ... @classmethod - def today(cls: Type[_S]) -> _S: ... + def today(cls: type[Self]) -> Self: ... @classmethod - def fromordinal(cls: Type[_S], n: int) -> _S: ... + def fromordinal(cls: type[Self], __n: int) -> Self: ... if sys.version_info >= (3, 7): @classmethod - def fromisoformat(cls: Type[_S], date_string: str) -> _S: ... + def fromisoformat(cls: type[Self], __date_string: str) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def fromisocalendar(cls: Type[_S], year: int, week: int, day: int) -> _S: ... + def fromisocalendar(cls: type[Self], year: int, week: int, day: int) -> Self: ... + @property def year(self) -> int: ... @property @@ -53,40 +60,50 @@ class date: @property def day(self) -> int: ... def ctime(self) -> str: ... - def strftime(self, fmt: str) -> str: ... - def __format__(self, fmt: str) -> str: ... + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def isoformat(self) -> str: ... def timetuple(self) -> struct_time: ... def toordinal(self) -> int: ... def replace(self, year: int = ..., month: int = ..., day: int = ...) -> date: ... - def __le__(self, other: date) -> bool: ... - def __lt__(self, other: date) -> bool: ... - def __ge__(self, other: date) -> bool: ... - def __gt__(self, other: date) -> bool: ... + def __le__(self, __other: date) -> bool: ... + def __lt__(self, __other: date) -> bool: ... + def __ge__(self, __other: date) -> bool: ... + def __gt__(self, __other: date) -> bool: ... if sys.version_info >= (3, 8): - def __add__(self: _S, other: timedelta) -> _S: ... - def __radd__(self: _S, other: timedelta) -> _S: ... + def __add__(self: Self, __other: timedelta) -> Self: ... + def __radd__(self: Self, __other: timedelta) -> Self: ... + @overload + def __sub__(self: Self, __other: timedelta) -> Self: ... + @overload + def __sub__(self, __other: datetime) -> NoReturn: ... 
+ @overload + def __sub__(self: _D, __other: _D) -> timedelta: ... else: - def __add__(self, other: timedelta) -> date: ... - def __radd__(self, other: timedelta) -> date: ... - @overload - def __sub__(self, other: timedelta) -> date: ... - @overload - def __sub__(self, other: date) -> timedelta: ... + # Prior to Python 3.8, arithmetic operations always returned `date`, even in subclasses + def __add__(self, __other: timedelta) -> date: ... + def __radd__(self, __other: timedelta) -> date: ... + @overload + def __sub__(self, __other: timedelta) -> date: ... + @overload + def __sub__(self, __other: datetime) -> NoReturn: ... + @overload + def __sub__(self, __other: date) -> timedelta: ... + def __hash__(self) -> int: ... def weekday(self) -> int: ... def isoweekday(self) -> int: ... if sys.version_info >= (3, 9): def isocalendar(self) -> _IsoCalendarDate: ... else: - def isocalendar(self) -> Tuple[int, int, int]: ... + def isocalendar(self) -> tuple[int, int, int]: ... class time: min: ClassVar[time] max: ClassVar[time] resolution: ClassVar[timedelta] def __new__( - cls: Type[_S], + cls: type[Self], hour: int = ..., minute: int = ..., second: int = ..., @@ -94,7 +111,7 @@ class time: tzinfo: _tzinfo | None = ..., *, fold: int = ..., - ) -> _S: ... + ) -> Self: ... @property def hour(self) -> int: ... @property @@ -107,17 +124,18 @@ class time: def tzinfo(self) -> _tzinfo | None: ... @property def fold(self) -> int: ... - def __le__(self, other: time) -> bool: ... - def __lt__(self, other: time) -> bool: ... - def __ge__(self, other: time) -> bool: ... - def __gt__(self, other: time) -> bool: ... + def __le__(self, __other: time) -> bool: ... + def __lt__(self, __other: time) -> bool: ... + def __ge__(self, __other: time) -> bool: ... + def __gt__(self, __other: time) -> bool: ... def __hash__(self) -> int: ... def isoformat(self, timespec: str = ...) -> str: ... 
if sys.version_info >= (3, 7): @classmethod - def fromisoformat(cls: Type[_S], time_string: str) -> _S: ... - def strftime(self, fmt: str) -> str: ... - def __format__(self, fmt: str) -> str: ... + def fromisoformat(cls: type[Self], __time_string: str) -> Self: ... + + def strftime(self, __format: str) -> str: ... + def __format__(self, __fmt: str) -> str: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... def dst(self) -> timedelta | None: ... @@ -140,7 +158,7 @@ class timedelta(SupportsAbs[timedelta]): max: ClassVar[timedelta] resolution: ClassVar[timedelta] def __new__( - cls: Type[_S], + cls: type[Self], days: float = ..., seconds: float = ..., microseconds: float = ..., @@ -148,7 +166,7 @@ class timedelta(SupportsAbs[timedelta]): minutes: float = ..., hours: float = ..., weeks: float = ..., - ) -> _S: ... + ) -> Self: ... @property def days(self) -> int: ... @property @@ -156,29 +174,29 @@ class timedelta(SupportsAbs[timedelta]): @property def microseconds(self) -> int: ... def total_seconds(self) -> float: ... - def __add__(self, other: timedelta) -> timedelta: ... - def __radd__(self, other: timedelta) -> timedelta: ... - def __sub__(self, other: timedelta) -> timedelta: ... - def __rsub__(self, other: timedelta) -> timedelta: ... + def __add__(self, __other: timedelta) -> timedelta: ... + def __radd__(self, __other: timedelta) -> timedelta: ... + def __sub__(self, __other: timedelta) -> timedelta: ... + def __rsub__(self, __other: timedelta) -> timedelta: ... def __neg__(self) -> timedelta: ... def __pos__(self) -> timedelta: ... def __abs__(self) -> timedelta: ... - def __mul__(self, other: float) -> timedelta: ... - def __rmul__(self, other: float) -> timedelta: ... + def __mul__(self, __other: float) -> timedelta: ... + def __rmul__(self, __other: float) -> timedelta: ... @overload - def __floordiv__(self, other: timedelta) -> int: ... + def __floordiv__(self, __other: timedelta) -> int: ... 
@overload - def __floordiv__(self, other: int) -> timedelta: ... + def __floordiv__(self, __other: int) -> timedelta: ... @overload - def __truediv__(self, other: timedelta) -> float: ... + def __truediv__(self, __other: timedelta) -> float: ... @overload - def __truediv__(self, other: float) -> timedelta: ... - def __mod__(self, other: timedelta) -> timedelta: ... - def __divmod__(self, other: timedelta) -> Tuple[int, timedelta]: ... - def __le__(self, other: timedelta) -> bool: ... - def __lt__(self, other: timedelta) -> bool: ... - def __ge__(self, other: timedelta) -> bool: ... - def __gt__(self, other: timedelta) -> bool: ... + def __truediv__(self, __other: float) -> timedelta: ... + def __mod__(self, __other: timedelta) -> timedelta: ... + def __divmod__(self, __other: timedelta) -> tuple[int, timedelta]: ... + def __le__(self, __other: timedelta) -> bool: ... + def __lt__(self, __other: timedelta) -> bool: ... + def __ge__(self, __other: timedelta) -> bool: ... + def __gt__(self, __other: timedelta) -> bool: ... def __bool__(self) -> bool: ... def __hash__(self) -> int: ... @@ -187,7 +205,7 @@ class datetime(date): max: ClassVar[datetime] resolution: ClassVar[timedelta] def __new__( - cls: Type[_S], + cls: type[Self], year: int, month: int, day: int, @@ -198,13 +216,7 @@ class datetime(date): tzinfo: _tzinfo | None = ..., *, fold: int = ..., - ) -> _S: ... - @property - def year(self) -> int: ... - @property - def month(self) -> int: ... - @property - def day(self) -> int: ... + ) -> Self: ... @property def hour(self) -> int: ... @property @@ -217,35 +229,32 @@ class datetime(date): def tzinfo(self) -> _tzinfo | None: ... @property def fold(self) -> int: ... + # The first parameter in `fromtimestamp` is actually positional-or-keyword, + # but it is named "timestamp" in the C implementation and "t" in the Python implementation, + # so it is only truly *safe* to pass it as a positional argument. 
@classmethod - def fromtimestamp(cls: Type[_S], t: float, tz: _tzinfo | None = ...) -> _S: ... - @classmethod - def utcfromtimestamp(cls: Type[_S], t: float) -> _S: ... + def fromtimestamp(cls: type[Self], __timestamp: float, tz: _tzinfo | None = ...) -> Self: ... @classmethod - def today(cls: Type[_S]) -> _S: ... - @classmethod - def fromordinal(cls: Type[_S], n: int) -> _S: ... + def utcfromtimestamp(cls: type[Self], __t: float) -> Self: ... if sys.version_info >= (3, 8): @classmethod - def now(cls: Type[_S], tz: _tzinfo | None = ...) -> _S: ... + def now(cls: type[Self], tz: _tzinfo | None = ...) -> Self: ... else: @overload @classmethod - def now(cls: Type[_S], tz: None = ...) -> _S: ... + def now(cls: type[Self], tz: None = ...) -> Self: ... @overload @classmethod def now(cls, tz: _tzinfo) -> datetime: ... + @classmethod - def utcnow(cls: Type[_S]) -> _S: ... + def utcnow(cls: type[Self]) -> Self: ... @classmethod def combine(cls, date: _date, time: _time, tzinfo: _tzinfo | None = ...) -> datetime: ... if sys.version_info >= (3, 7): @classmethod - def fromisoformat(cls: Type[_S], date_string: str) -> _S: ... - def strftime(self, fmt: str) -> str: ... - def __format__(self, fmt: str) -> str: ... - def toordinal(self) -> int: ... - def timetuple(self) -> struct_time: ... + def fromisoformat(cls: type[Self], __date_string: str) -> Self: ... + def timestamp(self) -> float: ... def utctimetuple(self) -> struct_time: ... def date(self) -> _date: ... @@ -265,34 +274,35 @@ class datetime(date): fold: int = ..., ) -> datetime: ... if sys.version_info >= (3, 8): - def astimezone(self: _S, tz: _tzinfo | None = ...) -> _S: ... + def astimezone(self: Self, tz: _tzinfo | None = ...) -> Self: ... else: def astimezone(self, tz: _tzinfo | None = ...) -> datetime: ... + def ctime(self) -> str: ... def isoformat(self, sep: str = ..., timespec: str = ...) -> str: ... @classmethod - def strptime(cls, date_string: str, format: str) -> datetime: ... 
+ def strptime(cls, __date_string: str, __format: str) -> datetime: ... def utcoffset(self) -> timedelta | None: ... def tzname(self) -> str | None: ... def dst(self) -> timedelta | None: ... - def __le__(self, other: datetime) -> bool: ... # type: ignore - def __lt__(self, other: datetime) -> bool: ... # type: ignore - def __ge__(self, other: datetime) -> bool: ... # type: ignore - def __gt__(self, other: datetime) -> bool: ... # type: ignore + def __le__(self, __other: datetime) -> bool: ... # type: ignore[override] + def __lt__(self, __other: datetime) -> bool: ... # type: ignore[override] + def __ge__(self, __other: datetime) -> bool: ... # type: ignore[override] + def __gt__(self, __other: datetime) -> bool: ... # type: ignore[override] if sys.version_info >= (3, 8): - def __add__(self: _S, other: timedelta) -> _S: ... - def __radd__(self: _S, other: timedelta) -> _S: ... + @overload # type: ignore[override] + def __sub__(self: Self, __other: timedelta) -> Self: ... + @overload + def __sub__(self: _D, __other: _D) -> timedelta: ... else: - def __add__(self, other: timedelta) -> datetime: ... - def __radd__(self, other: timedelta) -> datetime: ... - @overload # type: ignore - def __sub__(self, other: datetime) -> timedelta: ... - @overload - def __sub__(self, other: timedelta) -> datetime: ... - def __hash__(self) -> int: ... - def weekday(self) -> int: ... - def isoweekday(self) -> int: ... + # Prior to Python 3.8, arithmetic operations always returned `datetime`, even in subclasses + def __add__(self, __other: timedelta) -> datetime: ... + def __radd__(self, __other: timedelta) -> datetime: ... + @overload # type: ignore[override] + def __sub__(self, __other: datetime) -> timedelta: ... + @overload + def __sub__(self, __other: timedelta) -> datetime: ... if sys.version_info >= (3, 9): def isocalendar(self) -> _IsoCalendarDate: ... else: - def isocalendar(self) -> Tuple[int, int, int]: ... + def isocalendar(self) -> tuple[int, int, int]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/__init__.pyi index 9b9f92ccaa86..630a0d16c1c7 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/__init__.pyi @@ -1,8 +1,10 @@ from _typeshed import Self from types import TracebackType -from typing import Iterator, MutableMapping, Tuple, Type, Union +from typing import Iterator, MutableMapping, Union from typing_extensions import Literal +__all__ = ["open", "whichdb", "error"] + _KeyType = Union[str, bytes] _ValueType = Union[str, bytes] _TFlags = Literal[ @@ -82,12 +84,12 @@ class _Database(MutableMapping[_KeyType, bytes]): def __del__(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class _error(Exception): ... -error = Tuple[Type[_error], Type[OSError]] +error: tuple[type[_error], type[OSError]] def whichdb(filename: str) -> str: ... def open(file: str, flag: _TFlags = ..., mode: int = ...) -> _Database: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/dumb.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/dumb.pyi index 0a941b070754..917fe378b261 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/dumb.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/dumb.pyi @@ -1,6 +1,8 @@ from _typeshed import Self from types import TracebackType -from typing import Iterator, MutableMapping, Type, Union +from typing import Iterator, MutableMapping, Union + +__all__ = ["error", "open"] _KeyType = Union[str, bytes] _ValueType = Union[str, bytes] @@ -20,7 +22,7 @@ class _Database(MutableMapping[_KeyType, bytes]): def __del__(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def open(file: str, flag: str = ..., mode: int = ...) -> _Database: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/gnu.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/gnu.pyi index 7cec827e8992..ef4706b97f74 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/gnu.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/gnu.pyi @@ -1,11 +1,13 @@ from _typeshed import Self from types import TracebackType -from typing import Type, TypeVar, Union, overload +from typing import TypeVar, Union, overload _T = TypeVar("_T") _KeyType = Union[str, bytes] _ValueType = Union[str, bytes] +open_flags: str + class error(OSError): ... # Actual typename gdbm, not exposed by the implementation @@ -22,7 +24,7 @@ class _gdbm: def __len__(self) -> int: ... def __enter__(self: Self) -> Self: ... 
def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @overload def get(self, k: _KeyType) -> bytes | None: ... @@ -31,7 +33,7 @@ class _gdbm: def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... # Don't exist at runtime - __new__: None # type: ignore - __init__: None # type: ignore + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] def open(__filename: str, __flags: str = ..., __mode: int = ...) -> _gdbm: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/ndbm.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/ndbm.pyi index a4b35a309dbd..c49ad82c53d4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/dbm/ndbm.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/dbm/ndbm.pyi @@ -1,6 +1,6 @@ from _typeshed import Self from types import TracebackType -from typing import Type, TypeVar, Union, overload +from typing import TypeVar, Union, overload _T = TypeVar("_T") _KeyType = Union[str, bytes] @@ -20,7 +20,7 @@ class _dbm: def __del__(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... @overload def get(self, k: _KeyType) -> bytes | None: ... @@ -29,7 +29,7 @@ class _dbm: def keys(self) -> list[bytes]: ... def setdefault(self, k: _KeyType, default: _ValueType = ...) -> bytes: ... 
# Don't exist at runtime - __new__: None # type: ignore - __init__: None # type: ignore + __new__: None # type: ignore[assignment] + __init__: None # type: ignore[assignment] def open(__filename: str, __flags: str = ..., __mode: int = ...) -> _dbm: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/decimal.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/decimal.pyi index a21ec92a3d65..dfd0670e474e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/decimal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/decimal.pyi @@ -1,15 +1,18 @@ import numbers +import sys +from _typeshed import Self from types import TracebackType -from typing import Any, Container, NamedTuple, Sequence, Tuple, Type, TypeVar, Union, overload +from typing import Any, Container, NamedTuple, Sequence, Union, overload _Decimal = Union[Decimal, int] -_DecimalNew = Union[Decimal, float, str, Tuple[int, Sequence[int], int]] +_DecimalNew = Union[Decimal, float, str, tuple[int, Sequence[int], int]] _ComparableNum = Union[Decimal, float, numbers.Rational] -_DecimalT = TypeVar("_DecimalT", bound=Decimal) + +__libmpdec_version__: str class DecimalTuple(NamedTuple): sign: int - digits: Tuple[int, ...] + digits: tuple[int, ...] exponent: int ROUND_DOWN: str @@ -21,6 +24,8 @@ ROUND_UP: str ROUND_HALF_DOWN: str ROUND_05UP: str +if sys.version_info >= (3, 7): + HAVE_CONTEXTVAR: bool HAVE_THREADS: bool MAX_EMAX: int MAX_PREC: int @@ -46,40 +51,39 @@ def setcontext(__context: Context) -> None: ... def getcontext() -> Context: ... def localcontext(ctx: Context | None = ...) -> _ContextManager: ... -class Decimal(object): - def __new__(cls: Type[_DecimalT], value: _DecimalNew = ..., context: Context | None = ...) -> _DecimalT: ... +class Decimal: + def __new__(cls: type[Self], value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... @classmethod - def from_float(cls, __f: float) -> Decimal: ... + def from_float(cls: type[Self], __f: float) -> Self: ... 
def __bool__(self) -> bool: ... def compare(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... def __hash__(self) -> int: ... def as_tuple(self) -> DecimalTuple: ... - def as_integer_ratio(self) -> Tuple[int, int]: ... + def as_integer_ratio(self) -> tuple[int, int]: ... def to_eng_string(self, context: Context | None = ...) -> str: ... def __abs__(self) -> Decimal: ... - def __add__(self, other: _Decimal) -> Decimal: ... - def __divmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ... - def __eq__(self, other: object) -> bool: ... - def __floordiv__(self, other: _Decimal) -> Decimal: ... - def __ge__(self, other: _ComparableNum) -> bool: ... - def __gt__(self, other: _ComparableNum) -> bool: ... - def __le__(self, other: _ComparableNum) -> bool: ... - def __lt__(self, other: _ComparableNum) -> bool: ... - def __mod__(self, other: _Decimal) -> Decimal: ... - def __mul__(self, other: _Decimal) -> Decimal: ... + def __add__(self, __other: _Decimal) -> Decimal: ... + def __divmod__(self, __other: _Decimal) -> tuple[Decimal, Decimal]: ... + def __eq__(self, __other: object) -> bool: ... + def __floordiv__(self, __other: _Decimal) -> Decimal: ... + def __ge__(self, __other: _ComparableNum) -> bool: ... + def __gt__(self, __other: _ComparableNum) -> bool: ... + def __le__(self, __other: _ComparableNum) -> bool: ... + def __lt__(self, __other: _ComparableNum) -> bool: ... + def __mod__(self, __other: _Decimal) -> Decimal: ... + def __mul__(self, __other: _Decimal) -> Decimal: ... def __neg__(self) -> Decimal: ... def __pos__(self) -> Decimal: ... - def __pow__(self, other: _Decimal, modulo: _Decimal | None = ...) -> Decimal: ... - def __radd__(self, other: _Decimal) -> Decimal: ... - def __rdivmod__(self, other: _Decimal) -> Tuple[Decimal, Decimal]: ... - def __rfloordiv__(self, other: _Decimal) -> Decimal: ... - def __rmod__(self, other: _Decimal) -> Decimal: ... - def __rmul__(self, other: _Decimal) -> Decimal: ... 
- def __rsub__(self, other: _Decimal) -> Decimal: ... - def __rtruediv__(self, other: _Decimal) -> Decimal: ... - def __str__(self) -> str: ... - def __sub__(self, other: _Decimal) -> Decimal: ... - def __truediv__(self, other: _Decimal) -> Decimal: ... + def __pow__(self, __other: _Decimal, __modulo: _Decimal | None = ...) -> Decimal: ... + def __radd__(self, __other: _Decimal) -> Decimal: ... + def __rdivmod__(self, __other: _Decimal) -> tuple[Decimal, Decimal]: ... + def __rfloordiv__(self, __other: _Decimal) -> Decimal: ... + def __rmod__(self, __other: _Decimal) -> Decimal: ... + def __rmul__(self, __other: _Decimal) -> Decimal: ... + def __rsub__(self, __other: _Decimal) -> Decimal: ... + def __rtruediv__(self, __other: _Decimal) -> Decimal: ... + def __sub__(self, __other: _Decimal) -> Decimal: ... + def __truediv__(self, __other: _Decimal) -> Decimal: ... def remainder_near(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... def __float__(self) -> float: ... def __int__(self) -> int: ... @@ -93,11 +97,11 @@ class Decimal(object): @overload def __round__(self) -> int: ... @overload - def __round__(self, ndigits: int) -> Decimal: ... + def __round__(self, __ndigits: int) -> Decimal: ... def __floor__(self) -> int: ... def __ceil__(self) -> int: ... def fma(self, other: _Decimal, third: _Decimal, context: Context | None = ...) -> Decimal: ... - def __rpow__(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... + def __rpow__(self, __other: _Decimal, __context: Context | None = ...) -> Decimal: ... def normalize(self, context: Context | None = ...) -> Decimal: ... def quantize(self, exp: _Decimal, rounding: str | None = ..., context: Context | None = ...) -> Decimal: ... def same_quantum(self, other: _Decimal, context: Context | None = ...) -> bool: ... @@ -143,21 +147,21 @@ class Decimal(object): def rotate(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... 
def scaleb(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... def shift(self, other: _Decimal, context: Context | None = ...) -> Decimal: ... - def __reduce__(self) -> Tuple[Type[Decimal], Tuple[str]]: ... - def __copy__(self) -> Decimal: ... - def __deepcopy__(self, memo: Any) -> Decimal: ... - def __format__(self, specifier: str, context: Context | None = ...) -> str: ... + def __reduce__(self: Self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self: Self) -> Self: ... + def __deepcopy__(self: Self, __memo: Any) -> Self: ... + def __format__(self, __specifier: str, __context: Context | None = ...) -> str: ... -class _ContextManager(object): +class _ContextManager: new_context: Context saved_context: Context def __init__(self, new_context: Context) -> None: ... def __enter__(self) -> Context: ... - def __exit__(self, t: Type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... -_TrapType = Type[DecimalException] +_TrapType = type[DecimalException] -class Context(object): +class Context: prec: int rounding: str Emin: int @@ -180,8 +184,8 @@ class Context(object): ) -> None: ... # __setattr__() only allows to set a specific set of attributes, # already defined above. - def __delattr__(self, name: str) -> None: ... - def __reduce__(self) -> Tuple[Type[Context], Tuple[Any, ...]]: ... + def __delattr__(self, __name: str) -> None: ... + def __reduce__(self: Self) -> tuple[type[Self], tuple[Any, ...]]: ... def clear_flags(self) -> None: ... def clear_traps(self) -> None: ... def copy(self) -> Context: ... @@ -204,7 +208,7 @@ class Context(object): def copy_sign(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... def divide(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... def divide_int(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... 
- def divmod(self, __x: _Decimal, __y: _Decimal) -> Tuple[Decimal, Decimal]: ... + def divmod(self, __x: _Decimal, __y: _Decimal) -> tuple[Decimal, Decimal]: ... def exp(self, __x: _Decimal) -> Decimal: ... def fma(self, __x: _Decimal, __y: _Decimal, __z: _Decimal) -> Decimal: ... def is_canonical(self, __x: _Decimal) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/difflib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/difflib.pyi index 5db947293e42..a572430155e9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/difflib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/difflib.pyi @@ -1,11 +1,25 @@ import sys -from typing import Any, AnyStr, Callable, Generic, Iterable, Iterator, NamedTuple, Sequence, Tuple, TypeVar, Union, overload +from typing import Any, AnyStr, Callable, Generic, Iterable, Iterator, NamedTuple, Sequence, TypeVar, overload if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = [ + "get_close_matches", + "ndiff", + "restore", + "SequenceMatcher", + "Differ", + "IS_CHARACTER_JUNK", + "IS_LINE_JUNK", + "context_diff", + "unified_diff", + "diff_bytes", + "HtmlDiff", + "Match", +] + _T = TypeVar("_T") -_JunkCallback = Union[Callable[[str], bool], Callable[[str], bool]] class Match(NamedTuple): a: int @@ -23,9 +37,10 @@ class SequenceMatcher(Generic[_T]): def find_longest_match(self, alo: int = ..., ahi: int | None = ..., blo: int = ..., bhi: int | None = ...) -> Match: ... else: def find_longest_match(self, alo: int, ahi: int, blo: int, bhi: int) -> Match: ... + def get_matching_blocks(self) -> list[Match]: ... - def get_opcodes(self) -> list[Tuple[str, int, int, int, int]]: ... - def get_grouped_opcodes(self, n: int = ...) -> Iterable[list[Tuple[str, int, int, int, int]]]: ... + def get_opcodes(self) -> list[tuple[str, int, int, int, int]]: ... + def get_grouped_opcodes(self, n: int = ...) -> Iterable[list[tuple[str, int, int, int, int]]]: ... 
def ratio(self) -> float: ... def quick_ratio(self) -> float: ... def real_quick_ratio(self) -> float: ... @@ -34,16 +49,14 @@ class SequenceMatcher(Generic[_T]): # mypy thinks the signatures of the overloads overlap, but the types still work fine @overload -def get_close_matches( # type: ignore - word: AnyStr, possibilities: Iterable[AnyStr], n: int = ..., cutoff: float = ... -) -> list[AnyStr]: ... +def get_close_matches(word: AnyStr, possibilities: Iterable[AnyStr], n: int = ..., cutoff: float = ...) -> list[AnyStr]: ... # type: ignore[misc] @overload def get_close_matches( word: Sequence[_T], possibilities: Iterable[Sequence[_T]], n: int = ..., cutoff: float = ... ) -> list[Sequence[_T]]: ... class Differ: - def __init__(self, linejunk: _JunkCallback | None = ..., charjunk: _JunkCallback | None = ...) -> None: ... + def __init__(self, linejunk: Callable[[str], bool] | None = ..., charjunk: Callable[[str], bool] | None = ...) -> None: ... def compare(self, a: Sequence[str], b: Sequence[str]) -> Iterator[str]: ... def IS_LINE_JUNK(line: str, pat: Any = ...) -> bool: ... # pat is undocumented @@ -69,16 +82,16 @@ def context_diff( lineterm: str = ..., ) -> Iterator[str]: ... def ndiff( - a: Sequence[str], b: Sequence[str], linejunk: _JunkCallback | None = ..., charjunk: _JunkCallback | None = ... + a: Sequence[str], b: Sequence[str], linejunk: Callable[[str], bool] | None = ..., charjunk: Callable[[str], bool] | None = ... ) -> Iterator[str]: ... -class HtmlDiff(object): +class HtmlDiff: def __init__( self, tabsize: int = ..., wrapcolumn: int | None = ..., - linejunk: _JunkCallback | None = ..., - charjunk: _JunkCallback | None = ..., + linejunk: Callable[[str], bool] | None = ..., + charjunk: Callable[[str], bool] | None = ..., ) -> None: ... 
def make_file( self, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/dis.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/dis.pyi index d9e3c7213c84..dcb82cb7aefc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/dis.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/dis.pyi @@ -1,27 +1,41 @@ import sys import types -from opcode import ( - EXTENDED_ARG as EXTENDED_ARG, - HAVE_ARGUMENT as HAVE_ARGUMENT, - cmp_op as cmp_op, - hascompare as hascompare, - hasconst as hasconst, - hasfree as hasfree, - hasjabs as hasjabs, - hasjrel as hasjrel, - haslocal as haslocal, - hasname as hasname, - hasnargs as hasnargs, - opmap as opmap, - opname as opname, - stack_effect as stack_effect, -) -from typing import IO, Any, Callable, Iterator, NamedTuple, Tuple, Union +from _typeshed import Self +from opcode import * # `dis` re-exports it as a part of public API +from typing import IO, Any, Callable, Iterator, NamedTuple, Union + +__all__ = [ + "code_info", + "dis", + "disassemble", + "distb", + "disco", + "findlinestarts", + "findlabels", + "show_code", + "get_instructions", + "Instruction", + "Bytecode", + "cmp_op", + "hasconst", + "hasname", + "hasjrel", + "hasjabs", + "haslocal", + "hascompare", + "hasfree", + "opname", + "opmap", + "HAVE_ARGUMENT", + "EXTENDED_ARG", + "hasnargs", + "stack_effect", +] # Strictly this should not have to include Callable, but mypy doesn't use FunctionType # for functions (python/mypy#3171) -_have_code = Union[types.MethodType, types.FunctionType, types.CodeType, type, Callable[..., Any]] -_have_code_or_string = Union[_have_code, str, bytes] +_HaveCodeType = Union[types.MethodType, types.FunctionType, types.CodeType, type, Callable[..., Any]] +_HaveCodeOrStringType = Union[_HaveCodeType, str, bytes] class Instruction(NamedTuple): opname: str @@ -36,29 +50,28 @@ class Instruction(NamedTuple): class Bytecode: codeobj: types.CodeType first_line: int - def __init__(self, x: _have_code_or_string, *, 
first_line: int | None = ..., current_offset: int | None = ...) -> None: ... + def __init__(self, x: _HaveCodeOrStringType, *, first_line: int | None = ..., current_offset: int | None = ...) -> None: ... def __iter__(self) -> Iterator[Instruction]: ... - def __repr__(self) -> str: ... def info(self) -> str: ... def dis(self) -> str: ... @classmethod - def from_traceback(cls, tb: types.TracebackType) -> Bytecode: ... + def from_traceback(cls: type[Self], tb: types.TracebackType) -> Self: ... COMPILER_FLAG_NAMES: dict[int, str] -def findlabels(code: _have_code) -> list[int]: ... -def findlinestarts(code: _have_code) -> Iterator[Tuple[int, int]]: ... +def findlabels(code: _HaveCodeType) -> list[int]: ... +def findlinestarts(code: _HaveCodeType) -> Iterator[tuple[int, int]]: ... def pretty_flags(flags: int) -> str: ... -def code_info(x: _have_code_or_string) -> str: ... +def code_info(x: _HaveCodeOrStringType) -> str: ... if sys.version_info >= (3, 7): - def dis(x: _have_code_or_string | None = ..., *, file: IO[str] | None = ..., depth: int | None = ...) -> None: ... + def dis(x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ..., depth: int | None = ...) -> None: ... else: - def dis(x: _have_code_or_string | None = ..., *, file: IO[str] | None = ...) -> None: ... + def dis(x: _HaveCodeOrStringType | None = ..., *, file: IO[str] | None = ...) -> None: ... def distb(tb: types.TracebackType | None = ..., *, file: IO[str] | None = ...) -> None: ... -def disassemble(co: _have_code, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... -def disco(co: _have_code, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... -def show_code(co: _have_code, *, file: IO[str] | None = ...) -> None: ... -def get_instructions(x: _have_code, *, first_line: int | None = ...) -> Iterator[Instruction]: ... +def disassemble(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... 
+def disco(co: _HaveCodeType, lasti: int = ..., *, file: IO[str] | None = ...) -> None: ... +def show_code(co: _HaveCodeType, *, file: IO[str] | None = ...) -> None: ... +def get_instructions(x: _HaveCodeType, *, first_line: int | None = ...) -> Iterator[Instruction]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/ccompiler.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/ccompiler.pyi index d21de4691503..7c7023ed0b65 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/ccompiler.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/ccompiler.pyi @@ -1,6 +1,6 @@ -from typing import Any, Callable, Optional, Tuple, Union +from typing import Any, Callable, Optional, Union -_Macro = Union[Tuple[str], Tuple[str, Optional[str]]] +_Macro = Union[tuple[str], tuple[str, Optional[str]]] def gen_lib_options( compiler: CCompiler, library_dirs: list[str], runtime_library_dirs: list[str], libraries: list[str] @@ -141,7 +141,7 @@ class CCompiler: def library_filename(self, libname: str, lib_type: str = ..., strip_dir: int = ..., output_dir: str = ...) -> str: ... def object_filenames(self, source_filenames: list[str], strip_dir: int = ..., output_dir: str = ...) -> list[str]: ... def shared_object_filename(self, basename: str, strip_dir: int = ..., output_dir: str = ...) -> str: ... - def execute(self, func: Callable[..., None], args: Tuple[Any, ...], msg: str | None = ..., level: int = ...) -> None: ... + def execute(self, func: Callable[..., None], args: tuple[Any, ...], msg: str | None = ..., level: int = ...) -> None: ... def spawn(self, cmd: list[str]) -> None: ... def mkpath(self, name: str, mode: int = ...) -> None: ... def move_file(self, src: str, dst: str) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/cmd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/cmd.pyi index dd2e1905adf5..96a048c93f41 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/cmd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/cmd.pyi @@ -1,9 +1,9 @@ from abc import abstractmethod from distutils.dist import Distribution -from typing import Any, Callable, Iterable, Tuple +from typing import Any, Callable, Iterable class Command: - sub_commands: list[Tuple[str, Callable[[Command], bool] | None]] + sub_commands: list[tuple[str, Callable[[Command], bool] | None]] def __init__(self, dist: Distribution) -> None: ... @abstractmethod def initialize_options(self) -> None: ... @@ -18,7 +18,7 @@ class Command: def ensure_filename(self, option: str) -> None: ... def ensure_dirname(self, option: str) -> None: ... def get_command_name(self) -> str: ... - def set_undefined_options(self, src_cmd: str, *option_pairs: Tuple[str, str]) -> None: ... + def set_undefined_options(self, src_cmd: str, *option_pairs: tuple[str, str]) -> None: ... def get_finalized_command(self, command: str, create: int = ...) -> Command: ... def reinitialize_command(self, command: Command | str, reinit_subcommands: int = ...) -> Command: ... def run_command(self, command: str) -> None: ... @@ -34,7 +34,7 @@ class Command: preserve_times: int = ..., link: str | None = ..., level: Any = ..., - ) -> Tuple[str, bool]: ... # level is not used + ) -> tuple[str, bool]: ... # level is not used def copy_tree( self, infile: str, @@ -57,7 +57,7 @@ class Command: ) -> str: ... 
def make_file( self, - infiles: str | list[str] | Tuple[str], + infiles: str | list[str] | tuple[str, ...], outfile: str, func: Callable[..., Any], args: list[Any], diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/__init__.pyi index e69de29bb2d1..4d804bcb7671 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/__init__.pyi @@ -0,0 +1,45 @@ +import sys + +from . import ( + bdist_dumb as bdist_dumb, + bdist_rpm as bdist_rpm, + build as build, + build_clib as build_clib, + build_ext as build_ext, + build_py as build_py, + build_scripts as build_scripts, + check as check, + clean as clean, + install as install, + install_data as install_data, + install_headers as install_headers, + install_lib as install_lib, + register as register, + sdist as sdist, + upload as upload, +) + +if sys.version_info < (3, 10): + from . import bdist_wininst as bdist_wininst + +__all__ = [ + "build", + "build_py", + "build_ext", + "build_clib", + "build_scripts", + "clean", + "install", + "install_lib", + "install_headers", + "install_scripts", + "install_data", + "sdist", + "register", + "bdist", + "bdist_dumb", + "bdist_rpm", + "bdist_wininst", + "check", + "upload", +] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi index e4f64ff6a59e..66202e841d3c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_msi.pyi @@ -5,6 +5,7 @@ from ..cmd import Command if sys.platform == "win32": from msilib import Dialog + class PyDialog(Dialog): def __init__(self, *args, **kw) -> None: ... 
def title(self, title) -> None: ... @@ -12,6 +13,7 @@ if sys.platform == "win32": def cancel(self, title, next, name: str = ..., active: int = ...): ... def next(self, title, next, name: str = ..., active: int = ...): ... def xbutton(self, name, title, next, xpos): ... + class bdist_msi(Command): description: str user_options: Any diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi index e69de29bb2d1..1091fb278493 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/bdist_wininst.pyi @@ -0,0 +1,16 @@ +from _typeshed import StrOrBytesPath +from distutils.cmd import Command +from typing import Any, ClassVar + +class bdist_wininst(Command): + description: ClassVar[str] + user_options: ClassVar[list[tuple[Any, ...]]] + boolean_options: ClassVar[list[str]] + + def initialize_options(self) -> None: ... + def finalize_options(self) -> None: ... + def run(self) -> None: ... + def get_inidata(self) -> str: ... + def create_exe(self, arcname: StrOrBytesPath, fullname: str, bitmap: StrOrBytesPath | None = ...) -> None: ... + def get_installer_filename(self, fullname: str) -> str: ... + def get_exe_bytes(self) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/check.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/check.pyi index 9149b540f715..36895d2c16f1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/check.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/check.pyi @@ -5,7 +5,7 @@ from ..cmd import Command _Reporter = Any # really docutils.utils.Reporter # Only defined if docutils is installed. 
-class SilentReporter(_Reporter): # type: ignore +class SilentReporter(_Reporter): messages: Any def __init__( self, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/install.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/install.pyi index 47fa8b08d1b2..661d256e6f07 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/install.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/command/install.pyi @@ -1,9 +1,9 @@ -from typing import Any, Tuple +from typing import Any from ..cmd import Command HAS_USER_SITE: bool -SCHEME_KEYS: Tuple[str, ...] +SCHEME_KEYS: tuple[str, ...] INSTALL_SCHEMES: dict[str, dict[Any, Any]] class install(Command): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/core.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/core.pyi index dc0870895cf9..6564c9a86ded 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/core.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/core.pyi @@ -1,7 +1,7 @@ from distutils.cmd import Command as Command from distutils.dist import Distribution as Distribution from distutils.extension import Extension as Extension -from typing import Any, Mapping, Tuple, Type +from typing import Any, Mapping def setup( *, @@ -20,21 +20,21 @@ def setup( scripts: list[str] = ..., ext_modules: list[Extension] = ..., classifiers: list[str] = ..., - distclass: Type[Distribution] = ..., + distclass: type[Distribution] = ..., script_name: str = ..., script_args: list[str] = ..., options: Mapping[str, Any] = ..., license: str = ..., keywords: list[str] | str = ..., platforms: list[str] | str = ..., - cmdclass: Mapping[str, Type[Command]] = ..., - data_files: list[Tuple[str, list[str]]] = ..., + cmdclass: Mapping[str, type[Command]] = ..., + data_files: list[tuple[str, list[str]]] = ..., package_dir: Mapping[str, str] = ..., obsoletes: list[str] = ..., 
provides: list[str] = ..., requires: list[str] = ..., command_packages: list[str] = ..., - command_options: Mapping[str, Mapping[str, Tuple[Any, Any]]] = ..., + command_options: Mapping[str, Mapping[str, tuple[Any, Any]]] = ..., package_data: Mapping[str, list[str]] = ..., include_package_data: bool = ..., libraries: list[str] = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dep_util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dep_util.pyi index 595dcb80a38c..929d6ffd0c81 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dep_util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dep_util.pyi @@ -1,5 +1,3 @@ -from typing import Tuple - def newer(source: str, target: str) -> bool: ... -def newer_pairwise(sources: list[str], targets: list[str]) -> list[Tuple[str, str]]: ... +def newer_pairwise(sources: list[str], targets: list[str]) -> list[tuple[str, str]]: ... def newer_group(sources: list[str], target: str, missing: str = ...) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dist.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dist.pyi index ca3f108ab681..c5b3afe7cc3b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dist.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/dist.pyi @@ -1,6 +1,6 @@ from _typeshed import StrOrBytesPath, SupportsWrite from distutils.cmd import Command -from typing import IO, Any, Iterable, Mapping, Tuple, Type +from typing import IO, Any, Iterable, Mapping class DistributionMetadata: def __init__(self, path: int | StrOrBytesPath | None = ...) -> None: ... @@ -50,9 +50,9 @@ class DistributionMetadata: def set_obsoletes(self, value: Iterable[str]) -> None: ... class Distribution: - cmdclass: dict[str, Type[Command]] + cmdclass: dict[str, type[Command]] metadata: DistributionMetadata def __init__(self, attrs: Mapping[str, Any] | None = ...) 
-> None: ... - def get_option_dict(self, command: str) -> dict[str, Tuple[str, str]]: ... + def get_option_dict(self, command: str) -> dict[str, tuple[str, str]]: ... def parse_config_files(self, filenames: Iterable[str] | None = ...) -> None: ... def get_command_obj(self, command: str, create: bool = ...) -> Command | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/extension.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/extension.pyi index 0941a402e604..655ea1e9e347 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/extension.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/extension.pyi @@ -1,12 +1,10 @@ -from typing import Tuple - class Extension: def __init__( self, name: str, sources: list[str], include_dirs: list[str] | None = ..., - define_macros: list[Tuple[str, str | None]] | None = ..., + define_macros: list[tuple[str, str | None]] | None = ..., undef_macros: list[str] | None = ..., library_dirs: list[str] | None = ..., libraries: list[str] | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi index a2c24187a498..dce8394b6289 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/fancy_getopt.pyi @@ -1,7 +1,7 @@ -from typing import Any, Iterable, List, Mapping, Optional, Tuple, overload +from typing import Any, Iterable, Mapping, Optional, overload -_Option = Tuple[str, Optional[str], str] -_GR = Tuple[List[str], OptionDummy] +_Option = tuple[str, Optional[str], str] +_GR = tuple[list[str], OptionDummy] def fancy_getopt( options: list[_Option], negative_opt: Mapping[_Option, _Option], object: Any, args: list[str] | None @@ -15,7 +15,7 @@ class FancyGetopt: def getopt(self, args: list[str] | None = ...) -> _GR: ... 
@overload def getopt(self, args: list[str] | None, object: Any) -> list[str]: ... - def get_option_order(self) -> list[Tuple[str, str]]: ... + def get_option_order(self) -> list[tuple[str, str]]: ... def generate_help(self, header: str | None = ...) -> list[str]: ... class OptionDummy: diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/file_util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/file_util.pyi index cfe840e71040..a7f24105a678 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/file_util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/file_util.pyi @@ -1,4 +1,4 @@ -from typing import Sequence, Tuple +from typing import Sequence def copy_file( src: str, @@ -9,6 +9,6 @@ def copy_file( link: str | None = ..., verbose: bool = ..., dry_run: bool = ..., -) -> Tuple[str, str]: ... +) -> tuple[str, str]: ... def move_file(src: str, dst: str, verbose: bool = ..., dry_run: bool = ...) -> str: ... def write_file(filename: str, contents: Sequence[str]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/text_file.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/text_file.pyi index 6a0aded5176e..ace642e027cf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/text_file.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/text_file.pyi @@ -1,4 +1,4 @@ -from typing import IO, Tuple +from typing import IO class TextFile: def __init__( @@ -15,7 +15,7 @@ class TextFile: ) -> None: ... def open(self, filename: str) -> None: ... def close(self) -> None: ... - def warn(self, msg: str, line: list[int] | Tuple[int, int] | int | None = ...) -> None: ... + def warn(self, msg: str, line: list[int] | tuple[int, int] | int | None = ...) -> None: ... def readline(self) -> str | None: ... def readlines(self) -> list[str]: ... def unreadline(self, line: str) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/util.pyi index 9b0915570ece..22d982e6949d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/util.pyi @@ -1,6 +1,7 @@ from _typeshed import StrPath from collections.abc import Callable, Container, Iterable, Mapping -from typing import Any, Tuple +from typing import Any +from typing_extensions import Literal def get_platform() -> str: ... def convert_path(pathname: str) -> str: ... @@ -9,9 +10,9 @@ def check_environ() -> None: ... def subst_vars(s: str, local_vars: Mapping[str, str]) -> None: ... def split_quoted(s: str) -> list[str]: ... def execute( - func: Callable[..., None], args: Tuple[Any, ...], msg: str | None = ..., verbose: bool = ..., dry_run: bool = ... + func: Callable[..., None], args: tuple[Any, ...], msg: str | None = ..., verbose: bool = ..., dry_run: bool = ... ) -> None: ... -def strtobool(val: str) -> bool: ... +def strtobool(val: str) -> Literal[0, 1]: ... def byte_compile( py_files: list[str], optimize: int = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/version.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/version.pyi index 29d72174d8db..210aa5cf718b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/distutils/version.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/distutils/version.pyi @@ -1,38 +1,36 @@ +from _typeshed import Self from abc import abstractmethod -from typing import Pattern, Tuple, TypeVar - -_T = TypeVar("_T", bound=Version) +from typing import Pattern class Version: - def __repr__(self) -> str: ... def __eq__(self, other: object) -> bool: ... - def __lt__(self: _T, other: _T | str) -> bool: ... - def __le__(self: _T, other: _T | str) -> bool: ... - def __gt__(self: _T, other: _T | str) -> bool: ... 
- def __ge__(self: _T, other: _T | str) -> bool: ... + def __lt__(self: Self, other: Self | str) -> bool: ... + def __le__(self: Self, other: Self | str) -> bool: ... + def __gt__(self: Self, other: Self | str) -> bool: ... + def __ge__(self: Self, other: Self | str) -> bool: ... @abstractmethod def __init__(self, vstring: str | None = ...) -> None: ... @abstractmethod - def parse(self: _T, vstring: str) -> _T: ... + def parse(self: Self, vstring: str) -> Self: ... @abstractmethod def __str__(self) -> str: ... @abstractmethod - def _cmp(self: _T, other: _T | str) -> bool: ... + def _cmp(self: Self, other: Self | str) -> bool: ... class StrictVersion(Version): version_re: Pattern[str] - version: Tuple[int, int, int] - prerelease: Tuple[str, int] | None + version: tuple[int, int, int] + prerelease: tuple[str, int] | None def __init__(self, vstring: str | None = ...) -> None: ... - def parse(self: _T, vstring: str) -> _T: ... - def __str__(self) -> str: ... - def _cmp(self: _T, other: _T | str) -> bool: ... + def parse(self: Self, vstring: str) -> Self: ... + def __str__(self) -> str: ... # noqa Y029 + def _cmp(self: Self, other: Self | str) -> bool: ... class LooseVersion(Version): component_re: Pattern[str] vstring: str - version: Tuple[str | int, ...] + version: tuple[str | int, ...] def __init__(self, vstring: str | None = ...) -> None: ... - def parse(self: _T, vstring: str) -> _T: ... - def __str__(self) -> str: ... - def _cmp(self: _T, other: _T | str) -> bool: ... + def parse(self: Self, vstring: str) -> Self: ... + def __str__(self) -> str: ... # noqa Y029 + def _cmp(self: Self, other: Self | str) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/doctest.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/doctest.pyi index 9a9f83b0d8fe..651e1b298aaf 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/doctest.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/doctest.pyi @@ -1,6 +1,42 @@ import types import unittest -from typing import Any, Callable, NamedTuple, Tuple, Type +from typing import Any, Callable, NamedTuple + +__all__ = [ + "register_optionflag", + "DONT_ACCEPT_TRUE_FOR_1", + "DONT_ACCEPT_BLANKLINE", + "NORMALIZE_WHITESPACE", + "ELLIPSIS", + "SKIP", + "IGNORE_EXCEPTION_DETAIL", + "COMPARISON_FLAGS", + "REPORT_UDIFF", + "REPORT_CDIFF", + "REPORT_NDIFF", + "REPORT_ONLY_FIRST_FAILURE", + "REPORTING_FLAGS", + "FAIL_FAST", + "Example", + "DocTest", + "DocTestParser", + "DocTestFinder", + "DocTestRunner", + "OutputChecker", + "DocTestFailure", + "UnexpectedException", + "DebugRunner", + "testmod", + "testfile", + "run_docstring_examples", + "DocTestSuite", + "DocFileSuite", + "set_unittest_reportflags", + "script_from_examples", + "testsource", + "debug_src", + "debug", +] class TestResults(NamedTuple): failed: int @@ -47,6 +83,7 @@ class Example: options: dict[int, bool] | None = ..., ) -> None: ... def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... class DocTest: examples: list[Example] @@ -66,6 +103,7 @@ class DocTest: ) -> None: ... def __hash__(self) -> int: ... def __lt__(self, other: DocTest) -> bool: ... + def __eq__(self, other: object) -> bool: ... class DocTestParser: def parse(self, string: str, name: str = ...) -> list[str | Example]: ... @@ -86,7 +124,7 @@ class DocTestFinder: ) -> list[DocTest]: ... 
_Out = Callable[[str], Any] -_ExcInfo = Tuple[Type[BaseException], BaseException, types.TracebackType] +_ExcInfo = tuple[type[BaseException], BaseException, types.TracebackType] class DocTestRunner: DIVIDER: str @@ -172,6 +210,7 @@ class DocTestCase(unittest.TestCase): def debug(self) -> None: ... def id(self) -> str: ... def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... def shortDescription(self) -> str: ... class SkipDocTestCase(DocTestCase): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/_header_value_parser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/_header_value_parser.pyi index 04234b482773..87018f04e9d4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/_header_value_parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/_header_value_parser.pyi @@ -1,29 +1,28 @@ import sys +from _typeshed import Self from email.errors import HeaderParseError, MessageDefect from email.policy import Policy -from typing import Any, Iterable, Iterator, List, Pattern, Set, Tuple, Type, TypeVar, Union +from typing import Any, Iterable, Iterator, Pattern, Union from typing_extensions import Final -_T = TypeVar("_T") - -WSP: Final[Set[str]] -CFWS_LEADER: Final[Set[str]] -SPECIALS: Final[Set[str]] -ATOM_ENDS: Final[Set[str]] -DOT_ATOM_ENDS: Final[Set[str]] -PHRASE_ENDS: Final[Set[str]] -TSPECIALS: Final[Set[str]] -TOKEN_ENDS: Final[Set[str]] -ASPECIALS: Final[Set[str]] -ATTRIBUTE_ENDS: Final[Set[str]] -EXTENDED_ATTRIBUTE_ENDS: Final[Set[str]] +WSP: Final[set[str]] +CFWS_LEADER: Final[set[str]] +SPECIALS: Final[set[str]] +ATOM_ENDS: Final[set[str]] +DOT_ATOM_ENDS: Final[set[str]] +PHRASE_ENDS: Final[set[str]] +TSPECIALS: Final[set[str]] +TOKEN_ENDS: Final[set[str]] +ASPECIALS: Final[set[str]] +ATTRIBUTE_ENDS: Final[set[str]] +EXTENDED_ATTRIBUTE_ENDS: Final[set[str]] def quote_string(value: Any) -> str: ... 
if sys.version_info >= (3, 7): rfc2047_matcher: Pattern[str] -class TokenList(List[Union[TokenList, Terminal]]): +class TokenList(list[Union[TokenList, Terminal]]): token_type: str | None syntactic_break: bool ew_combine_allowed: bool @@ -281,12 +280,12 @@ class MimeParameters(TokenList): token_type: str syntactic_break: bool @property - def params(self) -> Iterator[Tuple[str, str]]: ... + def params(self) -> Iterator[tuple[str, str]]: ... class ParameterizedHeaderValue(TokenList): syntactic_break: bool @property - def params(self) -> Iterable[Tuple[str, str]]: ... + def params(self) -> Iterable[tuple[str, str]]: ... class ContentType(ParameterizedHeaderValue): token_type: str @@ -313,8 +312,10 @@ if sys.version_info >= (3, 8): token_type: str as_ew_allowed: bool def fold(self, policy: Policy) -> str: ... + class MessageID(MsgID): token_type: str + class InvalidMessageID(MessageID): token_type: str @@ -327,14 +328,14 @@ class Terminal(str): syntactic_break: bool token_type: str defects: list[MessageDefect] - def __new__(cls: Type[_T], value: str, token_type: str) -> _T: ... + def __new__(cls: type[Self], value: str, token_type: str) -> Self: ... def pprint(self) -> None: ... @property def all_defects(self) -> list[MessageDefect]: ... def pop_trailing_ws(self) -> None: ... @property def comments(self) -> list[str]: ... - def __getnewargs__(self) -> Tuple[str, str]: ... # type: ignore + def __getnewargs__(self) -> tuple[str, str]: ... # type: ignore[override] class WhiteSpaceTerminal(Terminal): @property @@ -356,55 +357,55 @@ DOT: Final[ValueTerminal] ListSeparator: Final[ValueTerminal] RouteComponentMarker: Final[ValueTerminal] -def get_fws(value: str) -> Tuple[WhiteSpaceTerminal, str]: ... -def get_encoded_word(value: str) -> Tuple[EncodedWord, str]: ... +def get_fws(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_encoded_word(value: str) -> tuple[EncodedWord, str]: ... def get_unstructured(value: str) -> UnstructuredTokenList: ... 
-def get_qp_ctext(value: str) -> Tuple[WhiteSpaceTerminal, str]: ... -def get_qcontent(value: str) -> Tuple[ValueTerminal, str]: ... -def get_atext(value: str) -> Tuple[ValueTerminal, str]: ... -def get_bare_quoted_string(value: str) -> Tuple[BareQuotedString, str]: ... -def get_comment(value: str) -> Tuple[Comment, str]: ... -def get_cfws(value: str) -> Tuple[CFWSList, str]: ... -def get_quoted_string(value: str) -> Tuple[QuotedString, str]: ... -def get_atom(value: str) -> Tuple[Atom, str]: ... -def get_dot_atom_text(value: str) -> Tuple[DotAtomText, str]: ... -def get_dot_atom(value: str) -> Tuple[DotAtom, str]: ... -def get_word(value: str) -> Tuple[Any, str]: ... -def get_phrase(value: str) -> Tuple[Phrase, str]: ... -def get_local_part(value: str) -> Tuple[LocalPart, str]: ... -def get_obs_local_part(value: str) -> Tuple[ObsLocalPart, str]: ... -def get_dtext(value: str) -> Tuple[ValueTerminal, str]: ... -def get_domain_literal(value: str) -> Tuple[DomainLiteral, str]: ... -def get_domain(value: str) -> Tuple[Domain, str]: ... -def get_addr_spec(value: str) -> Tuple[AddrSpec, str]: ... -def get_obs_route(value: str) -> Tuple[ObsRoute, str]: ... -def get_angle_addr(value: str) -> Tuple[AngleAddr, str]: ... -def get_display_name(value: str) -> Tuple[DisplayName, str]: ... -def get_name_addr(value: str) -> Tuple[NameAddr, str]: ... -def get_mailbox(value: str) -> Tuple[Mailbox, str]: ... -def get_invalid_mailbox(value: str, endchars: str) -> Tuple[InvalidMailbox, str]: ... -def get_mailbox_list(value: str) -> Tuple[MailboxList, str]: ... -def get_group_list(value: str) -> Tuple[GroupList, str]: ... -def get_group(value: str) -> Tuple[Group, str]: ... -def get_address(value: str) -> Tuple[Address, str]: ... -def get_address_list(value: str) -> Tuple[AddressList, str]: ... +def get_qp_ctext(value: str) -> tuple[WhiteSpaceTerminal, str]: ... +def get_qcontent(value: str) -> tuple[ValueTerminal, str]: ... +def get_atext(value: str) -> tuple[ValueTerminal, str]: ... 
+def get_bare_quoted_string(value: str) -> tuple[BareQuotedString, str]: ... +def get_comment(value: str) -> tuple[Comment, str]: ... +def get_cfws(value: str) -> tuple[CFWSList, str]: ... +def get_quoted_string(value: str) -> tuple[QuotedString, str]: ... +def get_atom(value: str) -> tuple[Atom, str]: ... +def get_dot_atom_text(value: str) -> tuple[DotAtomText, str]: ... +def get_dot_atom(value: str) -> tuple[DotAtom, str]: ... +def get_word(value: str) -> tuple[Any, str]: ... +def get_phrase(value: str) -> tuple[Phrase, str]: ... +def get_local_part(value: str) -> tuple[LocalPart, str]: ... +def get_obs_local_part(value: str) -> tuple[ObsLocalPart, str]: ... +def get_dtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_domain_literal(value: str) -> tuple[DomainLiteral, str]: ... +def get_domain(value: str) -> tuple[Domain, str]: ... +def get_addr_spec(value: str) -> tuple[AddrSpec, str]: ... +def get_obs_route(value: str) -> tuple[ObsRoute, str]: ... +def get_angle_addr(value: str) -> tuple[AngleAddr, str]: ... +def get_display_name(value: str) -> tuple[DisplayName, str]: ... +def get_name_addr(value: str) -> tuple[NameAddr, str]: ... +def get_mailbox(value: str) -> tuple[Mailbox, str]: ... +def get_invalid_mailbox(value: str, endchars: str) -> tuple[InvalidMailbox, str]: ... +def get_mailbox_list(value: str) -> tuple[MailboxList, str]: ... +def get_group_list(value: str) -> tuple[GroupList, str]: ... +def get_group(value: str) -> tuple[Group, str]: ... +def get_address(value: str) -> tuple[Address, str]: ... +def get_address_list(value: str) -> tuple[AddressList, str]: ... if sys.version_info >= (3, 8): - def get_no_fold_literal(value: str) -> Tuple[NoFoldLiteral, str]: ... - def get_msg_id(value: str) -> Tuple[MsgID, str]: ... + def get_no_fold_literal(value: str) -> tuple[NoFoldLiteral, str]: ... + def get_msg_id(value: str) -> tuple[MsgID, str]: ... def parse_message_id(value: str) -> MessageID: ... 
def parse_mime_version(value: str) -> MIMEVersion: ... -def get_invalid_parameter(value: str) -> Tuple[InvalidParameter, str]: ... -def get_ttext(value: str) -> Tuple[ValueTerminal, str]: ... -def get_token(value: str) -> Tuple[Token, str]: ... -def get_attrtext(value: str) -> Tuple[ValueTerminal, str]: ... -def get_attribute(value: str) -> Tuple[Attribute, str]: ... -def get_extended_attrtext(value: str) -> Tuple[ValueTerminal, str]: ... -def get_extended_attribute(value: str) -> Tuple[Attribute, str]: ... -def get_section(value: str) -> Tuple[Section, str]: ... -def get_value(value: str) -> Tuple[Value, str]: ... -def get_parameter(value: str) -> Tuple[Parameter, str]: ... +def get_invalid_parameter(value: str) -> tuple[InvalidParameter, str]: ... +def get_ttext(value: str) -> tuple[ValueTerminal, str]: ... +def get_token(value: str) -> tuple[Token, str]: ... +def get_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_attribute(value: str) -> tuple[Attribute, str]: ... +def get_extended_attrtext(value: str) -> tuple[ValueTerminal, str]: ... +def get_extended_attribute(value: str) -> tuple[Attribute, str]: ... +def get_section(value: str) -> tuple[Section, str]: ... +def get_value(value: str) -> tuple[Value, str]: ... +def get_parameter(value: str) -> tuple[Parameter, str]: ... def parse_mime_parameters(value: str) -> MimeParameters: ... def parse_content_type_header(value: str) -> ContentType: ... def parse_content_disposition_header(value: str) -> ContentDisposition: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/base64mime.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/base64mime.pyi new file mode 100644 index 000000000000..e55658046f55 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/base64mime.pyi @@ -0,0 +1,9 @@ +__all__ = ["body_decode", "body_encode", "decode", "decodestring", "header_encode", "header_length"] + +def header_length(bytearray: str | bytes) -> int: ... 
+def header_encode(header_bytes: str | bytes, charset: str = ...) -> str: ... +def body_encode(s: bytes, maxlinelen: int = ..., eol: str = ...) -> str: ... +def decode(string: str | bytes) -> bytes: ... + +body_decode = decode +decodestring = decode diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/charset.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/charset.pyi index 4bf5d11690eb..fd3de9ceace2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/charset.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/charset.pyi @@ -1,4 +1,6 @@ -from typing import Any, Iterator +from typing import Iterator + +__all__ = ["Charset", "add_alias", "add_charset", "add_codec"] QP: int # undocumented BASE64: int # undocumented @@ -17,9 +19,8 @@ class Charset: def header_encode(self, string: str) -> str: ... def header_encode_lines(self, string: str, maxlengths: Iterator[int]) -> list[str]: ... def body_encode(self, string: str) -> str: ... - def __str__(self) -> str: ... - def __eq__(self, other: Any) -> bool: ... - def __ne__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... def add_charset( charset: str, header_enc: int | None = ..., body_enc: int | None = ..., output_charset: str | None = ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/encoders.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/encoders.pyi index e05225e895c4..55223bdc0762 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/encoders.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/encoders.pyi @@ -1,5 +1,7 @@ from email.message import Message +__all__ = ["encode_7or8bit", "encode_base64", "encode_noop", "encode_quopri"] + def encode_base64(msg: Message) -> None: ... def encode_quopri(msg: Message) -> None: ... def encode_7or8bit(msg: Message) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/feedparser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/feedparser.pyi index ffcf4f0a7c6e..fd27a7242649 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/feedparser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/feedparser.pyi @@ -2,6 +2,8 @@ from email.message import Message from email.policy import Policy from typing import Callable, Generic, TypeVar, overload +__all__ = ["FeedParser", "BytesFeedParser"] + _M = TypeVar("_M", bound=Message) class FeedParser(Generic[_M]): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/generator.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/generator.pyi index 1a810558a0da..5a6b6374dd4b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/generator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/generator.pyi @@ -1,27 +1,39 @@ +from _typeshed import SupportsWrite from email.message import Message from email.policy import Policy -from typing import BinaryIO, TextIO + +__all__ = ["Generator", "DecodedGenerator", "BytesGenerator"] class Generator: - def clone(self, fp: TextIO) -> Generator: ... + def clone(self, fp: SupportsWrite[str]) -> Generator: ... def write(self, s: str) -> None: ... def __init__( - self, outfp: TextIO, mangle_from_: bool | None = ..., maxheaderlen: int | None = ..., *, policy: Policy | None = ... + self, + outfp: SupportsWrite[str], + mangle_from_: bool | None = ..., + maxheaderlen: int | None = ..., + *, + policy: Policy | None = ..., ) -> None: ... def flatten(self, msg: Message, unixfrom: bool = ..., linesep: str | None = ...) -> None: ... class BytesGenerator: - def clone(self, fp: BinaryIO) -> BytesGenerator: ... + def clone(self, fp: SupportsWrite[bytes]) -> BytesGenerator: ... def write(self, s: str) -> None: ... 
def __init__( - self, outfp: BinaryIO, mangle_from_: bool | None = ..., maxheaderlen: int | None = ..., *, policy: Policy | None = ... + self, + outfp: SupportsWrite[bytes], + mangle_from_: bool | None = ..., + maxheaderlen: int | None = ..., + *, + policy: Policy | None = ..., ) -> None: ... def flatten(self, msg: Message, unixfrom: bool = ..., linesep: str | None = ...) -> None: ... class DecodedGenerator(Generator): def __init__( self, - outfp: TextIO, + outfp: SupportsWrite[str], mangle_from_: bool | None = ..., maxheaderlen: int | None = ..., fmt: str | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/header.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/header.pyi index 0d7691a622cb..bd851bcf8679 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/header.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/header.pyi @@ -1,5 +1,6 @@ from email.charset import Charset -from typing import Any, Tuple + +__all__ = ["Header", "decode_header", "make_header"] class Header: def __init__( @@ -13,13 +14,12 @@ class Header: ) -> None: ... def append(self, s: bytes | str, charset: Charset | str | None = ..., errors: str = ...) -> None: ... def encode(self, splitchars: str = ..., maxlinelen: int | None = ..., linesep: str = ...) -> str: ... - def __str__(self) -> str: ... - def __eq__(self, other: Any) -> bool: ... - def __ne__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... -def decode_header(header: Header | str) -> list[Tuple[bytes, str | None]]: ... +def decode_header(header: Header | str) -> list[tuple[bytes, str | None]]: ... 
def make_header( - decoded_seq: list[Tuple[bytes, str | None]], + decoded_seq: list[tuple[bytes, str | None]], maxlinelen: int | None = ..., header_name: str | None = ..., continuation_ws: str = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/headerregistry.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/headerregistry.pyi index 69e7bf315d9f..7f1d86b985a1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/headerregistry.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/headerregistry.pyi @@ -1,5 +1,7 @@ import sys import types +from _typeshed import Self +from collections.abc import Iterable, Mapping from datetime import datetime as _datetime from email._header_value_parser import ( AddressList, @@ -12,28 +14,33 @@ from email._header_value_parser import ( ) from email.errors import MessageDefect from email.policy import Policy -from typing import Any, Iterable, Tuple, Type +from typing import Any, ClassVar +from typing_extensions import Literal class BaseHeader(str): + # max_count is actually more of an abstract ClassVar (not defined on the base class, but expected to be defined in subclasses) + max_count: ClassVar[Literal[1] | None] @property def name(self) -> str: ... @property - def defects(self) -> Tuple[MessageDefect, ...]: ... - @property - def max_count(self) -> int | None: ... - def __new__(cls, name: str, value: Any) -> BaseHeader: ... + def defects(self) -> tuple[MessageDefect, ...]: ... + def __new__(cls: type[Self], name: str, value: Any) -> Self: ... def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... def fold(self, *, policy: Policy) -> str: ... class UnstructuredHeader: + max_count: ClassVar[Literal[1] | None] @staticmethod def value_parser(value: str) -> UnstructuredTokenList: ... @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... -class UniqueUnstructuredHeader(UnstructuredHeader): ... 
+class UniqueUnstructuredHeader(UnstructuredHeader): + max_count: ClassVar[Literal[1]] class DateHeader: + max_count: ClassVar[Literal[1] | None] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], datetime: _datetime) -> None: ... @property def datetime(self) -> _datetime: ... @staticmethod @@ -41,27 +48,43 @@ class DateHeader: @classmethod def parse(cls, value: str | _datetime, kwds: dict[str, Any]) -> None: ... -class UniqueDateHeader(DateHeader): ... +class UniqueDateHeader(DateHeader): + max_count: ClassVar[Literal[1]] class AddressHeader: + max_count: ClassVar[Literal[1] | None] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], groups: Iterable[Group]) -> None: ... @property - def groups(self) -> Tuple[Group, ...]: ... + def groups(self) -> tuple[Group, ...]: ... @property - def addresses(self) -> Tuple[Address, ...]: ... + def addresses(self) -> tuple[Address, ...]: ... @staticmethod def value_parser(value: str) -> AddressList: ... @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... -class UniqueAddressHeader(AddressHeader): ... +class UniqueAddressHeader(AddressHeader): + max_count: ClassVar[Literal[1]] class SingleAddressHeader(AddressHeader): @property def address(self) -> Address: ... -class UniqueSingleAddressHeader(SingleAddressHeader): ... +class UniqueSingleAddressHeader(SingleAddressHeader): + max_count: ClassVar[Literal[1]] class MIMEVersionHeader: + max_count: ClassVar[Literal[1]] + def init( + self, + name: str, + *, + parse_tree: TokenList, + defects: Iterable[MessageDefect], + version: str | None, + major: int | None, + minor: int | None, + ) -> None: ... @property def version(self) -> str | None: ... @property @@ -74,6 +97,8 @@ class MIMEVersionHeader: def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... 
class ParameterizedMIMEHeader: + max_count: ClassVar[Literal[1]] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect], params: Mapping[str, Any]) -> None: ... @property def params(self) -> types.MappingProxyType[str, Any]: ... @classmethod @@ -90,12 +115,15 @@ class ContentTypeHeader(ParameterizedMIMEHeader): def value_parser(value: str) -> ContentType: ... class ContentDispositionHeader(ParameterizedMIMEHeader): + # init is redefined but has the same signature as parent class, so is omitted from the stub @property - def content_disposition(self) -> str: ... + def content_disposition(self) -> str | None: ... @staticmethod def value_parser(value: str) -> ContentDisposition: ... class ContentTransferEncodingHeader: + max_count: ClassVar[Literal[1]] + def init(self, name: str, *, parse_tree: TokenList, defects: Iterable[MessageDefect]) -> None: ... @property def cte(self) -> str: ... @classmethod @@ -105,7 +133,9 @@ class ContentTransferEncodingHeader: if sys.version_info >= (3, 8): from email._header_value_parser import MessageID + class MessageIDHeader: + max_count: ClassVar[Literal[1]] @classmethod def parse(cls, value: str, kwds: dict[str, Any]) -> None: ... @staticmethod @@ -113,10 +143,10 @@ if sys.version_info >= (3, 8): class HeaderRegistry: def __init__( - self, base_class: Type[BaseHeader] = ..., default_class: Type[BaseHeader] = ..., use_default_map: bool = ... + self, base_class: type[BaseHeader] = ..., default_class: type[BaseHeader] = ..., use_default_map: bool = ... ) -> None: ... - def map_to_type(self, name: str, cls: Type[BaseHeader]) -> None: ... - def __getitem__(self, name: str) -> Type[BaseHeader]: ... + def map_to_type(self, name: str, cls: type[BaseHeader]) -> None: ... + def __getitem__(self, name: str) -> type[BaseHeader]: ... def __call__(self, name: str, value: Any) -> BaseHeader: ... 
class Address: @@ -131,12 +161,12 @@ class Address: def __init__( self, display_name: str = ..., username: str | None = ..., domain: str | None = ..., addr_spec: str | None = ... ) -> None: ... - def __str__(self) -> str: ... + def __eq__(self, other: object) -> bool: ... class Group: @property def display_name(self) -> str | None: ... @property - def addresses(self) -> Tuple[Address, ...]: ... + def addresses(self) -> tuple[Address, ...]: ... def __init__(self, display_name: str | None = ..., addresses: Iterable[Address] | None = ...) -> None: ... - def __str__(self) -> str: ... + def __eq__(self, other: object) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/iterators.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/iterators.pyi index 9081a3e3ba73..4bc81c09326a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/iterators.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/iterators.pyi @@ -1,5 +1,8 @@ from email.message import Message from typing import Iterator +__all__ = ["body_line_iterator", "typed_subpart_iterator", "walk"] + def body_line_iterator(msg: Message, decode: bool = ...) -> Iterator[str]: ... def typed_subpart_iterator(msg: Message, maintype: str = ..., subtype: str | None = ...) -> Iterator[str]: ... +def walk(self: Message) -> Iterator[Message]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/message.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/message.pyi index 9ad8c1852199..d6857328a4ee 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/message.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/message.pyi @@ -2,22 +2,24 @@ from email.charset import Charset from email.contentmanager import ContentManager from email.errors import MessageDefect from email.policy import Policy -from typing import Any, Generator, Iterator, List, Optional, Sequence, Tuple, TypeVar, Union + +# using a type alias ("_HeaderType = Any") breaks mypy, who knows why +from typing import Any, Any as _HeaderType, Generator, Iterator, Optional, Sequence, TypeVar, Union + +__all__ = ["Message", "EmailMessage"] _T = TypeVar("_T") -_PayloadType = Union[List[Message], str, bytes] +_PayloadType = Union[list[Message], str, bytes] _CharsetType = Union[Charset, str, None] -_ParamsType = Union[str, None, Tuple[str, Optional[str], str]] -_ParamType = Union[str, Tuple[Optional[str], Optional[str], str]] -_HeaderType = Any +_ParamsType = Union[str, None, tuple[str, Optional[str], str]] +_ParamType = Union[str, tuple[Optional[str], Optional[str], str]] class Message: policy: Policy # undocumented preamble: str | None epilogue: str | None defects: list[MessageDefect] - def __str__(self) -> str: ... def is_multipart(self) -> bool: ... def set_unixfrom(self, unixfrom: str) -> None: ... def get_unixfrom(self) -> str | None: ... @@ -34,7 +36,7 @@ class Message: def __delitem__(self, name: str) -> None: ... def keys(self) -> list[str]: ... def values(self) -> list[_HeaderType]: ... - def items(self) -> list[Tuple[str, _HeaderType]]: ... + def items(self) -> list[tuple[str, _HeaderType]]: ... def get(self, name: str, failobj: _T = ...) -> _HeaderType | _T: ... def get_all(self, name: str, failobj: _T = ...) -> list[_HeaderType] | _T: ... 
def add_header(self, _name: str, _value: str, **_params: _ParamsType) -> None: ... @@ -44,7 +46,7 @@ class Message: def get_content_subtype(self) -> str: ... def get_default_type(self) -> str: ... def set_default_type(self, ctype: str) -> None: ... - def get_params(self, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> list[Tuple[str, str]] | _T: ... + def get_params(self, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> list[tuple[str, str]] | _T: ... def get_param(self, param: str, failobj: _T = ..., header: str = ..., unquote: bool = ...) -> _T | _ParamType: ... def del_param(self, param: str, header: str = ..., requote: bool = ...) -> None: ... def set_type(self, type: str, header: str = ..., requote: bool = ...) -> None: ... @@ -69,6 +71,9 @@ class Message: replace: bool = ..., ) -> None: ... def __init__(self, policy: Policy = ...) -> None: ... + # The following two methods are undocumented, but a source code comment states that they are public API + def set_raw(self, name: str, value: str) -> None: ... + def raw_items(self) -> Iterator[tuple[str, str]]: ... class MIMEPart(Message): def __init__(self, policy: Policy | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/application.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/application.pyi index 11fc470e9dd1..978324c03863 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/application.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/application.pyi @@ -1,8 +1,10 @@ from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy -from typing import Callable, Optional, Tuple, Union +from typing import Callable, Optional, Union -_ParamsType = Union[str, None, Tuple[str, Optional[str], str]] +__all__ = ["MIMEApplication"] + +_ParamsType = Union[str, None, tuple[str, Optional[str], str]] class MIMEApplication(MIMENonMultipart): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/audio.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/audio.pyi index ee6de410bf53..aa7b9ceb2dea 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/audio.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/audio.pyi @@ -1,8 +1,10 @@ from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy -from typing import Callable, Optional, Tuple, Union +from typing import Callable, Optional, Union -_ParamsType = Union[str, None, Tuple[str, Optional[str], str]] +__all__ = ["MIMEAudio"] + +_ParamsType = Union[str, None, tuple[str, Optional[str], str]] class MIMEAudio(MIMENonMultipart): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/base.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/base.pyi index b88dfd492554..faa561fcb3a1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/base.pyi @@ -1,8 +1,10 @@ import email.message from email.policy import Policy -from typing import 
Optional, Tuple, Union +from typing import Optional, Union -_ParamsType = Union[str, None, Tuple[str, Optional[str], str]] +__all__ = ["MIMEBase"] + +_ParamsType = Union[str, None, tuple[str, Optional[str], str]] class MIMEBase(email.message.Message): def __init__(self, _maintype: str, _subtype: str, *, policy: Policy | None = ..., **_params: _ParamsType) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/image.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/image.pyi index 886aa74d5fe5..4e767e6aa6d8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/image.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/image.pyi @@ -1,8 +1,10 @@ from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy -from typing import Callable, Optional, Tuple, Union +from typing import Callable, Optional, Union -_ParamsType = Union[str, None, Tuple[str, Optional[str], str]] +__all__ = ["MIMEImage"] + +_ParamsType = Union[str, None, tuple[str, Optional[str], str]] class MIMEImage(MIMENonMultipart): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/message.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/message.pyi index 8878741e8db3..9e7cd04b6e77 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/message.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/message.pyi @@ -2,5 +2,7 @@ from email.message import Message from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy +__all__ = ["MIMEMessage"] + class MIMEMessage(MIMENonMultipart): def __init__(self, _msg: Message, _subtype: str = ..., *, policy: Policy | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/multipart.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/multipart.pyi index 6259ddf5ab8f..3b70d5875661 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/multipart.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/multipart.pyi @@ -1,9 +1,11 @@ from email.message import Message from email.mime.base import MIMEBase from email.policy import Policy -from typing import Optional, Sequence, Tuple, Union +from typing import Optional, Sequence, Union -_ParamsType = Union[str, None, Tuple[str, Optional[str], str]] +__all__ = ["MIMEMultipart"] + +_ParamsType = Union[str, None, tuple[str, Optional[str], str]] class MIMEMultipart(MIMEBase): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/nonmultipart.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/nonmultipart.pyi index 4addff18861a..5497d89b1072 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/nonmultipart.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/nonmultipart.pyi @@ -1,3 +1,5 @@ from email.mime.base import MIMEBase +__all__ = ["MIMENonMultipart"] + class MIMENonMultipart(MIMEBase): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/text.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/text.pyi index afcafa66ee60..9672c3b717b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/text.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/mime/text.pyi @@ -1,5 +1,7 @@ from email.mime.nonmultipart import MIMENonMultipart from email.policy import Policy +__all__ = ["MIMEText"] + class MIMEText(MIMENonMultipart): def __init__(self, _text: str, _subtype: str = ..., _charset: str | None = ..., *, policy: Policy | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/parser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/parser.pyi index 574c927eeaf5..bbc5d0124b3a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/parser.pyi @@ -1,10 +1,14 @@ import email.feedparser from email.message import Message from email.policy import Policy -from typing import BinaryIO, Callable, TextIO +from typing import BinaryIO, Callable, TextIO, TypeVar -FeedParser = email.feedparser.FeedParser -BytesFeedParser = email.feedparser.BytesFeedParser +__all__ = ["Parser", "HeaderParser", "BytesParser", "BytesHeaderParser", "FeedParser", "BytesFeedParser"] + +_M = TypeVar("_M", bound=Message) + +FeedParser = email.feedparser.FeedParser[_M] +BytesFeedParser = email.feedparser.BytesFeedParser[_M] class Parser: def __init__(self, _class: Callable[[], Message] | None = ..., *, policy: Policy = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/policy.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/policy.pyi index 625e6a5dcdbc..d4ebb1fd5e37 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/policy.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/policy.pyi @@ -1,25 +1,37 @@ -from abc import abstractmethod +from abc import ABCMeta, abstractmethod from email.contentmanager import ContentManager from email.errors import MessageDefect from email.header import Header from email.message import Message -from typing import Any, Callable, Tuple +from typing import Any, Callable -class Policy: +__all__ = ["Compat32", "compat32", "Policy", "EmailPolicy", "default", "strict", "SMTP", "HTTP"] + +class Policy(metaclass=ABCMeta): max_line_length: int | None linesep: str cte_type: str raise_on_defect: bool - mange_from: bool - def __init__(self, **kw: Any) -> None: ... 
+ mangle_from_: bool + message_factory: Callable[[Policy], Message] | None + def __init__( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: Callable[[Policy], Message] | None = ..., + ) -> None: ... def clone(self, **kw: Any) -> Policy: ... def handle_defect(self, obj: Message, defect: MessageDefect) -> None: ... def register_defect(self, obj: Message, defect: MessageDefect) -> None: ... def header_max_count(self, name: str) -> int | None: ... @abstractmethod - def header_source_parse(self, sourcelines: list[str]) -> Tuple[str, str]: ... + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... @abstractmethod - def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... @abstractmethod def header_fetch_parse(self, name: str, value: str) -> str: ... @abstractmethod @@ -28,9 +40,9 @@ class Policy: def fold_binary(self, name: str, value: str) -> bytes: ... class Compat32(Policy): - def header_source_parse(self, sourcelines: list[str]) -> Tuple[str, str]: ... - def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... - def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... + def header_fetch_parse(self, name: str, value: str) -> str | Header: ... # type: ignore[override] def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... @@ -41,8 +53,22 @@ class EmailPolicy(Policy): refold_source: str header_factory: Callable[[str, str], str] content_manager: ContentManager - def header_source_parse(self, sourcelines: list[str]) -> Tuple[str, str]: ... 
- def header_store_parse(self, name: str, value: str) -> Tuple[str, str]: ... + def __init__( + self, + *, + max_line_length: int | None = ..., + linesep: str = ..., + cte_type: str = ..., + raise_on_defect: bool = ..., + mangle_from_: bool = ..., + message_factory: Callable[[Policy], Message] | None = ..., + utf8: bool = ..., + refold_source: str = ..., + header_factory: Callable[[str, str], str] = ..., + content_manager: ContentManager = ..., + ) -> None: ... + def header_source_parse(self, sourcelines: list[str]) -> tuple[str, str]: ... + def header_store_parse(self, name: str, value: str) -> tuple[str, str]: ... def header_fetch_parse(self, name: str, value: str) -> str: ... def fold(self, name: str, value: str) -> str: ... def fold_binary(self, name: str, value: str) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/quoprimime.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/quoprimime.pyi new file mode 100644 index 000000000000..c5d324d17e13 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/quoprimime.pyi @@ -0,0 +1,26 @@ +__all__ = [ + "body_decode", + "body_encode", + "body_length", + "decode", + "decodestring", + "header_decode", + "header_encode", + "header_length", + "quote", + "unquote", +] + +def header_check(octet: int) -> bool: ... +def body_check(octet: int) -> bool: ... +def header_length(bytearray: bytes) -> int: ... +def body_length(bytearray: bytes) -> int: ... +def unquote(s: str | bytes) -> str: ... +def quote(c: str | bytes) -> str: ... +def header_encode(header_bytes: bytes, charset: str = ...) -> str: ... +def body_encode(body: str, maxlinelen: int = ..., eol: str = ...) -> str: ... +def decode(encoded: str, eol: str = ...) -> str: ... +def header_decode(s: str) -> str: ... 
+ +body_decode = decode +decodestring = decode diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/email/utils.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/email/utils.pyi index 96d75a63ab12..2b7f1bab3ce0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/email/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/email/utils.pyi @@ -1,20 +1,38 @@ import datetime import sys from email.charset import Charset -from typing import Optional, Tuple, Union, overload +from typing import Optional, Union, overload -_ParamType = Union[str, Tuple[Optional[str], Optional[str], str]] -_PDTZ = Tuple[int, int, int, int, int, int, int, int, int, Optional[int]] +__all__ = [ + "collapse_rfc2231_value", + "decode_params", + "decode_rfc2231", + "encode_rfc2231", + "formataddr", + "formatdate", + "format_datetime", + "getaddresses", + "make_msgid", + "mktime_tz", + "parseaddr", + "parsedate", + "parsedate_tz", + "parsedate_to_datetime", + "unquote", +] + +_ParamType = Union[str, tuple[Optional[str], Optional[str], str]] +_PDTZ = tuple[int, int, int, int, int, int, int, int, int, Optional[int]] def quote(str: str) -> str: ... def unquote(str: str) -> str: ... -def parseaddr(addr: str | None) -> Tuple[str, str]: ... -def formataddr(pair: Tuple[str | None, str], charset: str | Charset = ...) -> str: ... -def getaddresses(fieldvalues: list[str]) -> list[Tuple[str, str]]: ... +def parseaddr(addr: str | None) -> tuple[str, str]: ... +def formataddr(pair: tuple[str | None, str], charset: str | Charset = ...) -> str: ... +def getaddresses(fieldvalues: list[str]) -> list[tuple[str, str]]: ... @overload def parsedate(data: None) -> None: ... @overload -def parsedate(data: str) -> Tuple[int, int, int, int, int, int, int, int, int] | None: ... +def parsedate(data: str) -> tuple[int, int, int, int, int, int, int, int, int] | None: ... @overload def parsedate_tz(data: None) -> None: ... 
@overload @@ -34,7 +52,7 @@ def formatdate(timeval: float | None = ..., localtime: bool = ..., usegmt: bool def format_datetime(dt: datetime.datetime, usegmt: bool = ...) -> str: ... def localtime(dt: datetime.datetime | None = ..., isdst: int = ...) -> datetime.datetime: ... def make_msgid(idstring: str | None = ..., domain: str | None = ...) -> str: ... -def decode_rfc2231(s: str) -> Tuple[str | None, str | None, str]: ... +def decode_rfc2231(s: str) -> tuple[str | None, str | None, str]: ... def encode_rfc2231(s: str, charset: str | None = ..., language: str | None = ...) -> str: ... def collapse_rfc2231_value(value: _ParamType, errors: str = ..., fallback_charset: str = ...) -> str: ... -def decode_params(params: list[Tuple[str, str]]) -> list[Tuple[str, _ParamType]]: ... +def decode_params(params: list[tuple[str, str]]) -> list[tuple[str, _ParamType]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8.pyi index 892f2f082af0..568fa6013373 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8.pyi @@ -1,21 +1,20 @@ import codecs -from typing import Tuple class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input: str, final: bool = ...) -> bytes: ... class IncrementalDecoder(codecs.BufferedIncrementalDecoder): @staticmethod - def _buffer_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> Tuple[str, int]: ... + def _buffer_decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... class StreamWriter(codecs.StreamWriter): @staticmethod - def encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... + def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... 
class StreamReader(codecs.StreamReader): @staticmethod - def decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> Tuple[str, int]: ... + def decode(__data: bytes, __errors: str | None = ..., __final: bool = ...) -> tuple[str, int]: ... def getregentry() -> codecs.CodecInfo: ... -def encode(__str: str, __errors: str | None = ...) -> Tuple[bytes, int]: ... -def decode(input: bytes, errors: str | None = ...) -> Tuple[str, int]: ... +def encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8_sig.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8_sig.pyi new file mode 100644 index 000000000000..bf52e8a6f3d3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/encodings/utf_8_sig.pyi @@ -0,0 +1,27 @@ +import codecs + +class IncrementalEncoder(codecs.IncrementalEncoder): + def __init__(self, errors: str = ...) -> None: ... + def encode(self, input: str, final: bool = ...) -> bytes: ... + def reset(self) -> None: ... + def getstate(self) -> int: ... # type: ignore[override] + def setstate(self, state: int) -> None: ... # type: ignore[override] + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def __init__(self, errors: str = ...) -> None: ... + def _buffer_decode(self, input: bytes, errors: str | None, final: bool) -> tuple[str, int]: ... + def reset(self) -> None: ... + def getstate(self) -> tuple[bytes, int]: ... + def setstate(self, state: tuple[bytes, int]) -> None: ... + +class StreamWriter(codecs.StreamWriter): + def reset(self) -> None: ... + def encode(self, input: str, errors: str | None = ...) -> tuple[bytes, int]: ... + +class StreamReader(codecs.StreamReader): + def reset(self) -> None: ... + def decode(self, input: bytes, errors: str | None = ...) -> tuple[str, int]: ... 
+ +def getregentry() -> codecs.CodecInfo: ... +def encode(input: str, errors: str | None = ...) -> tuple[bytes, int]: ... +def decode(input: bytes, errors: str | None = ...) -> tuple[str, int]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ensurepip/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ensurepip/__init__.pyi index 749fedc04424..e2686b8d5437 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ensurepip/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ensurepip/__init__.pyi @@ -1,3 +1,5 @@ +__all__ = ["version", "bootstrap"] + def version() -> str: ... def bootstrap( *, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi index 426bda857193..86fa192750c4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/enum.pyi @@ -1,11 +1,62 @@ import sys import types +from _typeshed import Self from abc import ABCMeta from builtins import property as _builtins_property -from typing import Any, Iterator, Type, TypeVar +from collections.abc import Iterable, Iterator, Mapping +from typing import Any, TypeVar, Union, overload +from typing_extensions import Literal -_T = TypeVar("_T") -_S = TypeVar("_S", bound=Type[Enum]) +if sys.version_info >= (3, 11): + __all__ = [ + "EnumType", + "EnumMeta", + "Enum", + "IntEnum", + "StrEnum", + "Flag", + "IntFlag", + "ReprEnum", + "auto", + "unique", + "property", + "verify", + "FlagBoundary", + "STRICT", + "CONFORM", + "EJECT", + "KEEP", + "global_flag_repr", + "global_enum_repr", + "global_str", + "global_enum", + "EnumCheck", + "CONTINUOUS", + "NAMED_FLAGS", + "UNIQUE", + ] +else: + __all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] + +_EnumMemberT = TypeVar("_EnumMemberT") +_EnumerationT = TypeVar("_EnumerationT", bound=type[Enum]) + +# The following all work: +# >>> from 
enum import Enum +# >>> from string import ascii_lowercase +# >>> Enum('Foo', names='RED YELLOW GREEN') +# +# >>> Enum('Foo', names=[('RED', 1), ('YELLOW, 2)]) +# +# >>> Enum('Foo', names=((x for x in (ascii_lowercase[i], i)) for i in range(5))) +# +# >>> Enum('Foo', names={'RED': 1, 'YELLOW': 2}) +# +_EnumNames = Union[str, Iterable[str], Iterable[Iterable[Union[str, Any]]], Mapping[str, Any]] + +class _EnumDict(dict[str, Any]): + def __init__(self) -> None: ... + def __setitem__(self, key: str, value: Any) -> None: ... # Note: EnumMeta actually subclasses type directly, not ABCMeta. # This is a temporary workaround to allow multiple creation of enums with builtins @@ -13,20 +64,89 @@ _S = TypeVar("_S", bound=Type[Enum]) # spurious inconsistent metaclass structure. See #1595. # Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself class EnumMeta(ABCMeta): - def __iter__(self: Type[_T]) -> Iterator[_T]: ... - def __reversed__(self: Type[_T]) -> Iterator[_T]: ... - def __contains__(self: Type[Any], member: object) -> bool: ... - def __getitem__(self: Type[_T], name: str) -> _T: ... + if sys.version_info >= (3, 11): + def __new__( + metacls: type[Self], # type: ignore + cls: str, + bases: tuple[type, ...], + classdict: _EnumDict, + *, + boundary: FlagBoundary | None = ..., + _simple: bool = ..., + **kwds: Any, + ) -> Self: ... + elif sys.version_info >= (3, 9): + def __new__(metacls: type[Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any) -> Self: ... # type: ignore + else: + def __new__(metacls: type[Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> Self: ... # type: ignore + + if sys.version_info >= (3, 9): + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] + else: + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...]) -> _EnumDict: ... 
# type: ignore[override] + + def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __contains__(self: type[Any], member: object) -> bool: ... + def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ... @_builtins_property - def __members__(self: Type[_T]) -> types.MappingProxyType[str, _T]: ... + def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ... def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... + def __setattr__(self, name: str, value: Any) -> None: ... + def __delattr__(self, name: str) -> None: ... + if sys.version_info >= (3, 11): + # Simple value lookup + @overload # type: ignore[override] + def __call__(cls: type[_EnumMemberT], value: Any, names: None = ...) -> _EnumMemberT: ... + # Functional Enum API + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = ..., + qualname: str | None = ..., + type: type | None = ..., + start: int = ..., + boundary: FlagBoundary | None = ..., + ) -> type[Enum]: ... + else: + @overload # type: ignore[override] + def __call__(cls: type[_EnumMemberT], value: Any, names: None = ...) -> _EnumMemberT: ... + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = ..., + qualname: str | None = ..., + type: type | None = ..., + start: int = ..., + ) -> type[Enum]: ... _member_names_: list[str] # undocumented _member_map_: dict[str, Enum] # undocumented _value2member_map_: dict[Any, Enum] # undocumented +if sys.version_info >= (3, 11): + # In 3.11 `EnumMeta` metaclass is renamed to `EnumType`, but old name also exists. + EnumType = EnumMeta + class Enum(metaclass=EnumMeta): - name: str - value: Any + if sys.version_info >= (3, 11): + @property + def name(self) -> str: ... + @property + def value(self) -> Any: ... + else: + @types.DynamicClassAttribute + def name(self) -> str: ... 
+ @types.DynamicClassAttribute + def value(self) -> Any: ... _name_: str _value_: Any if sys.version_info >= (3, 7): @@ -37,51 +157,94 @@ class Enum(metaclass=EnumMeta): def _missing_(cls, value: object) -> Any: ... @staticmethod def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ... - def __new__(cls: Type[_T], value: object) -> _T: ... - def __repr__(self) -> str: ... - def __str__(self) -> str: ... + def __new__(cls: type[Self], value: Any) -> Self: ... def __dir__(self) -> list[str]: ... def __format__(self, format_spec: str) -> str: ... def __hash__(self) -> Any: ... def __reduce_ex__(self, proto: object) -> Any: ... -class IntEnum(int, Enum): - value: int - def __new__(cls: Type[_T], value: int | _T) -> _T: ... +if sys.version_info >= (3, 11): + class ReprEnum(Enum): ... -def unique(enumeration: _S) -> _S: ... +if sys.version_info >= (3, 11): + class IntEnum(int, ReprEnum): + _value_: int + @property + def value(self) -> int: ... + def __new__(cls: type[Self], value: int) -> Self: ... + +else: + class IntEnum(int, Enum): + _value_: int + @types.DynamicClassAttribute + def value(self) -> int: ... + def __new__(cls: type[Self], value: int) -> Self: ... + +def unique(enumeration: _EnumerationT) -> _EnumerationT: ... _auto_null: Any # subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() class auto(IntFlag): - value: Any - def __new__(cls: Type[_T]) -> _T: ... + _value_: Any + if sys.version_info >= (3, 11): + @property + def value(self) -> Any: ... + else: + @types.DynamicClassAttribute + def value(self) -> Any: ... + + def __new__(cls: type[Self]) -> Self: ... class Flag(Enum): - name: str | None # type: ignore - value: int - def __contains__(self: _T, other: _T) -> bool: ... - def __repr__(self) -> str: ... - def __str__(self) -> str: ... 
+ _name_: str | None # type: ignore[assignment] + _value_: int + if sys.version_info >= (3, 11): + @property + def name(self) -> str | None: ... # type: ignore[override] + @property + def value(self) -> int: ... + else: + @types.DynamicClassAttribute + def name(self) -> str | None: ... # type: ignore[override] + @types.DynamicClassAttribute + def value(self) -> int: ... + + def __contains__(self: Self, other: Self) -> bool: ... def __bool__(self) -> bool: ... - def __or__(self: _T, other: _T) -> _T: ... - def __and__(self: _T, other: _T) -> _T: ... - def __xor__(self: _T, other: _T) -> _T: ... - def __invert__(self: _T) -> _T: ... + def __or__(self: Self, other: Self) -> Self: ... + def __and__(self: Self, other: Self) -> Self: ... + def __xor__(self: Self, other: Self) -> Self: ... + def __invert__(self: Self) -> Self: ... class IntFlag(int, Flag): - def __new__(cls: Type[_T], value: int | _T) -> _T: ... - def __or__(self: _T, other: int | _T) -> _T: ... - def __and__(self: _T, other: int | _T) -> _T: ... - def __xor__(self: _T, other: int | _T) -> _T: ... - __ror__ = __or__ - __rand__ = __and__ - __rxor__ = __xor__ + def __new__(cls: type[Self], value: int) -> Self: ... + def __or__(self: Self, other: int) -> Self: ... + def __and__(self: Self, other: int) -> Self: ... + def __xor__(self: Self, other: int) -> Self: ... + def __ror__(self: Self, n: int) -> Self: ... + def __rand__(self: Self, n: int) -> Self: ... + def __rxor__(self: Self, n: int) -> Self: ... if sys.version_info >= (3, 11): - class StrEnum(str, Enum): - def __new__(cls: Type[_T], value: int | _T) -> _T: ... + class StrEnum(str, ReprEnum): + def __new__(cls: type[Self], value: str) -> Self: ... + _value_: str + @property + def value(self) -> str: ... 
+ + class EnumCheck(StrEnum): + CONTINUOUS: str + NAMED_FLAGS: str + UNIQUE: str + CONTINUOUS = EnumCheck.CONTINUOUS + NAMED_FLAGS = EnumCheck.NAMED_FLAGS + UNIQUE = EnumCheck.UNIQUE + + class verify: + def __init__(self, *checks: EnumCheck) -> None: ... + def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... + class FlagBoundary(StrEnum): STRICT: str CONFORM: str @@ -91,7 +254,12 @@ if sys.version_info >= (3, 11): CONFORM = FlagBoundary.CONFORM EJECT = FlagBoundary.EJECT KEEP = FlagBoundary.KEEP - class property(_builtins_property): ... - def global_enum(cls: _S) -> _S: ... + + class property(types.DynamicClassAttribute): + def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... + name: str + clsname: str + def global_str(self: Enum) -> str: ... + def global_enum(cls: _EnumerationT, update_str: bool = ...) -> _EnumerationT: ... def global_enum_repr(self: Enum) -> str: ... def global_flag_repr(self: Flag) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/fcntl.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/fcntl.pyi index ebaa31749528..69863bf580fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/fcntl.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/fcntl.pyi @@ -1,101 +1,116 @@ import sys -from _typeshed import FileDescriptorLike -from array import array -from typing import Any, Union, overload +from _typeshed import FileDescriptorLike, ReadOnlyBuffer, WriteableBuffer +from typing import Any, overload from typing_extensions import Literal -FASYNC: int -FD_CLOEXEC: int -DN_ACCESS: int -DN_ATTRIB: int -DN_CREATE: int -DN_DELETE: int -DN_MODIFY: int -DN_MULTISHOT: int -DN_RENAME: int -F_DUPFD: int -F_DUPFD_CLOEXEC: int -F_FULLFSYNC: int -F_EXLCK: int -F_GETFD: int -F_GETFL: int -F_GETLEASE: int -F_GETLK: int -F_GETLK64: int -F_GETOWN: int -F_NOCACHE: int -F_GETSIG: int -F_NOTIFY: int -F_RDLCK: int -F_SETFD: int -F_SETFL: int -F_SETLEASE: int -F_SETLK: int -F_SETLK64: int 
-F_SETLKW: int -F_SETLKW64: int -if sys.version_info >= (3, 9) and sys.platform == "linux": - F_OFD_GETLK: int - F_OFD_SETLK: int - F_OFD_SETLKW: int -F_SETOWN: int -F_SETSIG: int -F_SHLCK: int -F_UNLCK: int -F_WRLCK: int -I_ATMARK: int -I_CANPUT: int -I_CKBAND: int -I_FDINSERT: int -I_FIND: int -I_FLUSH: int -I_FLUSHBAND: int -I_GETBAND: int -I_GETCLTIME: int -I_GETSIG: int -I_GRDOPT: int -I_GWROPT: int -I_LINK: int -I_LIST: int -I_LOOK: int -I_NREAD: int -I_PEEK: int -I_PLINK: int -I_POP: int -I_PUNLINK: int -I_PUSH: int -I_RECVFD: int -I_SENDFD: int -I_SETCLTIME: int -I_SETSIG: int -I_SRDOPT: int -I_STR: int -I_SWROPT: int -I_UNLINK: int -LOCK_EX: int -LOCK_MAND: int -LOCK_NB: int -LOCK_READ: int -LOCK_RW: int -LOCK_SH: int -LOCK_UN: int -LOCK_WRITE: int +if sys.platform != "win32": + FASYNC: int + FD_CLOEXEC: int + F_DUPFD: int + F_DUPFD_CLOEXEC: int + F_GETFD: int + F_GETFL: int + F_GETLK: int + F_GETOWN: int + F_RDLCK: int + F_SETFD: int + F_SETFL: int + F_SETLK: int + F_SETLKW: int + F_SETOWN: int + F_UNLCK: int + F_WRLCK: int + if sys.platform == "darwin": + F_FULLFSYNC: int + F_NOCACHE: int + if sys.version_info >= (3, 9): + F_GETPATH: int + if sys.platform == "linux": + F_SETLKW64: int + F_SETSIG: int + F_SHLCK: int + F_SETLK64: int + F_SETLEASE: int + F_GETSIG: int + F_NOTIFY: int + F_EXLCK: int + F_GETLEASE: int + F_GETLK64: int + if sys.version_info >= (3, 8): + F_ADD_SEALS: int + F_GET_SEALS: int + F_SEAL_GROW: int + F_SEAL_SEAL: int + F_SEAL_SHRINK: int + F_SEAL_WRITE: int + if sys.version_info >= (3, 9): + F_OFD_GETLK: int + F_OFD_SETLK: int + F_OFD_SETLKW: int + if sys.version_info >= (3, 10): + F_GETPIPE_SZ: int + F_SETPIPE_SZ: int -@overload -def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = ...) -> int: ... -@overload -def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: bytes) -> bytes: ... 
+ DN_ACCESS: int + DN_ATTRIB: int + DN_CREATE: int + DN_DELETE: int + DN_MODIFY: int + DN_MULTISHOT: int + DN_RENAME: int -_ReadOnlyBuffer = bytes -_WritableBuffer = Union[bytearray, memoryview, array[Any]] + LOCK_EX: int + LOCK_NB: int + LOCK_SH: int + LOCK_UN: int + if sys.platform == "linux": + LOCK_MAND: int + LOCK_READ: int + LOCK_RW: int + LOCK_WRITE: int -@overload -def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = ..., __mutate_flag: bool = ...) -> int: ... -@overload -def ioctl(__fd: FileDescriptorLike, __request: int, __arg: _WritableBuffer, __mutate_flag: Literal[True] = ...) -> int: ... -@overload -def ioctl(__fd: FileDescriptorLike, __request: int, __arg: _WritableBuffer, __mutate_flag: Literal[False]) -> bytes: ... -@overload -def ioctl(__fd: FileDescriptorLike, __request: int, __arg: _ReadOnlyBuffer, __mutate_flag: bool = ...) -> bytes: ... -def flock(__fd: FileDescriptorLike, __operation: int) -> None: ... -def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = ..., __start: int = ..., __whence: int = ...) -> Any: ... + # These are highly problematic, they might be present or not, depends on the specific OS. + if sys.platform == "linux": + I_ATMARK: int + I_CANPUT: int + I_CKBAND: int + I_FDINSERT: int + I_FIND: int + I_FLUSH: int + I_FLUSHBAND: int + I_GETBAND: int + I_GETCLTIME: int + I_GETSIG: int + I_GRDOPT: int + I_GWROPT: int + I_LINK: int + I_LIST: int + I_LOOK: int + I_NREAD: int + I_PEEK: int + I_PLINK: int + I_POP: int + I_PUNLINK: int + I_PUSH: int + I_RECVFD: int + I_SENDFD: int + I_SETCLTIME: int + I_SETSIG: int + I_SRDOPT: int + I_STR: int + I_SWROPT: int + I_UNLINK: int + @overload + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: int = ...) -> int: ... + @overload + def fcntl(__fd: FileDescriptorLike, __cmd: int, __arg: bytes) -> bytes: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: int = ..., __mutate_flag: bool = ...) -> int: ... 
+ @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[True] = ...) -> int: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: WriteableBuffer, __mutate_flag: Literal[False]) -> bytes: ... + @overload + def ioctl(__fd: FileDescriptorLike, __request: int, __arg: ReadOnlyBuffer, __mutate_flag: bool = ...) -> bytes: ... + def flock(__fd: FileDescriptorLike, __operation: int) -> None: ... + def lockf(__fd: FileDescriptorLike, __cmd: int, __len: int = ..., __start: int = ..., __whence: int = ...) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/filecmp.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/filecmp.pyi index 0cc92ed9e3ed..7c606af40791 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/filecmp.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/filecmp.pyi @@ -1,17 +1,24 @@ import sys from _typeshed import StrOrBytesPath from os import PathLike -from typing import Any, AnyStr, Callable, Generic, Iterable, Sequence, Tuple +from typing import Any, AnyStr, Callable, Generic, Iterable, Sequence +from typing_extensions import Literal if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = ["clear_cache", "cmp", "dircmp", "cmpfiles", "DEFAULT_IGNORES"] + DEFAULT_IGNORES: list[str] +BUFSIZE: Literal[8192] def cmp(f1: StrOrBytesPath, f2: StrOrBytesPath, shallow: int | bool = ...) -> bool: ... def cmpfiles( - a: AnyStr | PathLike[AnyStr], b: AnyStr | PathLike[AnyStr], common: Iterable[AnyStr], shallow: int | bool = ... -) -> Tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... + a: AnyStr | PathLike[AnyStr], + b: AnyStr | PathLike[AnyStr], + common: Iterable[AnyStr | PathLike[AnyStr]], + shallow: int | bool = ..., +) -> tuple[list[AnyStr], list[AnyStr], list[AnyStr]]: ... 
class dircmp(Generic[AnyStr]): def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/fileinput.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/fileinput.pyi index 3c14b736ca50..787f75b8950a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/fileinput.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/fileinput.pyi @@ -2,6 +2,21 @@ import sys from _typeshed import Self, StrOrBytesPath from typing import IO, Any, AnyStr, Callable, Generic, Iterable, Iterator +__all__ = [ + "input", + "close", + "nextfile", + "filename", + "lineno", + "filelineno", + "fileno", + "isfirstline", + "isstdin", + "FileInput", + "hook_compressed", + "hook_encoded", +] + if sys.version_info >= (3, 9): from types import GenericAlias @@ -46,7 +61,7 @@ def fileno() -> int: ... def isfirstline() -> bool: ... def isstdin() -> bool: ... -class FileInput(Iterable[AnyStr], Generic[AnyStr]): +class FileInput(Iterator[AnyStr], Generic[AnyStr]): if sys.version_info >= (3, 10): def __init__( self, @@ -79,13 +94,16 @@ class FileInput(Iterable[AnyStr], Generic[AnyStr]): mode: str = ..., openhook: Callable[[StrOrBytesPath, str], IO[AnyStr]] = ..., ) -> None: ... + def __del__(self) -> None: ... def close(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... - def __iter__(self) -> Iterator[AnyStr]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> AnyStr: ... - def __getitem__(self, i: int) -> AnyStr: ... + if sys.version_info < (3, 11): + def __getitem__(self, i: int) -> AnyStr: ... + def nextfile(self) -> None: ... def readline(self) -> AnyStr: ... def filename(self) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/fnmatch.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/fnmatch.pyi index 1cbcf00729ed..8351fce59ebb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/fnmatch.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/fnmatch.pyi @@ -1,5 +1,7 @@ from typing import AnyStr, Iterable +__all__ = ["filter", "fnmatch", "fnmatchcase", "translate"] + def fnmatch(name: AnyStr, pat: AnyStr) -> bool: ... def fnmatchcase(name: AnyStr, pat: AnyStr) -> bool: ... def filter(names: Iterable[AnyStr], pat: AnyStr) -> list[AnyStr]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/formatter.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/formatter.pyi index 7c3b97688dbd..f5d8348d08a1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/formatter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/formatter.pyi @@ -1,8 +1,8 @@ -from typing import IO, Any, Iterable, Tuple +from typing import IO, Any, Iterable AS_IS: None -_FontType = Tuple[str, bool, bool, bool] -_StylesType = Tuple[Any, ...] +_FontType = tuple[str, bool, bool, bool] +_StylesType = tuple[Any, ...] class NullFormatter: writer: NullWriter | None @@ -68,7 +68,7 @@ class NullWriter: def new_font(self, font: _FontType) -> None: ... def new_margin(self, margin: int, level: int) -> None: ... def new_spacing(self, spacing: str | None) -> None: ... - def new_styles(self, styles: Tuple[Any, ...]) -> None: ... + def new_styles(self, styles: tuple[Any, ...]) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args: Any, **kw: Any) -> None: ... @@ -81,7 +81,7 @@ class AbstractWriter(NullWriter): def new_font(self, font: _FontType) -> None: ... def new_margin(self, margin: int, level: int) -> None: ... def new_spacing(self, spacing: str | None) -> None: ... - def new_styles(self, styles: Tuple[Any, ...]) -> None: ... 
+ def new_styles(self, styles: tuple[Any, ...]) -> None: ... def send_paragraph(self, blankline: int) -> None: ... def send_line_break(self) -> None: ... def send_hor_rule(self, *args: Any, **kw: Any) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/fractions.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/fractions.pyi index a5d2e21e9a29..117990fb58a0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/fractions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/fractions.pyi @@ -1,13 +1,16 @@ import sys +from _typeshed import Self from decimal import Decimal from numbers import Integral, Rational, Real -from typing import Tuple, Type, TypeVar, Union, overload +from typing import Any, Union, overload from typing_extensions import Literal _ComparableNum = Union[int, float, Decimal, Real] -_T = TypeVar("_T") -if sys.version_info < (3, 9): +if sys.version_info >= (3, 9): + __all__ = ["Fraction"] +else: + __all__ = ["Fraction", "gcd"] @overload def gcd(a: int, b: int) -> int: ... @overload @@ -20,17 +23,18 @@ if sys.version_info < (3, 9): class Fraction(Rational): @overload def __new__( - cls: Type[_T], numerator: int | Rational = ..., denominator: int | Rational | None = ..., *, _normalize: bool = ... - ) -> _T: ... + cls: type[Self], numerator: int | Rational = ..., denominator: int | Rational | None = ..., *, _normalize: bool = ... + ) -> Self: ... @overload - def __new__(cls: Type[_T], __value: float | Decimal | str, *, _normalize: bool = ...) -> _T: ... + def __new__(cls: type[Self], __value: float | Decimal | str, *, _normalize: bool = ...) -> Self: ... @classmethod - def from_float(cls, f: float) -> Fraction: ... + def from_float(cls: type[Self], f: float) -> Self: ... @classmethod - def from_decimal(cls, dec: Decimal) -> Fraction: ... + def from_decimal(cls: type[Self], dec: Decimal) -> Self: ... def limit_denominator(self, max_denominator: int = ...) -> Fraction: ... 
if sys.version_info >= (3, 8): - def as_integer_ratio(self) -> Tuple[int, int]: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + @property def numerator(self) -> int: ... @property @@ -100,13 +104,13 @@ class Fraction(Rational): @overload def __rmod__(self, other: float) -> float: ... @overload - def __divmod__(self, other: int | Fraction) -> Tuple[int, Fraction]: ... + def __divmod__(self, other: int | Fraction) -> tuple[int, Fraction]: ... @overload - def __divmod__(self, other: float) -> Tuple[float, Fraction]: ... + def __divmod__(self, other: float) -> tuple[float, Fraction]: ... @overload - def __rdivmod__(self, other: int | Fraction) -> Tuple[int, Fraction]: ... + def __rdivmod__(self, other: int | Fraction) -> tuple[int, Fraction]: ... @overload - def __rdivmod__(self, other: float) -> Tuple[float, Fraction]: ... + def __rdivmod__(self, other: float) -> tuple[float, Fraction]: ... @overload def __pow__(self, other: int) -> Fraction: ... @overload @@ -134,6 +138,10 @@ class Fraction(Rational): def __le__(self, other: _ComparableNum) -> bool: ... def __ge__(self, other: _ComparableNum) -> bool: ... def __bool__(self) -> bool: ... + def __copy__(self: Self) -> Self: ... + def __deepcopy__(self: Self, memo: Any) -> Self: ... + if sys.version_info >= (3, 11): + def __int__(self) -> int: ... 
# Not actually defined within fractions.py, but provides more useful # overrides @property diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ftplib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ftplib.pyi index 4275888f5fc6..4a5dad0dd14f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ftplib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ftplib.pyi @@ -1,15 +1,18 @@ +import sys from _typeshed import Self, SupportsRead, SupportsReadline from socket import socket from ssl import SSLContext from types import TracebackType -from typing import Any, Callable, Iterable, Iterator, TextIO, Tuple, Type +from typing import Any, Callable, Iterable, Iterator, TextIO from typing_extensions import Literal -MSG_OOB: int -FTP_PORT: int -MAXLINE: int -CRLF: str -B_CRLF: bytes +__all__ = ["FTP", "error_reply", "error_temp", "error_perm", "error_proto", "all_errors", "FTP_TLS"] + +MSG_OOB: Literal[1] +FTP_PORT: Literal[21] +MAXLINE: Literal[8192] +CRLF: Literal["\r\n"] +B_CRLF: Literal[b"\r\n"] class Error(Exception): ... class error_reply(Error): ... @@ -17,7 +20,7 @@ class error_temp(Error): ... class error_perm(Error): ... class error_proto(Error): ... -all_errors: Tuple[Type[Exception], ...] +all_errors: tuple[type[Exception], ...] class FTP: debugging: int @@ -34,20 +37,34 @@ class FTP: encoding: str def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> None: ... - source_address: Tuple[str, int] | None - def __init__( - self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., - timeout: float = ..., - source_address: Tuple[str, int] | None = ..., + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... 
+ source_address: tuple[str, int] | None + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = ..., + user: str = ..., + passwd: str = ..., + acct: str = ..., + timeout: float = ..., + source_address: tuple[str, int] | None = ..., + *, + encoding: str = ..., + ) -> None: ... + else: + def __init__( + self, + host: str = ..., + user: str = ..., + passwd: str = ..., + acct: str = ..., + timeout: float = ..., + source_address: tuple[str, int] | None = ..., + ) -> None: ... + def connect( - self, host: str = ..., port: int = ..., timeout: float = ..., source_address: Tuple[str, int] | None = ... + self, host: str = ..., port: int = ..., timeout: float = ..., source_address: tuple[str, int] | None = ... ) -> str: ... def getwelcome(self) -> str: ... def set_debuglevel(self, level: int) -> None: ... @@ -66,10 +83,10 @@ class FTP: def sendport(self, host: str, port: int) -> str: ... def sendeprt(self, host: str, port: int) -> str: ... def makeport(self) -> socket: ... - def makepasv(self) -> Tuple[str, int]: ... + def makepasv(self) -> tuple[str, int]: ... def login(self, user: str = ..., passwd: str = ..., acct: str = ...) -> str: ... # In practice, `rest` rest can actually be anything whose str() is an integer sequence, so to make it simple we allow integers. - def ntransfercmd(self, cmd: str, rest: int | str | None = ...) -> Tuple[socket, int]: ... + def ntransfercmd(self, cmd: str, rest: int | str | None = ...) -> tuple[socket, int]: ... def transfercmd(self, cmd: str, rest: int | str | None = ...) -> socket: ... def retrbinary( self, cmd: str, callback: Callable[[bytes], Any], blocksize: int = ..., rest: int | str | None = ... @@ -88,7 +105,7 @@ class FTP: def nlst(self, *args: str) -> list[str]: ... # Technically only the last arg can be a Callable but ... def dir(self, *args: str | Callable[[str], None]) -> None: ... - def mlsd(self, path: str = ..., facts: Iterable[str] = ...) -> Iterator[Tuple[str, dict[str, str]]]: ... 
+ def mlsd(self, path: str = ..., facts: Iterable[str] = ...) -> Iterator[tuple[str, dict[str, str]]]: ... def rename(self, fromname: str, toname: str) -> str: ... def delete(self, filename: str) -> str: ... def cwd(self, dirname: str) -> str: ... @@ -100,18 +117,34 @@ class FTP: def close(self) -> None: ... class FTP_TLS(FTP): - def __init__( - self, - host: str = ..., - user: str = ..., - passwd: str = ..., - acct: str = ..., - keyfile: str | None = ..., - certfile: str | None = ..., - context: SSLContext | None = ..., - timeout: float = ..., - source_address: Tuple[str, int] | None = ..., - ) -> None: ... + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = ..., + user: str = ..., + passwd: str = ..., + acct: str = ..., + keyfile: str | None = ..., + certfile: str | None = ..., + context: SSLContext | None = ..., + timeout: float = ..., + source_address: tuple[str, int] | None = ..., + *, + encoding: str = ..., + ) -> None: ... + else: + def __init__( + self, + host: str = ..., + user: str = ..., + passwd: str = ..., + acct: str = ..., + keyfile: str | None = ..., + certfile: str | None = ..., + context: SSLContext | None = ..., + timeout: float = ..., + source_address: tuple[str, int] | None = ..., + ) -> None: ... ssl_version: int keyfile: str | None certfile: str | None @@ -123,8 +156,8 @@ class FTP_TLS(FTP): def ccc(self) -> str: ... def parse150(resp: str) -> int | None: ... # undocumented -def parse227(resp: str) -> Tuple[str, int]: ... # undocumented -def parse229(resp: str, peer: Any) -> Tuple[str, int]: ... # undocumented +def parse227(resp: str) -> tuple[str, int]: ... # undocumented +def parse229(resp: str, peer: Any) -> tuple[str, int]: ... # undocumented def parse257(resp: str) -> str: ... # undocumented def ftpcp( source: FTP, sourcename: str, target: FTP, targetname: str = ..., type: Literal["A", "I"] = ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/functools.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/functools.pyi index 6fe4bb6f8e9e..042d7409c67e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/functools.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/functools.pyi @@ -1,11 +1,59 @@ import sys import types -from _typeshed import SupportsItems, SupportsLessThan -from typing import Any, Callable, Generic, Hashable, Iterable, NamedTuple, Sequence, Set, Sized, Tuple, Type, TypeVar, overload +from _typeshed import Self, SupportsAllComparisons, SupportsItems +from typing import Any, Callable, Generic, Hashable, Iterable, NamedTuple, Sequence, Sized, TypeVar, overload +from typing_extensions import Literal, final if sys.version_info >= (3, 9): from types import GenericAlias + __all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cache", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", + "singledispatchmethod", + "cached_property", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", + "singledispatchmethod", + "cached_property", + ] +else: + __all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", + ] + _AnyCallable = Callable[..., Any] _T = TypeVar("_T") @@ -22,11 +70,14 @@ class _CacheInfo(NamedTuple): maxsize: int currsize: int +@final class _lru_cache_wrapper(Generic[_T]): __wrapped__: Callable[..., _T] def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... def cache_info(self) -> _CacheInfo: ... def cache_clear(self) -> None: ... 
+ def __copy__(self) -> _lru_cache_wrapper[_T]: ... + def __deepcopy__(self, __memo: Any) -> _lru_cache_wrapper[_T]: ... if sys.version_info >= (3, 8): @overload @@ -37,20 +88,22 @@ if sys.version_info >= (3, 8): else: def lru_cache(maxsize: int | None = ..., typed: bool = ...) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... -WRAPPER_ASSIGNMENTS: Sequence[str] -WRAPPER_UPDATES: Sequence[str] +WRAPPER_ASSIGNMENTS: tuple[ + Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"], +] +WRAPPER_UPDATES: tuple[Literal["__dict__"]] def update_wrapper(wrapper: _T, wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> _T: ... def wraps(wrapped: _AnyCallable, assigned: Sequence[str] = ..., updated: Sequence[str] = ...) -> Callable[[_T], _T]: ... -def total_ordering(cls: Type[_T]) -> Type[_T]: ... -def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsLessThan]: ... +def total_ordering(cls: type[_T]) -> type[_T]: ... +def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... class partial(Generic[_T]): func: Callable[..., _T] - args: Tuple[Any, ...] + args: tuple[Any, ...] keywords: dict[str, Any] - def __init__(self, func: Callable[..., _T], *args: Any, **kwargs: Any) -> None: ... - def __call__(self, *args: Any, **kwargs: Any) -> _T: ... + def __new__(cls: type[Self], __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -59,13 +112,17 @@ _Descriptor = Any class partialmethod(Generic[_T]): func: Callable[..., _T] | _Descriptor - args: Tuple[Any, ...] + args: tuple[Any, ...] keywords: dict[str, Any] @overload def __init__(self, __func: Callable[..., _T], *args: Any, **keywords: Any) -> None: ... 
@overload def __init__(self, __func: _Descriptor, *args: Any, **keywords: Any) -> None: ... - def __get__(self, obj: Any, cls: Type[Any]) -> Callable[..., _T]: ... + if sys.version_info >= (3, 8): + def __get__(self, obj: Any, cls: type[Any] | None = ...) -> Callable[..., _T]: ... + else: + def __get__(self, obj: Any, cls: type[Any] | None) -> Callable[..., _T]: ... + @property def __isabstractmethod__(self) -> bool: ... if sys.version_info >= (3, 9): @@ -77,16 +134,16 @@ class _SingleDispatchCallable(Generic[_T]): # @fun.register(complex) # def _(arg, verbose=False): ... @overload - def register(self, cls: Type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: type[Any], func: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... # @fun.register # def _(arg: int, verbose=False): @overload def register(self, cls: Callable[..., _T], func: None = ...) -> Callable[..., _T]: ... # fun.register(int, lambda x: x) @overload - def register(self, cls: Type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... def _clear_cache(self) -> None: ... - def __call__(self, *args: Any, **kwargs: Any) -> _T: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... @@ -95,22 +152,25 @@ if sys.version_info >= (3, 8): dispatcher: _SingleDispatchCallable[_T] func: Callable[..., _T] def __init__(self, func: Callable[..., _T]) -> None: ... + @property + def __isabstractmethod__(self) -> bool: ... @overload - def register(self, cls: Type[Any], method: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + def register(self, cls: type[Any], method: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... @overload def register(self, cls: Callable[..., _T], method: None = ...) -> Callable[..., _T]: ... 
@overload - def register(self, cls: Type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... - def __call__(self, *args: Any, **kwargs: Any) -> _T: ... + def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = ...) -> Callable[..., _T]: ... + class cached_property(Generic[_T]): func: Callable[[Any], _T] attrname: str | None def __init__(self, func: Callable[[Any], _T]) -> None: ... @overload - def __get__(self, instance: None, owner: Type[Any] | None = ...) -> cached_property[_T]: ... + def __get__(self, instance: None, owner: type[Any] | None = ...) -> cached_property[_T]: ... @overload - def __get__(self, instance: object, owner: Type[Any] | None = ...) -> _T: ... - def __set_name__(self, owner: Type[Any], name: str) -> None: ... + def __get__(self, instance: object, owner: type[Any] | None = ...) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -118,11 +178,11 @@ if sys.version_info >= (3, 9): def cache(__user_function: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ... def _make_key( - args: Tuple[Hashable, ...], + args: tuple[Hashable, ...], kwds: SupportsItems[Any, Any], typed: bool, - kwd_mark: Tuple[object, ...] = ..., - fasttypes: Set[type] = ..., + kwd_mark: tuple[object, ...] 
= ..., + fasttypes: set[type] = ..., tuple: type = ..., type: Any = ..., len: Callable[[Sized], int] = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/gc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/gc.pyi index 6eea01848939..7c15e0f5b0a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/gc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/gc.pyi @@ -1,18 +1,22 @@ import sys -from typing import Any, Tuple - -DEBUG_COLLECTABLE: int -DEBUG_LEAK: int -DEBUG_SAVEALL: int -DEBUG_STATS: int -DEBUG_UNCOLLECTABLE: int -callbacks: list[Any] +from typing import Any, Callable +from typing_extensions import Literal + +DEBUG_COLLECTABLE: Literal[2] +DEBUG_LEAK: Literal[38] +DEBUG_SAVEALL: Literal[32] +DEBUG_STATS: Literal[1] +DEBUG_UNCOLLECTABLE: Literal[4] + +_CallbackType = Callable[[Literal["start", "stop"], dict[str, int]], object] + +callbacks: list[_CallbackType] garbage: list[Any] def collect(generation: int = ...) -> int: ... def disable() -> None: ... def enable() -> None: ... -def get_count() -> Tuple[int, int, int]: ... +def get_count() -> tuple[int, int, int]: ... def get_debug() -> int: ... if sys.version_info >= (3, 8): @@ -29,7 +33,7 @@ if sys.version_info >= (3, 7): def get_referents(*objs: Any) -> list[Any]: ... def get_referrers(*objs: Any) -> list[Any]: ... def get_stats() -> list[dict[str, Any]]: ... -def get_threshold() -> Tuple[int, int, int]: ... +def get_threshold() -> tuple[int, int, int]: ... def is_tracked(__obj: Any) -> bool: ... 
if sys.version_info >= (3, 9): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/genericpath.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/genericpath.pyi index 1c7be922e941..3abedda262ea 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/genericpath.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/genericpath.pyi @@ -1,8 +1,22 @@ import os -from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsLessThanT -from typing import Sequence, Tuple, overload +from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRichComparisonT +from typing import Sequence, overload from typing_extensions import Literal +__all__ = [ + "commonprefix", + "exists", + "getatime", + "getctime", + "getmtime", + "getsize", + "isdir", + "isfile", + "samefile", + "sameopenfile", + "samestat", +] + # All overloads can return empty string. Ideally, Literal[""] would be a valid # Iterable[T], so that list[T] | Literal[""] could be used as a return # type. But because this only works when T is str, we need Sequence[T] instead. @@ -11,19 +25,19 @@ def commonprefix(m: Sequence[StrPath]) -> str: ... @overload def commonprefix(m: Sequence[BytesPath]) -> bytes | Literal[""]: ... @overload -def commonprefix(m: Sequence[list[SupportsLessThanT]]) -> Sequence[SupportsLessThanT]: ... +def commonprefix(m: Sequence[list[SupportsRichComparisonT]]) -> Sequence[SupportsRichComparisonT]: ... @overload -def commonprefix(m: Sequence[Tuple[SupportsLessThanT, ...]]) -> Sequence[SupportsLessThanT]: ... -def exists(path: StrOrBytesPath) -> bool: ... -def getsize(filename: StrOrBytesPath) -> int: ... -def isfile(path: StrOrBytesPath) -> bool: ... -def isdir(s: StrOrBytesPath) -> bool: ... +def commonprefix(m: Sequence[tuple[SupportsRichComparisonT, ...]]) -> Sequence[SupportsRichComparisonT]: ... +def exists(path: StrOrBytesPath | int) -> bool: ... +def getsize(filename: StrOrBytesPath | int) -> int: ... 
+def isfile(path: StrOrBytesPath | int) -> bool: ... +def isdir(s: StrOrBytesPath | int) -> bool: ... # These return float if os.stat_float_times() == True, # but int is a subclass of float. -def getatime(filename: StrOrBytesPath) -> float: ... -def getmtime(filename: StrOrBytesPath) -> float: ... -def getctime(filename: StrOrBytesPath) -> float: ... -def samefile(f1: StrOrBytesPath, f2: StrOrBytesPath) -> bool: ... +def getatime(filename: StrOrBytesPath | int) -> float: ... +def getmtime(filename: StrOrBytesPath | int) -> float: ... +def getctime(filename: StrOrBytesPath | int) -> float: ... +def samefile(f1: StrOrBytesPath | int, f2: StrOrBytesPath | int) -> bool: ... def sameopenfile(fp1: int, fp2: int) -> bool: ... def samestat(s1: os.stat_result, s2: os.stat_result) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/getopt.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/getopt.pyi index 6ae226f52972..42ddb1cb7020 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/getopt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/getopt.pyi @@ -1,3 +1,5 @@ +__all__ = ["GetoptError", "error", "getopt", "gnu_getopt"] + def getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tuple[list[tuple[str, str]], list[str]]: ... def gnu_getopt(args: list[str], shortopts: str, longopts: list[str] = ...) -> tuple[list[tuple[str, str]], list[str]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/getpass.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/getpass.pyi index 27f4c6a9b635..153db2f4cb9e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/getpass.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/getpass.pyi @@ -1,5 +1,7 @@ from typing import TextIO +__all__ = ["getpass", "getuser", "GetPassWarning"] + def getpass(prompt: str = ..., stream: TextIO | None = ...) -> str: ... def getuser() -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/gettext.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/gettext.pyi index b408d3f7485c..1f3ef67ab0f3 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/gettext.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/gettext.pyi @@ -1,8 +1,72 @@ import sys from _typeshed import StrPath -from typing import IO, Any, Container, Iterable, Sequence, Type, TypeVar, overload +from typing import IO, Any, Container, Iterable, Sequence, TypeVar, overload from typing_extensions import Literal +if sys.version_info >= (3, 11): + __all__ = [ + "NullTranslations", + "GNUTranslations", + "Catalog", + "bindtextdomain", + "find", + "translation", + "install", + "textdomain", + "dgettext", + "dngettext", + "gettext", + "ngettext", + "pgettext", + "dpgettext", + "npgettext", + "dnpgettext", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "NullTranslations", + "GNUTranslations", + "Catalog", + "find", + "translation", + "install", + "textdomain", + "bindtextdomain", + "bind_textdomain_codeset", + "dgettext", + "dngettext", + "gettext", + "lgettext", + "ldgettext", + "ldngettext", + "lngettext", + "ngettext", + "pgettext", + "dpgettext", + "npgettext", + "dnpgettext", + ] +else: + __all__ = [ + "NullTranslations", + "GNUTranslations", + "Catalog", + "find", + "translation", + "install", + "textdomain", + "bindtextdomain", + "bind_textdomain_codeset", + "dgettext", + "dngettext", + "gettext", + "lgettext", + "ldgettext", + "ldngettext", + "lngettext", + "ngettext", + ] + class NullTranslations: def __init__(self, fp: IO[str] | None = ...) -> None: ... def _parse(self, fp: IO[str]) -> None: ... @@ -14,10 +78,13 @@ class NullTranslations: if sys.version_info >= (3, 8): def pgettext(self, context: str, message: str) -> str: ... def npgettext(self, context: str, msgid1: str, msgid2: str, n: int) -> str: ... + def info(self) -> Any: ... def charset(self) -> Any: ... 
- def output_charset(self) -> Any: ... - def set_output_charset(self, charset: str) -> None: ... + if sys.version_info < (3, 11): + def output_charset(self) -> Any: ... + def set_output_charset(self, charset: str) -> None: ... + def install(self, names: Container[str] | None = ...) -> None: ... class GNUTranslations(NullTranslations): @@ -26,51 +93,84 @@ class GNUTranslations(NullTranslations): CONTEXT: str VERSIONS: Sequence[int] +@overload # ignores incompatible overloads +def find( # type: ignore[misc] + domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: Literal[False] = ... +) -> str | None: ... +@overload +def find( + domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: Literal[True] = ... +) -> list[str]: ... +@overload def find(domain: str, localedir: StrPath | None = ..., languages: Iterable[str] | None = ..., all: bool = ...) -> Any: ... _T = TypeVar("_T") -@overload -def translation( - domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: None = ..., - fallback: bool = ..., - codeset: str | None = ..., -) -> NullTranslations: ... -@overload -def translation( - domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: Type[_T] = ..., - fallback: Literal[False] = ..., - codeset: str | None = ..., -) -> _T: ... -@overload -def translation( - domain: str, - localedir: StrPath | None = ..., - languages: Iterable[str] | None = ..., - class_: Type[Any] = ..., - fallback: Literal[True] = ..., - codeset: str | None = ..., -) -> Any: ... -def install( - domain: str, localedir: StrPath | None = ..., codeset: str | None = ..., names: Container[str] | None = ... -) -> None: ... +if sys.version_info >= (3, 11): + @overload + def translation( + domain: str, + localedir: StrPath | None = ..., + languages: Iterable[str] | None = ..., + class_: None = ..., + fallback: bool = ..., + ) -> NullTranslations: ... 
+ @overload + def translation( + domain: str, + localedir: StrPath | None = ..., + languages: Iterable[str] | None = ..., + class_: type[_T] = ..., + fallback: Literal[False] = ..., + ) -> _T: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = ..., + languages: Iterable[str] | None = ..., + class_: type[Any] = ..., + fallback: Literal[True] = ..., + ) -> Any: ... + def install(domain: str, localedir: StrPath | None = ..., names: Container[str] | None = ...) -> None: ... + +else: + @overload + def translation( + domain: str, + localedir: StrPath | None = ..., + languages: Iterable[str] | None = ..., + class_: None = ..., + fallback: bool = ..., + codeset: str | None = ..., + ) -> NullTranslations: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = ..., + languages: Iterable[str] | None = ..., + class_: type[_T] = ..., + fallback: Literal[False] = ..., + codeset: str | None = ..., + ) -> _T: ... + @overload + def translation( + domain: str, + localedir: StrPath | None = ..., + languages: Iterable[str] | None = ..., + class_: type[Any] = ..., + fallback: Literal[True] = ..., + codeset: str | None = ..., + ) -> Any: ... + def install( + domain: str, localedir: StrPath | None = ..., codeset: str | None = ..., names: Container[str] | None = ... + ) -> None: ... + def textdomain(domain: str | None = ...) -> str: ... def bindtextdomain(domain: str, localedir: StrPath | None = ...) -> str: ... -def bind_textdomain_codeset(domain: str, codeset: str | None = ...) -> str: ... def dgettext(domain: str, message: str) -> str: ... -def ldgettext(domain: str, message: str) -> str: ... def dngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... -def ldngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... def gettext(message: str) -> str: ... -def lgettext(message: str) -> str: ... def ngettext(msgid1: str, msgid2: str, n: int) -> str: ... -def lngettext(msgid1: str, msgid2: str, n: int) -> str: ... 
if sys.version_info >= (3, 8): def pgettext(context: str, message: str) -> str: ... @@ -78,4 +178,11 @@ if sys.version_info >= (3, 8): def npgettext(context: str, msgid1: str, msgid2: str, n: int) -> str: ... def dnpgettext(domain: str, context: str, msgid1: str, msgid2: str, n: int) -> str: ... +if sys.version_info < (3, 11): + def lgettext(message: str) -> str: ... + def ldgettext(domain: str, message: str) -> str: ... + def lngettext(msgid1: str, msgid2: str, n: int) -> str: ... + def ldngettext(domain: str, msgid1: str, msgid2: str, n: int) -> str: ... + def bind_textdomain_codeset(domain: str, codeset: str | None = ...) -> str: ... + Catalog = translation diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/glob.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/glob.pyi index c1cd176f500c..ced0ceceb205 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/glob.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/glob.pyi @@ -2,10 +2,30 @@ import sys from _typeshed import StrOrBytesPath from typing import AnyStr, Iterator +__all__ = ["escape", "glob", "iglob"] + def glob0(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... def glob1(dirname: AnyStr, pattern: AnyStr) -> list[AnyStr]: ... -if sys.version_info >= (3, 10): +if sys.version_info >= (3, 11): + def glob( + pathname: AnyStr, + *, + root_dir: StrOrBytesPath | None = ..., + dir_fd: int | None = ..., + recursive: bool = ..., + include_hidden: bool = ..., + ) -> list[AnyStr]: ... + def iglob( + pathname: AnyStr, + *, + root_dir: StrOrBytesPath | None = ..., + dir_fd: int | None = ..., + recursive: bool = ..., + include_hidden: bool = ..., + ) -> Iterator[AnyStr]: ... + +elif sys.version_info >= (3, 10): def glob( pathname: AnyStr, *, root_dir: StrOrBytesPath | None = ..., dir_fd: int | None = ..., recursive: bool = ... ) -> list[AnyStr]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/graphlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/graphlib.pyi index 0872af4a54a4..cae2a07e95c6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/graphlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/graphlib.pyi @@ -1,8 +1,14 @@ +import sys from _typeshed import SupportsItems -from typing import Generic, Iterable, Tuple, TypeVar +from typing import Any, Generic, Iterable, TypeVar + +__all__ = ["TopologicalSorter", "CycleError"] _T = TypeVar("_T") +if sys.version_info >= (3, 11): + from types import GenericAlias + class TopologicalSorter(Generic[_T]): def __init__(self, graph: SupportsItems[_T, Iterable[_T]] | None = ...) -> None: ... def add(self, node: _T, *predecessors: _T) -> None: ... @@ -10,7 +16,9 @@ class TopologicalSorter(Generic[_T]): def is_active(self) -> bool: ... def __bool__(self) -> bool: ... def done(self, *nodes: _T) -> None: ... - def get_ready(self) -> Tuple[_T, ...]: ... + def get_ready(self) -> tuple[_T, ...]: ... def static_order(self) -> Iterable[_T]: ... + if sys.version_info >= (3, 11): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... class CycleError(ValueError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/grp.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/grp.pyi index 08cbe6b86476..732c36b3dda7 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/grp.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/grp.pyi @@ -1,11 +1,20 @@ -from typing import NamedTuple +import sys +from _typeshed import structseq +from typing import Any, Optional +from typing_extensions import final -class struct_group(NamedTuple): - gr_name: str - gr_passwd: str | None - gr_gid: int - gr_mem: list[str] +if sys.platform != "win32": + @final + class struct_group(structseq[Any], tuple[str, Optional[str], int, list[str]]): + @property + def gr_name(self) -> str: ... 
+ @property + def gr_passwd(self) -> str | None: ... + @property + def gr_gid(self) -> int: ... + @property + def gr_mem(self) -> list[str]: ... -def getgrall() -> list[struct_group]: ... -def getgrgid(id: int) -> struct_group: ... -def getgrnam(name: str) -> struct_group: ... + def getgrall() -> list[struct_group]: ... + def getgrgid(id: int) -> struct_group: ... + def getgrnam(name: str) -> struct_group: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/gzip.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/gzip.pyi index 070ceac48282..7347949ae865 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/gzip.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/gzip.pyi @@ -6,6 +6,11 @@ from io import FileIO from typing import Any, Protocol, TextIO, overload from typing_extensions import Literal +if sys.version_info >= (3, 8): + __all__ = ["BadGzipFile", "GzipFile", "open", "compress", "decompress"] +else: + __all__ = ["GzipFile", "open", "compress", "decompress"] + _ReadBinaryMode = Literal["r", "rb"] _WriteBinaryMode = Literal["a", "ab", "w", "wb", "x", "xb"] _OpenTextMode = Literal["rt", "at", "wt", "xt"] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/hashlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/hashlib.pyi index e39f2f25326e..4332153d281c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/hashlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/hashlib.pyi @@ -1,8 +1,30 @@ import sys from _typeshed import ReadableBuffer, Self from typing import AbstractSet +from typing_extensions import final -class _Hash(object): +__all__ = ( + "md5", + "sha1", + "sha224", + "sha256", + "sha384", + "sha512", + "blake2b", + "blake2s", + "sha3_224", + "sha3_256", + "sha3_384", + "sha3_512", + "shake_128", + "shake_256", + "new", + "algorithms_guaranteed", + "algorithms_available", + "pbkdf2_hmac", +) + +class _Hash: @property def digest_size(self) -> int: ... 
@property @@ -49,7 +71,7 @@ def pbkdf2_hmac( hash_name: str, password: ReadableBuffer, salt: ReadableBuffer, iterations: int, dklen: int | None = ... ) -> bytes: ... -class _VarLenHash(object): +class _VarLenHash: digest_size: int block_size: int name: str @@ -76,7 +98,7 @@ def scrypt( maxmem: int = ..., dklen: int = ..., ) -> bytes: ... - +@final class _BlakeHash(_Hash): MAX_DIGEST_SIZE: int MAX_KEY_SIZE: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi index 81ee02582a6a..a7a787d44e62 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/heapq.pyi @@ -1,14 +1,14 @@ -from _typeshed import SupportsLessThan +from _heapq import * +from _typeshed import SupportsRichComparison from typing import Any, Callable, Iterable, TypeVar -_T = TypeVar("_T") +__all__ = ["heappush", "heappop", "heapify", "heapreplace", "merge", "nlargest", "nsmallest", "heappushpop"] -def heappush(__heap: list[_T], __item: _T) -> None: ... -def heappop(__heap: list[_T]) -> _T: ... -def heappushpop(__heap: list[_T], __item: _T) -> _T: ... -def heapify(__heap: list[Any]) -> None: ... -def heapreplace(__heap: list[_T], __item: _T) -> _T: ... -def merge(*iterables: Iterable[_T], key: Callable[[_T], Any] | None = ..., reverse: bool = ...) -> Iterable[_T]: ... -def nlargest(n: int, iterable: Iterable[_T], key: Callable[[_T], SupportsLessThan] | None = ...) -> list[_T]: ... -def nsmallest(n: int, iterable: Iterable[_T], key: Callable[[_T], SupportsLessThan] | None = ...) -> list[_T]: ... +_S = TypeVar("_S") + +__about__: str + +def merge(*iterables: Iterable[_S], key: Callable[[_S], Any] | None = ..., reverse: bool = ...) -> Iterable[_S]: ... +def nlargest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ...) -> list[_S]: ... 
+def nsmallest(n: int, iterable: Iterable[_S], key: Callable[[_S], SupportsRichComparison] | None = ...) -> list[_S]: ... def _heapify_max(__x: list[Any]) -> None: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/hmac.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/hmac.pyi index 440bddd7919c..88c88631f99a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/hmac.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/hmac.pyi @@ -7,6 +7,9 @@ from typing import Any, AnyStr, Callable, Union, overload _Hash = Any _DigestMod = Union[str, Callable[[], _Hash], ModuleType] +trans_5C: bytes +trans_36: bytes + digest_size: None if sys.version_info >= (3, 8): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/html/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/html/__init__.pyi index af2a80021656..109c5f4b50fb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/html/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/html/__init__.pyi @@ -1,4 +1,6 @@ from typing import AnyStr +__all__ = ["escape", "unescape"] + def escape(s: AnyStr, quote: bool = ...) -> AnyStr: ... def unescape(s: AnyStr) -> AnyStr: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/html/entities.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/html/entities.pyi index 1743fccf32b9..be83fd1135be 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/html/entities.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/html/entities.pyi @@ -1,3 +1,5 @@ +__all__ = ["html5", "name2codepoint", "codepoint2name", "entitydefs"] + name2codepoint: dict[str, int] html5: dict[str, str] codepoint2name: dict[int, str] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi index 1cdaf72ff561..1731a345920b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/html/parser.pyi @@ -1,16 +1,18 @@ from _markupbase import ParserBase -from typing import Tuple +from typing import Pattern + +__all__ = ["HTMLParser"] class HTMLParser(ParserBase): def __init__(self, *, convert_charrefs: bool = ...) -> None: ... def feed(self, data: str) -> None: ... def close(self) -> None: ... def reset(self) -> None: ... - def getpos(self) -> Tuple[int, int]: ... + def getpos(self) -> tuple[int, int]: ... def get_starttag_text(self) -> str | None: ... - def handle_starttag(self, tag: str, attrs: list[Tuple[str, str | None]]) -> None: ... + def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... def handle_endtag(self, tag: str) -> None: ... - def handle_startendtag(self, tag: str, attrs: list[Tuple[str, str | None]]) -> None: ... + def handle_startendtag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None: ... def handle_data(self, data: str) -> None: ... def handle_entityref(self, name: str) -> None: ... def handle_charref(self, name: str) -> None: ... @@ -18,7 +20,7 @@ class HTMLParser(ParserBase): def handle_decl(self, decl: str) -> None: ... def handle_pi(self, data: str) -> None: ... 
def unknown_decl(self, data: str) -> None: ... - CDATA_CONTENT_ELEMENTS: Tuple[str, ...] + CDATA_CONTENT_ELEMENTS: tuple[str, ...] def check_for_whole_start_tag(self, i: int) -> int: ... # undocumented def clear_cdata_mode(self) -> None: ... # undocumented def goahead(self, end: bool) -> None: ... # undocumented @@ -28,3 +30,8 @@ class HTMLParser(ParserBase): def parse_pi(self, i: int) -> int: ... # undocumented def parse_starttag(self, i: int) -> int: ... # undocumented def set_cdata_mode(self, elem: str) -> None: ... # undocumented + rawdata: str # undocumented + cdata_elem: str | None # undocumented + convert_charrefs: bool # undocumented + interesting: Pattern[str] # undocumented + lasttag: str # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/http/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/http/__init__.pyi index 93895549cb2a..822cc0932939 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/http/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/http/__init__.pyi @@ -2,6 +2,8 @@ import sys from enum import IntEnum from typing_extensions import Literal +__all__ = ["HTTPStatus"] + class HTTPStatus(IntEnum): @property def phrase(self) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi index 508191556f37..93f132d678f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/http/client.pyi @@ -5,7 +5,29 @@ import sys import types from _typeshed import Self, WriteableBuffer from socket import socket -from typing import IO, Any, BinaryIO, Callable, Iterable, Iterator, Mapping, Protocol, Tuple, Type, TypeVar, Union, overload +from typing import IO, Any, BinaryIO, Callable, Iterable, Iterator, Mapping, Protocol, TypeVar, Union, overload + +__all__ = [ + "HTTPResponse", + "HTTPConnection", + "HTTPException", + "NotConnected", + "UnknownProtocol", + "UnknownTransferEncoding", + "UnimplementedFileMode", + "IncompleteRead", + "InvalidURL", + "ImproperConnectionState", + "CannotSendRequest", + "CannotSendHeader", + "ResponseNotReady", + "BadStatusLine", + "LineTooLong", + "RemoteDisconnected", + "error", + "responses", + "HTTPSConnection", +] _DataType = Union[bytes, IO[Any], Iterable[bytes], str] _T = TypeVar("_T") @@ -78,31 +100,36 @@ class HTTPMessage(email.message.Message): def parse_headers(fp: io.BufferedIOBase, _class: Callable[[], email.message.Message] = ...) -> HTTPMessage: ... -class HTTPResponse(io.BufferedIOBase, BinaryIO): +class HTTPResponse(io.BufferedIOBase, BinaryIO): # type: ignore # argument disparities between base classes msg: HTTPMessage headers: HTTPMessage version: int debuglevel: int + fp: io.BufferedReader closed: bool status: int reason: str + chunked: bool + chunk_left: int | None + length: int | None + will_close: bool def __init__(self, sock: socket, debuglevel: int = ..., method: str | None = ..., url: str | None = ...) -> None: ... def peek(self, n: int = ...) -> bytes: ... def read(self, amt: int | None = ...) -> bytes: ... def read1(self, n: int = ...) -> bytes: ... 
def readinto(self, b: WriteableBuffer) -> int: ... - def readline(self, limit: int = ...) -> bytes: ... # type: ignore + def readline(self, limit: int = ...) -> bytes: ... # type: ignore[override] @overload def getheader(self, name: str) -> str | None: ... @overload def getheader(self, name: str, default: _T) -> str | _T: ... - def getheaders(self) -> list[Tuple[str, str]]: ... + def getheaders(self) -> list[tuple[str, str]]: ... def fileno(self) -> int: ... def isclosed(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> bool | None: ... def info(self) -> email.message.Message: ... def geturl(self) -> str: ... @@ -118,19 +145,19 @@ class _HTTPConnectionProtocol(Protocol): host: str, port: int | None = ..., timeout: float = ..., - source_address: Tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = ..., blocksize: int = ..., ) -> HTTPConnection: ... else: def __call__( - self, host: str, port: int | None = ..., timeout: float = ..., source_address: Tuple[str, int] | None = ... + self, host: str, port: int | None = ..., timeout: float = ..., source_address: tuple[str, int] | None = ... ) -> HTTPConnection: ... class HTTPConnection: auto_open: int # undocumented debuglevel: int default_port: int # undocumented - response_class: Type[HTTPResponse] # undocumented + response_class: type[HTTPResponse] # undocumented timeout: float | None host: str port: int @@ -141,13 +168,14 @@ class HTTPConnection: host: str, port: int | None = ..., timeout: float | None = ..., - source_address: Tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = ..., blocksize: int = ..., ) -> None: ... 
else: def __init__( - self, host: str, port: int | None = ..., timeout: float | None = ..., source_address: Tuple[str, int] | None = ... + self, host: str, port: int | None = ..., timeout: float | None = ..., source_address: tuple[str, int] | None = ... ) -> None: ... + def request( self, method: str, url: str, body: _DataType | None = ..., headers: Mapping[str, str] = ..., *, encode_chunked: bool = ... ) -> None: ... @@ -170,7 +198,7 @@ class HTTPSConnection(HTTPConnection): key_file: str | None = ..., cert_file: str | None = ..., timeout: float | None = ..., - source_address: Tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = ..., *, context: ssl.SSLContext | None = ..., check_hostname: bool | None = ..., @@ -184,7 +212,7 @@ class HTTPSConnection(HTTPConnection): key_file: str | None = ..., cert_file: str | None = ..., timeout: float | None = ..., - source_address: Tuple[str, int] | None = ..., + source_address: tuple[str, int] | None = ..., *, context: ssl.SSLContext | None = ..., check_hostname: bool | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/http/cookiejar.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/http/cookiejar.pyi index f37fb19cebe9..4fb1c38c6ab8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/http/cookiejar.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/http/cookiejar.pyi @@ -1,9 +1,20 @@ import sys from _typeshed import StrPath from http.client import HTTPResponse -from typing import ClassVar, Iterable, Iterator, Pattern, Sequence, Tuple, TypeVar, overload +from typing import ClassVar, Iterable, Iterator, Pattern, Sequence, TypeVar, overload from urllib.request import Request +__all__ = [ + "Cookie", + "CookieJar", + "CookiePolicy", + "DefaultCookiePolicy", + "FileCookieJar", + "LWPCookieJar", + "LoadError", + "MozillaCookieJar", +] + _T = TypeVar("_T") class LoadError(OSError): ... 
@@ -27,8 +38,6 @@ class CookieJar(Iterable[Cookie]): def clear_expired_cookies(self) -> None: ... # undocumented def __iter__(self) -> Iterator[Cookie]: ... def __len__(self) -> int: ... - def __repr__(self) -> str: ... - def __str__(self) -> str: ... class FileCookieJar(CookieJar): filename: str @@ -37,6 +46,7 @@ class FileCookieJar(CookieJar): def __init__(self, filename: StrPath | None = ..., delayload: bool = ..., policy: CookiePolicy | None = ...) -> None: ... else: def __init__(self, filename: str | None = ..., delayload: bool = ..., policy: CookiePolicy | None = ...) -> None: ... + def save(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... def load(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... def revert(self, filename: str | None = ..., ignore_discard: bool = ..., ignore_expires: bool = ...) -> None: ... @@ -102,10 +112,11 @@ class DefaultCookiePolicy(CookiePolicy): strict_ns_set_initial_dollar: bool = ..., strict_ns_set_path: bool = ..., ) -> None: ... - def blocked_domains(self) -> Tuple[str, ...]: ... + + def blocked_domains(self) -> tuple[str, ...]: ... def set_blocked_domains(self, blocked_domains: Sequence[str]) -> None: ... def is_blocked(self, domain: str) -> bool: ... - def allowed_domains(self) -> Tuple[str, ...] | None: ... + def allowed_domains(self) -> tuple[str, ...] | None: ... def set_allowed_domains(self, allowed_domains: Sequence[str] | None) -> None: ... def is_not_allowed(self, domain: str) -> bool: ... def set_ok_version(self, cookie: Cookie, request: Request) -> bool: ... 
# undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/http/cookies.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/http/cookies.pyi index 5a88121f5070..80cb35608c59 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/http/cookies.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/http/cookies.pyi @@ -1,9 +1,11 @@ import sys -from typing import Any, Dict, Generic, Iterable, Mapping, Tuple, TypeVar, Union, overload +from typing import Any, Generic, Iterable, Mapping, TypeVar, Union, overload if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = ["CookieError", "BaseCookie", "SimpleCookie"] + _DataType = Union[str, Mapping[str, Union[str, Morsel[Any]]]] _T = TypeVar("_T") @@ -18,7 +20,7 @@ def _unquote(str: str) -> str: ... class CookieError(Exception): ... -class Morsel(Dict[str, Any], Generic[_T]): +class Morsel(dict[str, Any], Generic[_T]): value: str coded_value: _T key: str @@ -27,24 +29,29 @@ class Morsel(Dict[str, Any], Generic[_T]): def set(self, key: str, val: str, coded_val: _T) -> None: ... else: def set(self, key: str, val: str, coded_val: _T, LegalChars: str = ...) -> None: ... + def setdefault(self, key: str, val: str | None = ...) -> str: ... # The dict update can also get a keywords argument so this is incompatible - @overload # type: ignore + @overload # type: ignore[override] def update(self, values: Mapping[str, str]) -> None: ... @overload - def update(self, values: Iterable[Tuple[str, str]]) -> None: ... + def update(self, values: Iterable[tuple[str, str]]) -> None: ... def isReservedKey(self, K: str) -> bool: ... def output(self, attrs: list[str] | None = ..., header: str = ...) -> str: ... + __str__ = output def js_output(self, attrs: list[str] | None = ...) -> str: ... def OutputString(self, attrs: list[str] | None = ...) -> str: ... + def __eq__(self, morsel: object) -> bool: ... + def __setitem__(self, K: str, V: Any) -> None: ... 
if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -class BaseCookie(Dict[str, Morsel[_T]], Generic[_T]): +class BaseCookie(dict[str, Morsel[_T]], Generic[_T]): def __init__(self, input: _DataType | None = ...) -> None: ... def value_decode(self, val: str) -> _T: ... def value_encode(self, val: _T) -> str: ... def output(self, attrs: list[str] | None = ..., header: str = ..., sep: str = ...) -> str: ... + __str__ = output def js_output(self, attrs: list[str] | None = ...) -> str: ... def load(self, rawdata: _DataType) -> None: ... def __setitem__(self, key: str, value: str | Morsel[_T]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/http/server.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/http/server.pyi index 92350b23a95f..53159b65ec14 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/http/server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/http/server.pyi @@ -3,7 +3,12 @@ import io import socketserver import sys from _typeshed import StrPath, SupportsRead, SupportsWrite -from typing import Any, AnyStr, BinaryIO, ClassVar, Mapping, Sequence, Tuple +from typing import Any, AnyStr, BinaryIO, ClassVar, Mapping, Sequence + +if sys.version_info >= (3, 7): + __all__ = ["HTTPServer", "ThreadingHTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] +else: + __all__ = ["HTTPServer", "BaseHTTPRequestHandler", "SimpleHTTPRequestHandler", "CGIHTTPRequestHandler"] class HTTPServer(socketserver.TCPServer): server_name: str @@ -14,7 +19,7 @@ if sys.version_info >= (3, 7): daemon_threads: bool # undocumented class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): - client_address: Tuple[str, int] + client_address: tuple[str, int] server: socketserver.BaseServer close_connection: bool requestline: str @@ -28,11 +33,11 @@ class BaseHTTPRequestHandler(socketserver.StreamRequestHandler): error_content_type: str protocol_version: 
str MessageClass: type - responses: Mapping[int, Tuple[str, str]] + responses: Mapping[int, tuple[str, str]] default_request_version: str # undocumented weekdayname: ClassVar[Sequence[str]] # undocumented monthname: ClassVar[Sequence[str | None]] # undocumented - def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer) -> None: ... + def __init__(self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer) -> None: ... def handle(self) -> None: ... def handle_one_request(self) -> None: ... def handle_expect_100(self) -> bool: ... @@ -56,10 +61,11 @@ class SimpleHTTPRequestHandler(BaseHTTPRequestHandler): extensions_map: dict[str, str] if sys.version_info >= (3, 7): def __init__( - self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer, directory: str | None = ... + self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer, directory: str | None = ... ) -> None: ... else: - def __init__(self, request: bytes, client_address: Tuple[str, int], server: socketserver.BaseServer) -> None: ... + def __init__(self, request: bytes, client_address: tuple[str, int], server: socketserver.BaseServer) -> None: ... + def do_GET(self) -> None: ... def do_HEAD(self) -> None: ... def send_head(self) -> io.BytesIO | BinaryIO | None: ... 
# undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi index 63c57a4d4623..e85e7d4ad90a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/imaplib.pyi @@ -5,32 +5,36 @@ from _typeshed import Self from socket import socket as _socket from ssl import SSLContext, SSLSocket from types import TracebackType -from typing import IO, Any, Callable, List, Pattern, Tuple, Type, Union +from typing import IO, Any, Callable, Pattern, Union from typing_extensions import Literal +__all__ = ["IMAP4", "IMAP4_stream", "Internaldate2tuple", "Int2AP", "ParseFlags", "Time2Internaldate", "IMAP4_SSL"] + # TODO: Commands should use their actual return types, not this type alias. # E.g. Tuple[Literal["OK"], List[bytes]] -_CommandResults = Tuple[str, List[Any]] +_CommandResults = tuple[str, list[Any]] + +_AnyResponseData = Union[list[None], list[Union[bytes, tuple[bytes, bytes]]]] -_AnyResponseData = Union[List[None], List[Union[bytes, Tuple[bytes, bytes]]]] +_list = list # conflicts with a method named "list" class IMAP4: - error: Type[Exception] - abort: Type[Exception] - readonly: Type[Exception] + error: type[Exception] + abort: type[Exception] + readonly: type[Exception] mustquote: Pattern[str] debug: int state: str literal: str | None - tagged_commands: dict[bytes, List[bytes] | None] - untagged_responses: dict[str, List[bytes | Tuple[bytes, bytes]]] + tagged_commands: dict[bytes, _list[bytes] | None] + untagged_responses: dict[str, _list[bytes | tuple[bytes, bytes]]] continuation_response: str is_readonly: bool tagnum: int tagpre: str tagre: Pattern[str] welcome: bytes - capabilities: Tuple[str] + capabilities: tuple[str, ...] PROTOCOL_VERSION: str if sys.version_info >= (3, 9): def __init__(self, host: str = ..., port: int = ..., timeout: float | None = ...) -> None: ... 
@@ -38,6 +42,7 @@ class IMAP4: else: def __init__(self, host: str = ..., port: int = ...) -> None: ... def open(self, host: str = ..., port: int = ...) -> None: ... + def __getattr__(self, attr: str) -> Any: ... host: str port: int @@ -51,7 +56,7 @@ class IMAP4: def recent(self) -> _CommandResults: ... def response(self, code: str) -> _CommandResults: ... def append(self, mailbox: str, flags: str, date_time: str, message: str) -> str: ... - def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> Tuple[str, str]: ... + def authenticate(self, mechanism: str, authobject: Callable[[bytes], bytes | None]) -> tuple[str, str]: ... def capability(self) -> _CommandResults: ... def check(self) -> _CommandResults: ... def close(self) -> _CommandResults: ... @@ -61,31 +66,31 @@ class IMAP4: def deleteacl(self, mailbox: str, who: str) -> _CommandResults: ... def enable(self, capability: str) -> _CommandResults: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, t: Type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... def expunge(self) -> _CommandResults: ... - def fetch(self, message_set: str, message_parts: str) -> Tuple[str, _AnyResponseData]: ... + def fetch(self, message_set: str, message_parts: str) -> tuple[str, _AnyResponseData]: ... def getacl(self, mailbox: str) -> _CommandResults: ... def getannotation(self, mailbox: str, entry: str, attribute: str) -> _CommandResults: ... def getquota(self, root: str) -> _CommandResults: ... def getquotaroot(self, mailbox: str) -> _CommandResults: ... - def list(self, directory: str = ..., pattern: str = ...) -> Tuple[str, _AnyResponseData]: ... - def login(self, user: str, password: str) -> Tuple[Literal["OK"], List[bytes]]: ... + def list(self, directory: str = ..., pattern: str = ...) -> tuple[str, _AnyResponseData]: ... 
+ def login(self, user: str, password: str) -> tuple[Literal["OK"], _list[bytes]]: ... def login_cram_md5(self, user: str, password: str) -> _CommandResults: ... - def logout(self) -> Tuple[str, _AnyResponseData]: ... + def logout(self) -> tuple[str, _AnyResponseData]: ... def lsub(self, directory: str = ..., pattern: str = ...) -> _CommandResults: ... def myrights(self, mailbox: str) -> _CommandResults: ... def namespace(self) -> _CommandResults: ... - def noop(self) -> Tuple[str, List[bytes]]: ... + def noop(self) -> tuple[str, _list[bytes]]: ... def partial(self, message_num: str, message_part: str, start: str, length: str) -> _CommandResults: ... def proxyauth(self, user: str) -> _CommandResults: ... def rename(self, oldmailbox: str, newmailbox: str) -> _CommandResults: ... def search(self, charset: str | None, *criteria: str) -> _CommandResults: ... - def select(self, mailbox: str = ..., readonly: bool = ...) -> Tuple[str, List[bytes | None]]: ... + def select(self, mailbox: str = ..., readonly: bool = ...) -> tuple[str, _list[bytes | None]]: ... def setacl(self, mailbox: str, who: str, what: str) -> _CommandResults: ... def setannotation(self, *args: str) -> _CommandResults: ... def setquota(self, root: str, limits: str) -> _CommandResults: ... def sort(self, sort_criteria: str, charset: str, *search_criteria: str) -> _CommandResults: ... - def starttls(self, ssl_context: Any | None = ...) -> Tuple[Literal["OK"], List[None]]: ... + def starttls(self, ssl_context: Any | None = ...) -> tuple[Literal["OK"], _list[None]]: ... def status(self, mailbox: str, names: str) -> _CommandResults: ... def store(self, message_set: str, command: str, flags: str) -> _CommandResults: ... def subscribe(self, mailbox: str) -> _CommandResults: ... @@ -94,6 +99,7 @@ class IMAP4: def unsubscribe(self, mailbox: str) -> _CommandResults: ... if sys.version_info >= (3, 9): def unselect(self) -> _CommandResults: ... + def xatom(self, name: str, *args: str) -> _CommandResults: ... 
def print_log(self) -> None: ... @@ -128,6 +134,7 @@ class IMAP4_SSL(IMAP4): def open(self, host: str = ..., port: int | None = ..., timeout: float | None = ...) -> None: ... else: def open(self, host: str = ..., port: int | None = ...) -> None: ... + def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... @@ -149,6 +156,7 @@ class IMAP4_stream(IMAP4): def open(self, host: str | None = ..., port: int | None = ..., timeout: float | None = ...) -> None: ... else: def open(self, host: str | None = ..., port: int | None = ...) -> None: ... + def read(self, size: int) -> bytes: ... def readline(self) -> bytes: ... def send(self, data: bytes) -> None: ... @@ -163,5 +171,5 @@ class _Authenticator: def Internaldate2tuple(resp: str) -> time.struct_time: ... def Int2AP(num: int) -> str: ... -def ParseFlags(resp: str) -> Tuple[str]: ... +def ParseFlags(resp: str) -> tuple[str, ...]: ... def Time2Internaldate(date_time: float | time.struct_time | str) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/imghdr.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/imghdr.pyi index 4515cf2269b0..af046e899326 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/imghdr.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/imghdr.pyi @@ -1,6 +1,8 @@ from _typeshed import StrPath from typing import Any, BinaryIO, Callable, Protocol, overload +__all__ = ["what"] + class _ReadableBinary(Protocol): def tell(self) -> int: ... def read(self, size: int) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/imp.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/imp.pyi index aac16f029424..ec09d18de32c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/imp.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/imp.pyi @@ -1,7 +1,7 @@ import types from _typeshed import StrPath from os import PathLike -from typing import IO, Any, Protocol, Tuple, TypeVar +from typing import IO, Any, Protocol from _imp import ( acquire_lock as acquire_lock, @@ -15,8 +15,6 @@ from _imp import ( release_lock as release_lock, ) -_T = TypeVar("_T") - SEARCH_ERROR: int PY_SOURCE: int PY_COMPILED: int @@ -33,7 +31,7 @@ def get_magic() -> bytes: ... def get_tag() -> str: ... def cache_from_source(path: StrPath, debug_override: bool | None = ...) -> str: ... def source_from_cache(path: StrPath) -> str: ... -def get_suffixes() -> list[Tuple[str, str, int]]: ... +def get_suffixes() -> list[tuple[str, str, int]]: ... class NullImporter: def __init__(self, path: StrPath) -> None: ... @@ -53,12 +51,12 @@ class _FileLike(Protocol): def load_source(name: str, pathname: str, file: _FileLike | None = ...) -> types.ModuleType: ... def load_compiled(name: str, pathname: str, file: _FileLike | None = ...) -> types.ModuleType: ... def load_package(name: str, path: StrPath) -> types.ModuleType: ... -def load_module(name: str, file: _FileLike | None, filename: str, details: Tuple[str, str, int]) -> types.ModuleType: ... +def load_module(name: str, file: _FileLike | None, filename: str, details: tuple[str, str, int]) -> types.ModuleType: ... # IO[Any] is a TextIOWrapper if name is a .py file, and a FileIO otherwise. def find_module( name: str, path: None | list[str] | list[PathLike[str]] | list[StrPath] = ... -) -> Tuple[IO[Any], str, Tuple[str, str, int]]: ... +) -> tuple[IO[Any], str, tuple[str, str, int]]: ... def reload(module: types.ModuleType) -> types.ModuleType: ... 
def init_builtin(name: str) -> types.ModuleType | None: ... def load_dynamic(name: str, path: str, file: Any = ...) -> types.ModuleType: ... # file argument is ignored diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/__init__.pyi index b7986de57a45..0e99786775b0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/__init__.pyi @@ -1,15 +1,20 @@ -import types from importlib.abc import Loader -from typing import Any, Mapping, Sequence +from types import ModuleType +from typing import Mapping, Sequence +__all__ = ["__import__", "import_module", "invalidate_caches", "reload"] + +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` def __import__( name: str, - globals: Mapping[str, Any] | None = ..., - locals: Mapping[str, Any] | None = ..., + globals: Mapping[str, object] | None = ..., + locals: Mapping[str, object] | None = ..., fromlist: Sequence[str] = ..., level: int = ..., -) -> types.ModuleType: ... -def import_module(name: str, package: str | None = ...) -> types.ModuleType: ... +) -> ModuleType: ... + +# `importlib.import_module` return type should be kept the same as `builtins.__import__` +def import_module(name: str, package: str | None = ...) -> ModuleType: ... def find_loader(name: str, path: str | None = ...) -> Loader | None: ... def invalidate_caches() -> None: ... -def reload(module: types.ModuleType) -> types.ModuleType: ... +def reload(module: ModuleType) -> ModuleType: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/abc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/abc.pyi index 69608c57cc4d..1ff8158582eb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/abc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/abc.pyi @@ -1,10 +1,19 @@ import sys import types -from _typeshed import StrOrBytesPath +from _typeshed import ( + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + StrOrBytesPath, + StrPath, +) from abc import ABCMeta, abstractmethod from importlib.machinery import ModuleSpec -from typing import IO, Any, Iterator, Mapping, Protocol, Sequence, Tuple, Union -from typing_extensions import Literal, runtime_checkable +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper +from typing import IO, Any, BinaryIO, Iterator, Mapping, NoReturn, Protocol, Sequence, Union, overload, runtime_checkable +from typing_extensions import Literal _Path = Union[bytes, str] @@ -46,7 +55,7 @@ class MetaPathFinder(Finder): class PathEntryFinder(Finder): def find_module(self, fullname: str) -> Loader | None: ... - def find_loader(self, fullname: str) -> Tuple[Loader | None, Sequence[_Path]]: ... + def find_loader(self, fullname: str) -> tuple[Loader | None, Sequence[_Path]]: ... def invalidate_caches(self) -> None: ... # Not defined on the actual class, but expected to exist. def find_spec(self, fullname: str, target: types.ModuleType | None = ...) -> ModuleSpec | None: ... @@ -76,30 +85,100 @@ if sys.version_info >= (3, 7): def open_resource(self, resource: StrOrBytesPath) -> IO[bytes]: ... @abstractmethod def resource_path(self, resource: StrOrBytesPath) -> str: ... - @abstractmethod - def is_resource(self, name: str) -> bool: ... + if sys.version_info >= (3, 10): + @abstractmethod + def is_resource(self, path: str) -> bool: ... 
+ else: + @abstractmethod + def is_resource(self, name: str) -> bool: ... + @abstractmethod def contents(self) -> Iterator[str]: ... if sys.version_info >= (3, 9): @runtime_checkable class Traversable(Protocol): + @abstractmethod + def is_dir(self) -> bool: ... + @abstractmethod + def is_file(self) -> bool: ... @abstractmethod def iterdir(self) -> Iterator[Traversable]: ... @abstractmethod - def read_bytes(self) -> bytes: ... + def joinpath(self, child: StrPath) -> Traversable: ... + # The .open method comes from pathlib.pyi and should be kept in sync. + @overload @abstractmethod - def read_text(self, encoding: str | None = ...) -> str: ... + def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> TextIOWrapper: ... + # Unbuffered binary mode: returns a FileIO + @overload @abstractmethod - def is_dir(self) -> bool: ... + def open( + self, mode: OpenBinaryMode, buffering: Literal[0], encoding: None = ..., errors: None = ..., newline: None = ... + ) -> FileIO: ... + # Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter + @overload @abstractmethod - def is_file(self) -> bool: ... + def open( + self, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + ) -> BufferedRandom: ... + @overload @abstractmethod - def joinpath(self, child: _Path) -> Traversable: ... + def open( + self, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + ) -> BufferedWriter: ... + @overload @abstractmethod - def __truediv__(self, child: _Path) -> Traversable: ... + def open( + self, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + ) -> BufferedReader: ... 
+ # Buffering cannot be determined: fall back to BinaryIO + @overload @abstractmethod - def open(self, mode: Literal["r", "rb"] = ..., *args: Any, **kwargs: Any) -> IO[Any]: ... - @property + def open( + self, mode: OpenBinaryMode, buffering: int, encoding: None = ..., errors: None = ..., newline: None = ... + ) -> BinaryIO: ... + # Fallback if mode is not specified + @overload @abstractmethod + def open( + self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + ) -> IO[Any]: ... + @property def name(self) -> str: ... + @abstractmethod + def __truediv__(self, key: StrPath) -> Traversable: ... + @abstractmethod + def read_bytes(self) -> bytes: ... + @abstractmethod + def read_text(self, encoding: str | None = ...) -> str: ... + + class TraversableResources(ResourceReader): + @abstractmethod + def files(self) -> Traversable: ... + def open_resource(self, resource: StrPath) -> BufferedReader: ... # type: ignore[override] + def resource_path(self, resource: Any) -> NoReturn: ... + def is_resource(self, path: StrPath) -> bool: ... + def contents(self) -> Iterator[str]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/machinery.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/machinery.pyi index a2e89bb2d8de..6a531c8cb414 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/machinery.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/machinery.pyi @@ -1,8 +1,11 @@ import importlib.abc +import sys import types -from typing import Any, Callable, Sequence, Tuple +from typing import Any, Callable, Iterable, Sequence + +if sys.version_info >= (3, 8): + from importlib.metadata import DistributionFinder, PathDistribution -# TODO: the loaders seem a bit backwards, attribute is protocol but __init__ arg isn't? class ModuleSpec: def __init__( self, @@ -14,13 +17,14 @@ class ModuleSpec: is_package: bool | None = ..., ) -> None: ... 
name: str - loader: importlib.abc._LoaderProtocol | None + loader: importlib.abc.Loader | None origin: str | None submodule_search_locations: list[str] | None loader_state: Any cached: str | None parent: str | None has_location: bool + def __eq__(self, other: object) -> bool: ... class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @@ -42,10 +46,16 @@ class BuiltinImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader) # Loader @staticmethod def module_repr(module: types.ModuleType) -> str: ... - @classmethod - def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... - @classmethod - def exec_module(cls, module: types.ModuleType) -> None: ... + if sys.version_info >= (3, 10): + @staticmethod + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + @staticmethod + def exec_module(module: types.ModuleType) -> None: ... + else: + @classmethod + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + @classmethod + def exec_module(cls, module: types.ModuleType) -> None: ... class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # MetaPathFinder @@ -67,8 +77,13 @@ class FrozenImporter(importlib.abc.MetaPathFinder, importlib.abc.InspectLoader): # Loader @staticmethod def module_repr(m: types.ModuleType) -> str: ... - @classmethod - def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + if sys.version_info >= (3, 10): + @staticmethod + def create_module(spec: ModuleSpec) -> types.ModuleType | None: ... + else: + @classmethod + def create_module(cls, spec: ModuleSpec) -> types.ModuleType | None: ... + @staticmethod def exec_module(module: types.ModuleType) -> None: ... @@ -81,8 +96,19 @@ class WindowsRegistryFinder(importlib.abc.MetaPathFinder): ) -> ModuleSpec | None: ... class PathFinder: - @classmethod - def invalidate_caches(cls) -> None: ... 
+ if sys.version_info >= (3, 10): + @staticmethod + def invalidate_caches() -> None: ... + else: + @classmethod + def invalidate_caches(cls) -> None: ... + if sys.version_info >= (3, 10): + @staticmethod + def find_distributions(context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + elif sys.version_info >= (3, 8): + @classmethod + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + @classmethod def find_spec( cls, fullname: str, path: Sequence[bytes | str] | None = ..., target: types.ModuleType | None = ... @@ -100,18 +126,23 @@ def all_suffixes() -> list[str]: ... class FileFinder(importlib.abc.PathEntryFinder): path: str - def __init__(self, path: str, *loader_details: Tuple[importlib.abc.Loader, list[str]]) -> None: ... + def __init__(self, path: str, *loader_details: tuple[type[importlib.abc.Loader], list[str]]) -> None: ... @classmethod def path_hook( - cls, *loader_details: Tuple[importlib.abc.Loader, list[str]] + cls, *loader_details: tuple[type[importlib.abc.Loader], list[str]] ) -> Callable[[str], importlib.abc.PathEntryFinder]: ... -class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): +class SourceFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): # type: ignore # argument disparities def set_data(self, path: importlib.abc._Path, data: bytes, *, _mode: int = ...) -> None: ... -class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... +class SourcelessFileLoader(importlib.abc.FileLoader, importlib.abc.SourceLoader): ... # type: ignore # argument disparities class ExtensionFileLoader(importlib.abc.ExecutionLoader): def __init__(self, name: str, path: importlib.abc._Path) -> None: ... def get_filename(self, name: str | None = ...) -> importlib.abc._Path: ... def get_source(self, fullname: str) -> None: ... + def create_module(self, spec: ModuleSpec) -> types.ModuleType: ... 
+ def exec_module(self, module: types.ModuleType) -> None: ... + def is_package(self, fullname: str) -> bool: ... + def get_code(self, fullname: str) -> None: ... + def __eq__(self, other: object) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata.pyi deleted file mode 100644 index 2c1041b76503..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata.pyi +++ /dev/null @@ -1,90 +0,0 @@ -import abc -import pathlib -import sys -from _typeshed import StrPath -from collections.abc import Mapping -from email.message import Message -from importlib.abc import MetaPathFinder -from os import PathLike -from pathlib import Path -from typing import Any, Iterable, NamedTuple, Tuple, overload - -if sys.version_info >= (3, 10): - def packages_distributions() -> Mapping[str, list[str]]: ... - -if sys.version_info >= (3, 8): - class PackageNotFoundError(ModuleNotFoundError): ... - class _EntryPointBase(NamedTuple): - name: str - value: str - group: str - class EntryPoint(_EntryPointBase): - def load(self) -> Any: ... # Callable[[], Any] or an importable module - @property - def extras(self) -> list[str]: ... - class PackagePath(pathlib.PurePosixPath): - def read_text(self, encoding: str = ...) -> str: ... - def read_binary(self) -> bytes: ... - def locate(self) -> PathLike[str]: ... - # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: - hash: FileHash | None - size: int | None - dist: Distribution - class FileHash: - mode: str - value: str - def __init__(self, spec: str) -> None: ... - class Distribution: - @abc.abstractmethod - def read_text(self, filename: str) -> str | None: ... - @abc.abstractmethod - def locate_file(self, path: StrPath) -> PathLike[str]: ... - @classmethod - def from_name(cls, name: str) -> Distribution: ... 
- @overload - @classmethod - def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... - @overload - @classmethod - def discover( - cls, *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any - ) -> Iterable[Distribution]: ... - @staticmethod - def at(path: StrPath) -> PathDistribution: ... - @property - def metadata(self) -> Message: ... - @property - def version(self) -> str: ... - @property - def entry_points(self) -> list[EntryPoint]: ... - @property - def files(self) -> list[PackagePath] | None: ... - @property - def requires(self) -> list[str] | None: ... - class DistributionFinder(MetaPathFinder): - class Context: - name: str | None - def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... - @property - def path(self) -> list[str]: ... - @abc.abstractmethod - def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... - class MetadataPathFinder(DistributionFinder): - @classmethod - def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... - class PathDistribution(Distribution): - def __init__(self, path: Path) -> None: ... - def read_text(self, filename: StrPath) -> str: ... - def locate_file(self, path: StrPath) -> PathLike[str]: ... - def distribution(distribution_name: str) -> Distribution: ... - @overload - def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... - @overload - def distributions( - *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any - ) -> Iterable[Distribution]: ... - def metadata(distribution_name: str) -> Message: ... - def version(distribution_name: str) -> str: ... - def entry_points() -> dict[str, Tuple[EntryPoint, ...]]: ... - def files(distribution_name: str) -> list[PackagePath] | None: ... - def requires(distribution_name: str) -> list[str] | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi new file mode 100644 index 000000000000..5b3878d1c5bb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata/__init__.pyi @@ -0,0 +1,213 @@ +import abc +import pathlib +import sys +from _typeshed import Self, StrPath +from collections.abc import Mapping +from email.message import Message +from importlib.abc import MetaPathFinder +from os import PathLike +from pathlib import Path +from typing import Any, ClassVar, Iterable, NamedTuple, Pattern, overload + +if sys.version_info >= (3, 10): + __all__ = [ + "Distribution", + "DistributionFinder", + "PackageMetadata", + "PackageNotFoundError", + "distribution", + "distributions", + "entry_points", + "files", + "metadata", + "packages_distributions", + "requires", + "version", + ] +else: + __all__ = [ + "Distribution", + "DistributionFinder", + "PackageNotFoundError", + "distribution", + "distributions", + "entry_points", + "files", + "metadata", + "requires", + "version", + ] + +if sys.version_info >= (3, 10): + from importlib.metadata._meta import PackageMetadata as PackageMetadata + def packages_distributions() -> Mapping[str, list[str]]: ... + +class PackageNotFoundError(ModuleNotFoundError): ... + +class _EntryPointBase(NamedTuple): + name: str + value: str + group: str + +class EntryPoint(_EntryPointBase): + pattern: ClassVar[Pattern[str]] + def load(self) -> Any: ... # Callable[[], Any] or an importable module + @property + def extras(self) -> list[str]: ... + if sys.version_info >= (3, 9): + @property + def module(self) -> str: ... + @property + def attr(self) -> str: ... 
+ if sys.version_info >= (3, 10): + dist: ClassVar[Distribution | None] + def matches( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> bool: ... # undocumented + +if sys.version_info >= (3, 10): + class EntryPoints(list[EntryPoint]): # use as list is deprecated since 3.10 + # int argument is deprecated since 3.10 + def __getitem__(self, item: int | str) -> EntryPoint: ... # type: ignore[override] + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + @property + def names(self) -> set[str]: ... + @property + def groups(self) -> set[str]: ... + + class SelectableGroups(dict[str, EntryPoints]): # use as dict is deprecated since 3.10 + @classmethod + def load(cls: type[Self], eps: Iterable[EntryPoint]) -> Self: ... + @property + def groups(self) -> set[str]: ... + @property + def names(self) -> set[str]: ... + @overload + def select(self: Self) -> Self: ... # type: ignore[misc] + @overload + def select( + self, + *, + name: str = ..., + value: str = ..., + group: str = ..., + module: str = ..., + attr: str = ..., + extras: list[str] = ..., + ) -> EntryPoints: ... + +class PackagePath(pathlib.PurePosixPath): + def read_text(self, encoding: str = ...) -> str: ... + def read_binary(self) -> bytes: ... + def locate(self) -> PathLike[str]: ... + # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: + hash: FileHash | None + size: int | None + dist: Distribution + +class FileHash: + mode: str + value: str + def __init__(self, spec: str) -> None: ... + +class Distribution: + @abc.abstractmethod + def read_text(self, filename: str) -> str | None: ... + @abc.abstractmethod + def locate_file(self, path: StrPath) -> PathLike[str]: ... + @classmethod + def from_name(cls, name: str) -> Distribution: ... 
+ @overload + @classmethod + def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... + @overload + @classmethod + def discover( + cls, *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any + ) -> Iterable[Distribution]: ... + @staticmethod + def at(path: StrPath) -> PathDistribution: ... + + if sys.version_info >= (3, 10): + @property + def metadata(self) -> PackageMetadata: ... + @property + def entry_points(self) -> EntryPoints: ... + else: + @property + def metadata(self) -> Message: ... + @property + def entry_points(self) -> list[EntryPoint]: ... + + @property + def version(self) -> str: ... + @property + def files(self) -> list[PackagePath] | None: ... + @property + def requires(self) -> list[str] | None: ... + if sys.version_info >= (3, 10): + @property + def name(self) -> str: ... + +class DistributionFinder(MetaPathFinder): + class Context: + name: str | None + def __init__(self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any) -> None: ... + @property + def path(self) -> list[str]: ... + + @abc.abstractmethod + def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... + +class MetadataPathFinder(DistributionFinder): + @classmethod + def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... + if sys.version_info >= (3, 10): + # Yes, this is an instance method that has argumend named "cls" + def invalidate_caches(cls) -> None: ... # type: ignore + +class PathDistribution(Distribution): + def __init__(self, path: Path) -> None: ... + def read_text(self, filename: StrPath) -> str: ... + def locate_file(self, path: StrPath) -> PathLike[str]: ... + +def distribution(distribution_name: str) -> Distribution: ... +@overload +def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... 
+@overload +def distributions( + *, context: None = ..., name: str | None = ..., path: list[str] = ..., **kwargs: Any +) -> Iterable[Distribution]: ... + +if sys.version_info >= (3, 10): + def metadata(distribution_name: str) -> PackageMetadata: ... + @overload + def entry_points() -> SelectableGroups: ... # type: ignore[misc] + @overload + def entry_points( + *, name: str = ..., value: str = ..., group: str = ..., module: str = ..., attr: str = ..., extras: list[str] = ... + ) -> EntryPoints: ... + +else: + def metadata(distribution_name: str) -> Message: ... + def entry_points() -> dict[str, list[EntryPoint]]: ... + +def version(distribution_name: str) -> str: ... +def files(distribution_name: str) -> list[PackagePath] | None: ... +def requires(distribution_name: str) -> list[str] | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi new file mode 100644 index 000000000000..a5e5733396d7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/metadata/_meta.pyi @@ -0,0 +1,18 @@ +from typing import Any, Iterator, Protocol, TypeVar + +_T = TypeVar("_T") + +class PackageMetadata(Protocol): + def __len__(self) -> int: ... + def __contains__(self, item: str) -> bool: ... + def __getitem__(self, key: str) -> str: ... + def __iter__(self) -> Iterator[str]: ... + def get_all(self, name: str, failobj: _T = ...) -> list[Any] | _T: ... + @property + def json(self) -> dict[str, str | list[str]]: ... + +class SimplePath(Protocol): + def joinpath(self) -> SimplePath: ... + def __div__(self) -> SimplePath: ... + def parent(self) -> SimplePath: ... + def read_text(self) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources.pyi index 075761abf9da..cee3259332ff 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/resources.pyi @@ -1,25 +1,57 @@ +import os import sys -from typing import Any +from contextlib import AbstractContextManager +from pathlib import Path +from types import ModuleType +from typing import Any, BinaryIO, Iterator, TextIO, Union -# This is a >=3.7 module, so we conditionally include its source. -if sys.version_info >= (3, 7): - import os - from pathlib import Path - from types import ModuleType - from typing import BinaryIO, ContextManager, Iterator, TextIO, Union +if sys.version_info >= (3, 10): + __all__ = [ + "Package", + "Resource", + "ResourceReader", + "as_file", + "contents", + "files", + "is_resource", + "open_binary", + "open_text", + "path", + "read_binary", + "read_text", + ] +elif sys.version_info >= (3, 9): + __all__ = [ + "Package", + "Resource", + "as_file", + "contents", + "files", + "is_resource", + "open_binary", + "open_text", + "path", + "read_binary", + "read_text", + ] +else: + __all__ = ["Package", "Resource", "contents", "is_resource", "open_binary", "open_text", "path", "read_binary", "read_text"] - Package = Union[str, ModuleType] - Resource = Union[str, os.PathLike[Any]] - def open_binary(package: Package, resource: Resource) -> BinaryIO: ... - def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... - def read_binary(package: Package, resource: Resource) -> bytes: ... - def read_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> str: ... - def path(package: Package, resource: Resource) -> ContextManager[Path]: ... - def is_resource(package: Package, name: str) -> bool: ... 
- def contents(package: Package) -> Iterator[str]: ... +Package = Union[str, ModuleType] +Resource = Union[str, os.PathLike[Any]] + +def open_binary(package: Package, resource: Resource) -> BinaryIO: ... +def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... +def read_binary(package: Package, resource: Resource) -> bytes: ... +def read_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> str: ... +def path(package: Package, resource: Resource) -> AbstractContextManager[Path]: ... +def is_resource(package: Package, name: str) -> bool: ... +def contents(package: Package) -> Iterator[str]: ... if sys.version_info >= (3, 9): - from contextlib import AbstractContextManager from importlib.abc import Traversable def files(package: Package) -> Traversable: ... def as_file(path: Traversable) -> AbstractContextManager[Path]: ... + +if sys.version_info >= (3, 10): + from importlib.abc import ResourceReader as ResourceReader diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi index 30b8765fad04..c759d7def5be 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/importlib/util.pyi @@ -1,5 +1,6 @@ import importlib.abc import importlib.machinery +import sys import types from _typeshed import StrOrBytesPath from typing import Any, Callable @@ -7,9 +8,9 @@ from typing_extensions import ParamSpec _P = ParamSpec("_P") -def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... # type: ignore -def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... # type: ignore -def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... 
# type: ignore +def module_for_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... +def set_loader(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... +def set_package(fxn: Callable[_P, types.ModuleType]) -> Callable[_P, types.ModuleType]: ... def resolve_name(name: str, package: str | None) -> str: ... MAGIC_NUMBER: bytes @@ -36,3 +37,6 @@ class LazyLoader(importlib.abc.Loader): def factory(cls, loader: importlib.abc.Loader) -> Callable[..., LazyLoader]: ... def create_module(self, spec: importlib.machinery.ModuleSpec) -> types.ModuleType | None: ... def exec_module(self, module: types.ModuleType) -> None: ... + +if sys.version_info >= (3, 7): + def source_hash(source_bytes: bytes) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/inspect.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/inspect.pyi index 005222646133..62fce2fca2b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/inspect.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/inspect.pyi @@ -3,21 +3,138 @@ import sys import types from _typeshed import Self from collections import OrderedDict -from collections.abc import Awaitable, Callable, Generator, Mapping, Sequence, Set +from collections.abc import Awaitable, Callable, Generator, Mapping, Sequence, Set as AbstractSet from types import ( AsyncGeneratorType, BuiltinFunctionType, + BuiltinMethodType, CodeType, CoroutineType, FrameType, FunctionType, GeneratorType, + GetSetDescriptorType, + LambdaType, MethodType, ModuleType, TracebackType, ) -from typing import Any, ClassVar, NamedTuple, Tuple, Type, Union -from typing_extensions import Literal, TypeGuard + +if sys.version_info >= (3, 7): + from types import ( + ClassMethodDescriptorType, + WrapperDescriptorType, + MemberDescriptorType, + MethodDescriptorType, + MethodWrapperType, + ) + +from typing import Any, ClassVar, Coroutine, NamedTuple, Protocol, TypeVar, Union +from 
typing_extensions import Literal, ParamSpec, TypeGuard + +if sys.version_info >= (3, 11): + __all__ = [ + "ArgInfo", + "Arguments", + "Attribute", + "BlockFinder", + "BoundArguments", + "CORO_CLOSED", + "CORO_CREATED", + "CORO_RUNNING", + "CORO_SUSPENDED", + "CO_ASYNC_GENERATOR", + "CO_COROUTINE", + "CO_GENERATOR", + "CO_ITERABLE_COROUTINE", + "CO_NESTED", + "CO_NEWLOCALS", + "CO_NOFREE", + "CO_OPTIMIZED", + "CO_VARARGS", + "CO_VARKEYWORDS", + "ClassFoundException", + "ClosureVars", + "EndOfBlock", + "FrameInfo", + "FullArgSpec", + "GEN_CLOSED", + "GEN_CREATED", + "GEN_RUNNING", + "GEN_SUSPENDED", + "Parameter", + "Signature", + "TPFLAGS_IS_ABSTRACT", + "Traceback", + "classify_class_attrs", + "cleandoc", + "currentframe", + "findsource", + "formatannotation", + "formatannotationrelativeto", + "formatargvalues", + "get_annotations", + "getabsfile", + "getargs", + "getargvalues", + "getattr_static", + "getblock", + "getcallargs", + "getclasstree", + "getclosurevars", + "getcomments", + "getcoroutinelocals", + "getcoroutinestate", + "getdoc", + "getfile", + "getframeinfo", + "getfullargspec", + "getgeneratorlocals", + "getgeneratorstate", + "getinnerframes", + "getlineno", + "getmembers", + "getmembers_static", + "getmodule", + "getmodulename", + "getmro", + "getouterframes", + "getsource", + "getsourcefile", + "getsourcelines", + "indentsize", + "isabstract", + "isasyncgen", + "isasyncgenfunction", + "isawaitable", + "isbuiltin", + "isclass", + "iscode", + "iscoroutine", + "iscoroutinefunction", + "isdatadescriptor", + "isframe", + "isfunction", + "isgenerator", + "isgeneratorfunction", + "isgetsetdescriptor", + "ismemberdescriptor", + "ismethod", + "ismethoddescriptor", + "ismethodwrapper", + "ismodule", + "isroutine", + "istraceback", + "signature", + "stack", + "trace", + "unwrap", + "walktree", + ] + +_P = ParamSpec("_P") +_T_cont = TypeVar("_T_cont", contravariant=True) +_V_cont = TypeVar("_V_cont", contravariant=True) # # Types and members @@ -32,24 +149,33 @@ 
class BlockFinder: indecorator: bool decoratorhasargs: bool last: int - def tokeneater(self, type: int, token: str, srowcol: Tuple[int, int], erowcol: Tuple[int, int], line: str) -> None: ... - -CO_OPTIMIZED: int -CO_NEWLOCALS: int -CO_VARARGS: int -CO_VARKEYWORDS: int -CO_NESTED: int -CO_GENERATOR: int -CO_NOFREE: int -CO_COROUTINE: int -CO_ITERABLE_COROUTINE: int -CO_ASYNC_GENERATOR: int -TPFLAGS_IS_ABSTRACT: int - -def getmembers(object: object, predicate: Callable[[Any], bool] | None = ...) -> list[Tuple[str, Any]]: ... + def tokeneater(self, type: int, token: str, srowcol: tuple[int, int], erowcol: tuple[int, int], line: str) -> None: ... + +CO_OPTIMIZED: Literal[1] +CO_NEWLOCALS: Literal[2] +CO_VARARGS: Literal[4] +CO_VARKEYWORDS: Literal[8] +CO_NESTED: Literal[16] +CO_GENERATOR: Literal[32] +CO_NOFREE: Literal[64] +CO_COROUTINE: Literal[128] +CO_ITERABLE_COROUTINE: Literal[256] +CO_ASYNC_GENERATOR: Literal[512] +TPFLAGS_IS_ABSTRACT: Literal[1048576] + +modulesbyfile: dict[str, Any] + +_GetMembersPredicate = Callable[[Any], bool] +_GetMembersReturn = list[tuple[str, Any]] + +def getmembers(object: object, predicate: _GetMembersPredicate | None = ...) -> _GetMembersReturn: ... + +if sys.version_info >= (3, 11): + def getmembers_static(object: object, predicate: _GetMembersPredicate | None = ...) -> _GetMembersReturn: ... + def getmodulename(path: str) -> str | None: ... def ismodule(object: object) -> TypeGuard[ModuleType]: ... -def isclass(object: object) -> TypeGuard[Type[Any]]: ... +def isclass(object: object) -> TypeGuard[type[Any]]: ... def ismethod(object: object) -> TypeGuard[MethodType]: ... def isfunction(object: object) -> TypeGuard[FunctionType]: ... @@ -62,7 +188,7 @@ else: def iscoroutinefunction(object: object) -> bool: ... def isgenerator(object: object) -> TypeGuard[GeneratorType[Any, Any, Any]]: ... -def iscoroutine(object: object) -> TypeGuard[CoroutineType]: ... +def iscoroutine(object: object) -> TypeGuard[CoroutineType[Any, Any, Any]]: ... 
def isawaitable(object: object) -> TypeGuard[Awaitable[Any]]: ... if sys.version_info >= (3, 8): @@ -71,24 +197,54 @@ if sys.version_info >= (3, 8): else: def isasyncgenfunction(object: object) -> bool: ... +class _SupportsSet(Protocol[_T_cont, _V_cont]): + def __set__(self, __instance: _T_cont, __value: _V_cont) -> None: ... + +class _SupportsDelete(Protocol[_T_cont]): + def __delete__(self, __instance: _T_cont) -> None: ... + def isasyncgen(object: object) -> TypeGuard[AsyncGeneratorType[Any, Any]]: ... def istraceback(object: object) -> TypeGuard[TracebackType]: ... def isframe(object: object) -> TypeGuard[FrameType]: ... def iscode(object: object) -> TypeGuard[CodeType]: ... def isbuiltin(object: object) -> TypeGuard[BuiltinFunctionType]: ... -def isroutine(object: object) -> bool: ... + +if sys.version_info >= (3, 11): + def ismethodwrapper(object: object) -> TypeGuard[MethodWrapperType]: ... + +if sys.version_info >= (3, 7): + def isroutine( + object: object, + ) -> TypeGuard[ + FunctionType + | LambdaType + | MethodType + | BuiltinFunctionType + | BuiltinMethodType + | WrapperDescriptorType + | MethodDescriptorType + | ClassMethodDescriptorType + ]: ... + def ismethoddescriptor(object: object) -> TypeGuard[MethodDescriptorType]: ... + def ismemberdescriptor(object: object) -> TypeGuard[MemberDescriptorType]: ... + +else: + def isroutine( + object: object, + ) -> TypeGuard[FunctionType | LambdaType | MethodType | BuiltinFunctionType | BuiltinMethodType]: ... + def ismethoddescriptor(object: object) -> bool: ... + def ismemberdescriptor(object: object) -> bool: ... + def isabstract(object: object) -> bool: ... -def ismethoddescriptor(object: object) -> bool: ... -def isdatadescriptor(object: object) -> bool: ... -def isgetsetdescriptor(object: object) -> bool: ... -def ismemberdescriptor(object: object) -> bool: ... +def isgetsetdescriptor(object: object) -> TypeGuard[GetSetDescriptorType]: ... 
+def isdatadescriptor(object: object) -> TypeGuard[_SupportsSet[Any, Any] | _SupportsDelete[Any]]: ... # # Retrieving source code # -_SourceObjectType = Union[ModuleType, Type[Any], MethodType, FunctionType, TracebackType, FrameType, CodeType, Callable[..., Any]] +_SourceObjectType = Union[ModuleType, type[Any], MethodType, FunctionType, TracebackType, FrameType, CodeType, Callable[..., Any]] -def findsource(object: _SourceObjectType) -> Tuple[list[str], int]: ... +def findsource(object: _SourceObjectType) -> tuple[list[str], int]: ... def getabsfile(object: _SourceObjectType, _filename: str | None = ...) -> str: ... def getblock(lines: Sequence[str]) -> Sequence[str]: ... def getdoc(object: object) -> str | None: ... @@ -96,7 +252,7 @@ def getcomments(object: object) -> str | None: ... def getfile(object: _SourceObjectType) -> str: ... def getmodule(object: object, _filename: str | None = ...) -> ModuleType | None: ... def getsourcefile(object: _SourceObjectType) -> str | None: ... -def getsourcelines(object: _SourceObjectType) -> Tuple[list[str], int]: ... +def getsourcelines(object: _SourceObjectType) -> tuple[list[str], int]: ... def getsource(object: _SourceObjectType) -> str: ... def cleandoc(doc: str) -> str: ... def indentsize(line: str) -> int: ... @@ -124,35 +280,36 @@ class Signature: def __init__( self, parameters: Sequence[Parameter] | None = ..., *, return_annotation: Any = ..., __validate_parameters__: bool = ... ) -> None: ... - empty: _empty + empty = _empty @property def parameters(self) -> types.MappingProxyType[str, Parameter]: ... - # TODO: can we be more specific here? @property def return_annotation(self) -> Any: ... def bind(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def bind_partial(self, *args: Any, **kwargs: Any) -> BoundArguments: ... def replace( - self: Self, *, parameters: Sequence[Parameter] | Type[_void] | None = ..., return_annotation: Any = ... 
+ self: Self, *, parameters: Sequence[Parameter] | type[_void] | None = ..., return_annotation: Any = ... ) -> Self: ... if sys.version_info >= (3, 10): @classmethod def from_callable( - cls, + cls: type[Self], obj: Callable[..., Any], *, follow_wrapped: bool = ..., globals: Mapping[str, Any] | None = ..., locals: Mapping[str, Any] | None = ..., eval_str: bool = ..., - ) -> Signature: ... + ) -> Self: ... else: @classmethod - def from_callable(cls, obj: Callable[..., Any], *, follow_wrapped: bool = ...) -> Signature: ... + def from_callable(cls: type[Self], obj: Callable[..., Any], *, follow_wrapped: bool = ...) -> Self: ... + + def __eq__(self, other: object) -> bool: ... if sys.version_info >= (3, 10): def get_annotations( - obj: Callable[..., Any] | Type[Any] | ModuleType, + obj: Callable[..., Any] | type[Any] | ModuleType, *, globals: Mapping[str, Any] | None = ..., locals: Mapping[str, Any] | None = ..., @@ -168,37 +325,44 @@ class _ParameterKind(enum.IntEnum): VAR_KEYWORD: int if sys.version_info >= (3, 8): - description: str + @property + def description(self) -> str: ... class Parameter: def __init__(self, name: str, kind: _ParameterKind, *, default: Any = ..., annotation: Any = ...) -> None: ... - empty: _empty - name: str - default: Any - annotation: Any + empty = _empty - kind: _ParameterKind POSITIONAL_ONLY: ClassVar[Literal[_ParameterKind.POSITIONAL_ONLY]] POSITIONAL_OR_KEYWORD: ClassVar[Literal[_ParameterKind.POSITIONAL_OR_KEYWORD]] VAR_POSITIONAL: ClassVar[Literal[_ParameterKind.VAR_POSITIONAL]] KEYWORD_ONLY: ClassVar[Literal[_ParameterKind.KEYWORD_ONLY]] VAR_KEYWORD: ClassVar[Literal[_ParameterKind.VAR_KEYWORD]] + @property + def name(self) -> str: ... + @property + def default(self) -> Any: ... + @property + def kind(self) -> _ParameterKind: ... + @property + def annotation(self) -> Any: ... 
def replace( self: Self, *, - name: str | Type[_void] = ..., - kind: _ParameterKind | Type[_void] = ..., + name: str | type[_void] = ..., + kind: _ParameterKind | type[_void] = ..., default: Any = ..., annotation: Any = ..., ) -> Self: ... + def __eq__(self, other: object) -> bool: ... class BoundArguments: arguments: OrderedDict[str, Any] - args: Tuple[Any, ...] + args: tuple[Any, ...] kwargs: dict[str, Any] signature: Signature def __init__(self, signature: Signature, arguments: OrderedDict[str, Any]) -> None: ... def apply_defaults(self) -> None: ... + def __eq__(self, other: object) -> bool: ... # # Classes and functions @@ -208,13 +372,7 @@ class BoundArguments: # seem to be supporting this at the moment: # _ClassTreeItem = list[_ClassTreeItem] | Tuple[type, Tuple[type, ...]] def getclasstree(classes: list[type], unique: bool = ...) -> list[Any]: ... -def walktree(classes: list[type], children: dict[Type[Any], list[type]], parent: Type[Any] | None) -> list[Any]: ... - -class ArgSpec(NamedTuple): - args: list[str] - varargs: str | None - keywords: str | None - defaults: Tuple[Any, ...] +def walktree(classes: list[type], children: dict[type[Any], list[type]], parent: type[Any] | None) -> list[Any]: ... class Arguments(NamedTuple): args: list[str] @@ -222,13 +380,20 @@ class Arguments(NamedTuple): varkw: str | None def getargs(co: CodeType) -> Arguments: ... -def getargspec(func: object) -> ArgSpec: ... + +if sys.version_info < (3, 11): + class ArgSpec(NamedTuple): + args: list[str] + varargs: str | None + keywords: str | None + defaults: tuple[Any, ...] + def getargspec(func: object) -> ArgSpec: ... class FullArgSpec(NamedTuple): args: list[str] varargs: str | None varkw: str | None - defaults: Tuple[Any, ...] | None + defaults: tuple[Any, ...] | None kwonlyargs: list[str] kwonlydefaults: dict[str, Any] | None annotations: dict[str, Any] @@ -244,21 +409,24 @@ class ArgInfo(NamedTuple): def getargvalues(frame: FrameType) -> ArgInfo: ... 
def formatannotation(annotation: object, base_module: str | None = ...) -> str: ... def formatannotationrelativeto(object: object) -> Callable[[object], str]: ... -def formatargspec( - args: list[str], - varargs: str | None = ..., - varkw: str | None = ..., - defaults: Tuple[Any, ...] | None = ..., - kwonlyargs: Sequence[str] | None = ..., - kwonlydefaults: dict[str, Any] | None = ..., - annotations: dict[str, Any] = ..., - formatarg: Callable[[str], str] = ..., - formatvarargs: Callable[[str], str] = ..., - formatvarkw: Callable[[str], str] = ..., - formatvalue: Callable[[Any], str] = ..., - formatreturns: Callable[[Any], str] = ..., - formatannotation: Callable[[Any], str] = ..., -) -> str: ... + +if sys.version_info < (3, 11): + def formatargspec( + args: list[str], + varargs: str | None = ..., + varkw: str | None = ..., + defaults: tuple[Any, ...] | None = ..., + kwonlyargs: Sequence[str] | None = ..., + kwonlydefaults: dict[str, Any] | None = ..., + annotations: dict[str, Any] = ..., + formatarg: Callable[[str], str] = ..., + formatvarargs: Callable[[str], str] = ..., + formatvarkw: Callable[[str], str] = ..., + formatvalue: Callable[[Any], str] = ..., + formatreturns: Callable[[Any], str] = ..., + formatannotation: Callable[[Any], str] = ..., + ) -> str: ... + def formatargvalues( args: list[str], varargs: str | None, @@ -269,14 +437,14 @@ def formatargvalues( formatvarkw: Callable[[str], str] | None = ..., formatvalue: Callable[[Any], str] | None = ..., ) -> str: ... -def getmro(cls: type) -> Tuple[type, ...]: ... -def getcallargs(__func: Callable[..., Any], *args: Any, **kwds: Any) -> dict[str, Any]: ... +def getmro(cls: type) -> tuple[type, ...]: ... +def getcallargs(__func: Callable[_P, Any], *args: _P.args, **kwds: _P.kwargs) -> dict[str, Any]: ... 
class ClosureVars(NamedTuple): nonlocals: Mapping[str, Any] globals: Mapping[str, Any] builtins: Mapping[str, Any] - unbound: Set[str] + unbound: AbstractSet[str] def getclosurevars(func: Callable[..., Any]) -> ClosureVars: ... def unwrap(func: Callable[..., Any], *, stop: Callable[[Any], Any] | None = ...) -> Any: ... @@ -290,7 +458,7 @@ class Traceback(NamedTuple): lineno: int function: str code_context: list[str] | None - index: int | None # type: ignore + index: int | None # type: ignore[assignment] class FrameInfo(NamedTuple): frame: FrameType @@ -298,7 +466,7 @@ class FrameInfo(NamedTuple): lineno: int function: str code_context: list[str] | None - index: int | None # type: ignore + index: int | None # type: ignore[assignment] def getframeinfo(frame: FrameType | TracebackType, context: int = ...) -> Traceback: ... def getouterframes(frame: Any, context: int = ...) -> list[FrameInfo]: ... @@ -318,26 +486,25 @@ def getattr_static(obj: object, attr: str, default: Any | None = ...) -> Any: .. # Current State of Generators and Coroutines # -# TODO In the next two blocks of code, can we be more specific regarding the -# type of the "enums"? +GEN_CREATED: Literal["GEN_CREATED"] +GEN_RUNNING: Literal["GEN_RUNNING"] +GEN_SUSPENDED: Literal["GEN_SUSPENDED"] +GEN_CLOSED: Literal["GEN_CLOSED"] -GEN_CREATED: str -GEN_RUNNING: str -GEN_SUSPENDED: str -GEN_CLOSED: str +def getgeneratorstate( + generator: Generator[Any, Any, Any] +) -> Literal["GEN_CREATED", "GEN_RUNNING", "GEN_SUSPENDED", "GEN_CLOSED"]: ... -def getgeneratorstate(generator: Generator[Any, Any, Any]) -> str: ... +CORO_CREATED: Literal["CORO_CREATED"] +CORO_RUNNING: Literal["CORO_RUNNING"] +CORO_SUSPENDED: Literal["CORO_SUSPENDED"] +CORO_CLOSED: Literal["CORO_CLOSED"] -CORO_CREATED: str -CORO_RUNNING: str -CORO_SUSPENDED: str -CORO_CLOSED: str -# TODO can we be more specific than "object"? -def getcoroutinestate(coroutine: object) -> str: ... 
+def getcoroutinestate( + coroutine: Coroutine[Any, Any, Any] +) -> Literal["CORO_CREATED", "CORO_RUNNING", "CORO_SUSPENDED", "CORO_CLOSED"]: ... def getgeneratorlocals(generator: Generator[Any, Any, Any]) -> dict[str, Any]: ... - -# TODO can we be more specific than "object"? -def getcoroutinelocals(coroutine: object) -> dict[str, Any]: ... +def getcoroutinelocals(coroutine: Coroutine[Any, Any, Any]) -> dict[str, Any]: ... # Create private type alias to avoid conflict with symbol of same # name created in Attribute class. diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/io.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/io.pyi index 6342907004d5..c32691edb686 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/io.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/io.pyi @@ -4,13 +4,58 @@ import sys from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer from os import _Opener from types import TracebackType -from typing import IO, Any, BinaryIO, Callable, Iterable, Iterator, TextIO, Tuple, Type +from typing import IO, Any, BinaryIO, Callable, Iterable, Iterator, TextIO +from typing_extensions import Literal -DEFAULT_BUFFER_SIZE: int - -SEEK_SET: int -SEEK_CUR: int -SEEK_END: int +if sys.version_info >= (3, 8): + __all__ = [ + "BlockingIOError", + "open", + "open_code", + "IOBase", + "RawIOBase", + "FileIO", + "BytesIO", + "StringIO", + "BufferedIOBase", + "BufferedReader", + "BufferedWriter", + "BufferedRWPair", + "BufferedRandom", + "TextIOBase", + "TextIOWrapper", + "UnsupportedOperation", + "SEEK_SET", + "SEEK_CUR", + "SEEK_END", + ] +else: + __all__ = [ + "BlockingIOError", + "open", + "IOBase", + "RawIOBase", + "FileIO", + "BytesIO", + "StringIO", + "BufferedIOBase", + "BufferedReader", + "BufferedWriter", + "BufferedRWPair", + "BufferedRandom", + "TextIOBase", + "TextIOWrapper", + "UnsupportedOperation", + "SEEK_SET", + "SEEK_CUR", + "SEEK_END", + ] + +DEFAULT_BUFFER_SIZE: Literal[8192] + 
+SEEK_SET: Literal[0] +SEEK_CUR: Literal[1] +SEEK_END: Literal[2] open = builtins.open @@ -26,7 +71,7 @@ class IOBase: def __next__(self) -> bytes: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def close(self) -> None: ... def fileno(self) -> int: ... @@ -63,9 +108,9 @@ class BufferedIOBase(IOBase): def read(self, __size: int | None = ...) -> bytes: ... def read1(self, __size: int = ...) -> bytes: ... -class FileIO(RawIOBase, BinaryIO): +class FileIO(RawIOBase, BinaryIO): # type: ignore # argument disparities between the base classes mode: str - name: StrOrBytesPath | int # type: ignore + name: StrOrBytesPath | int # type: ignore[assignment] def __init__( self, file: StrOrBytesPath | int, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... ) -> None: ... @@ -75,7 +120,7 @@ class FileIO(RawIOBase, BinaryIO): def read(self, __size: int = ...) -> bytes: ... def __enter__(self: Self) -> Self: ... -class BytesIO(BufferedIOBase, BinaryIO): +class BytesIO(BufferedIOBase, BinaryIO): # type: ignore # argument disparities between the base classes def __init__(self, initial_bytes: bytes = ...) -> None: ... # BytesIO does not contain a "name" field. This workaround is necessary # to allow BytesIO sub-classes to add this field, as it is defined @@ -87,18 +132,18 @@ class BytesIO(BufferedIOBase, BinaryIO): if sys.version_info >= (3, 7): def read1(self, __size: int | None = ...) -> bytes: ... else: - def read1(self, __size: int | None) -> bytes: ... # type: ignore + def read1(self, __size: int | None) -> bytes: ... 
# type: ignore[override] -class BufferedReader(BufferedIOBase, BinaryIO): +class BufferedReader(BufferedIOBase, BinaryIO): # type: ignore # argument disparities between base classes def __enter__(self: Self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def peek(self, __size: int = ...) -> bytes: ... if sys.version_info >= (3, 7): def read1(self, __size: int = ...) -> bytes: ... else: - def read1(self, __size: int) -> bytes: ... # type: ignore + def read1(self, __size: int) -> bytes: ... # type: ignore[override] -class BufferedWriter(BufferedIOBase, BinaryIO): +class BufferedWriter(BufferedIOBase, BinaryIO): # type: ignore # argument disparities between base classes def __enter__(self: Self) -> Self: ... def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... def write(self, __buffer: ReadableBuffer) -> int: ... @@ -110,7 +155,7 @@ class BufferedRandom(BufferedReader, BufferedWriter): if sys.version_info >= (3, 7): def read1(self, __size: int = ...) -> bytes: ... else: - def read1(self, __size: int) -> bytes: ... # type: ignore + def read1(self, __size: int) -> bytes: ... # type: ignore[override] class BufferedRWPair(BufferedIOBase): def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... @@ -119,18 +164,18 @@ class BufferedRWPair(BufferedIOBase): class TextIOBase(IOBase): encoding: str errors: str | None - newlines: str | Tuple[str, ...] | None - def __iter__(self) -> Iterator[str]: ... # type: ignore - def __next__(self) -> str: ... # type: ignore + newlines: str | tuple[str, ...] | None + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] def detach(self) -> BinaryIO: ... def write(self, __s: str) -> int: ... - def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore - def readline(self, __size: int = ...) -> str: ... # type: ignore - def readlines(self, __hint: int = ...) 
-> list[str]: ... # type: ignore + def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] + def readline(self, __size: int = ...) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] def read(self, __size: int | None = ...) -> str: ... def tell(self) -> int: ... -class TextIOWrapper(TextIOBase, TextIO): +class TextIOWrapper(TextIOBase, TextIO): # type: ignore # argument disparities between base classes def __init__( self, buffer: IO[bytes], @@ -160,11 +205,11 @@ class TextIOWrapper(TextIOBase, TextIO): ) -> None: ... # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy. def __enter__(self: Self) -> Self: ... - def __iter__(self) -> Iterator[str]: ... # type: ignore - def __next__(self) -> str: ... # type: ignore - def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore - def readline(self, __size: int = ...) -> str: ... # type: ignore - def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] + def readline(self, __size: int = ...) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] def seek(self, __cookie: int, __whence: int = ...) -> int: ... class StringIO(TextIOWrapper): @@ -179,4 +224,5 @@ class IncrementalNewlineDecoder(codecs.IncrementalDecoder): def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... def decode(self, input: bytes | str, final: bool = ...) -> str: ... @property - def newlines(self) -> str | Tuple[str, ...] | None: ... + def newlines(self) -> str | tuple[str, ...] | None: ... + def setstate(self, __state: tuple[bytes, int]) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi index 710ad27dd466..d777cef74597 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ipaddress.pyi @@ -1,25 +1,23 @@ import sys -from typing import Any, Container, Generic, Iterable, Iterator, SupportsInt, Tuple, TypeVar, overload +from _typeshed import Self +from typing import Any, Container, Generic, Iterable, Iterator, SupportsInt, TypeVar, overload +from typing_extensions import Literal # Undocumented length constants -IPV4LENGTH: int -IPV6LENGTH: int +IPV4LENGTH: Literal[32] +IPV6LENGTH: Literal[128] _A = TypeVar("_A", IPv4Address, IPv6Address) _N = TypeVar("_N", IPv4Network, IPv6Network) -_T = TypeVar("_T") -def ip_address(address: object) -> Any: ... # morally IPv4Address | IPv6Address -def ip_network(address: object, strict: bool = ...) -> Any: ... # morally IPv4Network | IPv6Network -def ip_interface(address: object) -> Any: ... # morally IPv4Interface | IPv6Interface +_RawIPAddress = int | str | bytes | IPv4Address | IPv6Address +_RawNetworkPart = IPv4Network | IPv6Network | IPv4Interface | IPv6Interface + +def ip_address(address: _RawIPAddress) -> IPv4Address | IPv6Address: ... +def ip_network(address: _RawIPAddress | _RawNetworkPart, strict: bool = ...) -> IPv4Network | IPv6Network: ... +def ip_interface(address: _RawIPAddress | _RawNetworkPart) -> IPv4Interface | IPv6Interface: ... class _IPAddressBase: - def __eq__(self, other: Any) -> bool: ... - def __ge__(self: _T, other: _T) -> bool: ... - def __gt__(self: _T, other: _T) -> bool: ... - def __le__(self: _T, other: _T) -> bool: ... - def __lt__(self: _T, other: _T) -> bool: ... - def __ne__(self, other: Any) -> bool: ... @property def compressed(self) -> str: ... 
@property @@ -31,10 +29,22 @@ class _IPAddressBase: class _BaseAddress(_IPAddressBase, SupportsInt): def __init__(self, address: object) -> None: ... - def __add__(self: _T, other: int) -> _T: ... + def __add__(self: Self, other: int) -> Self: ... def __hash__(self) -> int: ... def __int__(self) -> int: ... - def __sub__(self: _T, other: int) -> _T: ... + def __sub__(self: Self, other: int) -> Self: ... + def __format__(self, fmt: str) -> str: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self: Self, other: Self) -> bool: ... + if sys.version_info >= (3, 11): + def __ge__(self: Self, other: Self) -> bool: ... + def __gt__(self: Self, other: Self) -> bool: ... + def __le__(self: Self, other: Self) -> bool: ... + else: + def __ge__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + @property def is_global(self) -> bool: ... @property @@ -61,10 +71,21 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def __contains__(self, other: Any) -> bool: ... def __getitem__(self, n: int) -> _A: ... def __iter__(self) -> Iterator[_A]: ... - def address_exclude(self: _T, other: _T) -> Iterator[_T]: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self: Self, other: Self) -> bool: ... + if sys.version_info >= (3, 11): + def __ge__(self: Self, other: Self) -> bool: ... + def __gt__(self: Self, other: Self) -> bool: ... + def __le__(self: Self, other: Self) -> bool: ... + else: + def __ge__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __gt__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + def __le__(self: Self, other: Self, NotImplemented: Any = ...) -> bool: ... + + def address_exclude(self: Self, other: Self) -> Iterator[Self]: ... @property def broadcast_address(self) -> _A: ... 
- def compare_networks(self: _T, other: _T) -> int: ... + def compare_networks(self: Self, other: Self) -> int: ... def hosts(self) -> Iterator[_A]: ... @property def is_global(self) -> bool: ... @@ -84,14 +105,15 @@ class _BaseNetwork(_IPAddressBase, Container[_A], Iterable[_A], Generic[_A]): def max_prefixlen(self) -> int: ... @property def num_addresses(self) -> int: ... - def overlaps(self, other: _BaseNetwork[_A]) -> bool: ... + def overlaps(self, other: _BaseNetwork[IPv4Address] | _BaseNetwork[IPv6Address]) -> bool: ... @property def prefixlen(self) -> int: ... if sys.version_info >= (3, 7): - def subnet_of(self: _T, other: _T) -> bool: ... - def supernet_of(self: _T, other: _T) -> bool: ... - def subnets(self: _T, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Iterator[_T]: ... - def supernet(self: _T, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> _T: ... + def subnet_of(self: Self, other: Self) -> bool: ... + def supernet_of(self: Self, other: Self) -> bool: ... + + def subnets(self: Self, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Iterator[Self]: ... + def supernet(self: Self, prefixlen_diff: int = ..., new_prefix: int | None = ...) -> Self: ... @property def with_hostmask(self) -> str: ... @property @@ -114,11 +136,23 @@ class _BaseInterface(_BaseAddress, Generic[_A, _N]): @property def with_prefixlen(self) -> str: ... -class IPv4Address(_BaseAddress): ... -class IPv4Network(_BaseNetwork[IPv4Address]): ... +class _BaseV4: + @property + def version(self) -> Literal[4]: ... + @property + def max_prefixlen(self) -> Literal[32]: ... + +class IPv4Address(_BaseV4, _BaseAddress): ... +class IPv4Network(_BaseV4, _BaseNetwork[IPv4Address]): ... class IPv4Interface(IPv4Address, _BaseInterface[IPv4Address, IPv4Network]): ... -class IPv6Address(_BaseAddress): +class _BaseV6: + @property + def version(self) -> Literal[6]: ... + @property + def max_prefixlen(self) -> Literal[128]: ... 
+ +class IPv6Address(_BaseV6, _BaseAddress): @property def ipv4_mapped(self) -> IPv4Address | None: ... @property @@ -126,9 +160,12 @@ class IPv6Address(_BaseAddress): @property def sixtofour(self) -> IPv4Address | None: ... @property - def teredo(self) -> Tuple[IPv4Address, IPv4Address] | None: ... + def teredo(self) -> tuple[IPv4Address, IPv4Address] | None: ... + if sys.version_info >= (3, 9): + @property + def scope_id(self) -> str | None: ... -class IPv6Network(_BaseNetwork[IPv6Address]): +class IPv6Network(_BaseV6, _BaseNetwork[IPv6Address]): @property def is_site_local(self) -> bool: ... @@ -136,17 +173,23 @@ class IPv6Interface(IPv6Address, _BaseInterface[IPv6Address, IPv6Network]): ... def v4_int_to_packed(address: int) -> bytes: ... def v6_int_to_packed(address: int) -> bytes: ... + +# Third overload is technically incorrect, but convenient when first and last are return values of ip_address() @overload def summarize_address_range(first: IPv4Address, last: IPv4Address) -> Iterator[IPv4Network]: ... @overload def summarize_address_range(first: IPv6Address, last: IPv6Address) -> Iterator[IPv6Network]: ... +@overload +def summarize_address_range( + first: IPv4Address | IPv6Address, last: IPv4Address | IPv6Address +) -> Iterator[IPv4Network] | Iterator[IPv6Network]: ... def collapse_addresses(addresses: Iterable[_N]) -> Iterator[_N]: ... @overload -def get_mixed_type_key(obj: _A) -> Tuple[int, _A]: ... +def get_mixed_type_key(obj: _A) -> tuple[int, _A]: ... @overload -def get_mixed_type_key(obj: IPv4Network) -> Tuple[int, IPv4Address, IPv4Address]: ... +def get_mixed_type_key(obj: IPv4Network) -> tuple[int, IPv4Address, IPv4Address]: ... @overload -def get_mixed_type_key(obj: IPv6Network) -> Tuple[int, IPv6Address, IPv6Address]: ... +def get_mixed_type_key(obj: IPv6Network) -> tuple[int, IPv6Address, IPv6Address]: ... class AddressValueError(ValueError): ... class NetmaskValueError(ValueError): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/itertools.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/itertools.pyi index 4ffa181bfd7d..35765712202a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/itertools.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/itertools.pyi @@ -1,5 +1,5 @@ import sys -from _typeshed import _T_co +from _typeshed import Self, _T_co from typing import ( Any, Callable, @@ -9,14 +9,15 @@ from typing import ( SupportsComplex, SupportsFloat, SupportsInt, - Tuple, - Type, TypeVar, Union, overload, ) from typing_extensions import Literal, SupportsIndex +if sys.version_info >= (3, 9): + from types import GenericAlias + _T = TypeVar("_T") _S = TypeVar("_S") _N = TypeVar("_N", int, float, SupportsFloat, SupportsInt, SupportsIndex, SupportsComplex) @@ -34,12 +35,12 @@ class count(Iterator[_N], Generic[_N]): @overload def __new__(cls, *, step: _N) -> count[_N]: ... def __next__(self) -> _N: ... - def __iter__(self) -> Iterator[_N]: ... + def __iter__(self: Self) -> Self: ... class cycle(Iterator[_T], Generic[_T]): def __init__(self, __iterable: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... class repeat(Iterator[_T], Generic[_T]): @overload @@ -47,7 +48,8 @@ class repeat(Iterator[_T], Generic[_T]): @overload def __init__(self, object: _T, times: int) -> None: ... def __next__(self) -> _T: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... + def __length_hint__(self) -> int: ... class accumulate(Iterator[_T], Generic[_T]): if sys.version_info >= (3, 8): @@ -57,66 +59,69 @@ class accumulate(Iterator[_T], Generic[_T]): def __init__(self, iterable: Iterable[_S], func: Callable[[_T, _S], _T], *, initial: _T | None = ...) -> None: ... else: def __init__(self, iterable: Iterable[_T], func: Callable[[_T, _T], _T] | None = ...) -> None: ... 
- def __iter__(self) -> Iterator[_T]: ... + + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... class chain(Iterator[_T], Generic[_T]): def __init__(self, *iterables: Iterable[_T]) -> None: ... def __next__(self) -> _T: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... @classmethod - # We use Type and not Type[_S] to not lose the type inference from __iterable - def from_iterable(cls: Type[Any], __iterable: Iterable[Iterable[_S]]) -> Iterator[_S]: ... + # We use type[Any] and not type[_S] to not lose the type inference from __iterable + def from_iterable(cls: type[Any], __iterable: Iterable[Iterable[_S]]) -> chain[_S]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... class compress(Iterator[_T], Generic[_T]): def __init__(self, data: Iterable[_T], selectors: Iterable[Any]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... class dropwhile(Iterator[_T], Generic[_T]): def __init__(self, __predicate: Predicate[_T], __iterable: Iterable[_T]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... class filterfalse(Iterator[_T], Generic[_T]): def __init__(self, __predicate: Predicate[_T] | None, __iterable: Iterable[_T]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") -class groupby(Iterator[Tuple[_T, Iterator[_S]]], Generic[_T, _S]): +class groupby(Iterator[tuple[_T, Iterator[_S]]], Generic[_T, _S]): @overload def __new__(cls, iterable: Iterable[_T1], key: None = ...) -> groupby[_T1, _T1]: ... @overload def __new__(cls, iterable: Iterable[_T1], key: Callable[[_T1], _T2]) -> groupby[_T2, _T1]: ... - def __iter__(self) -> Iterator[Tuple[_T, Iterator[_S]]]: ... 
- def __next__(self) -> Tuple[_T, Iterator[_S]]: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[_T, Iterator[_S]]: ... class islice(Iterator[_T], Generic[_T]): @overload def __init__(self, __iterable: Iterable[_T], __stop: int | None) -> None: ... @overload def __init__(self, __iterable: Iterable[_T], __start: int | None, __stop: int | None, __step: int | None = ...) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... class starmap(Iterator[_T], Generic[_T]): def __init__(self, __function: Callable[..., _T], __iterable: Iterable[Iterable[Any]]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... class takewhile(Iterator[_T], Generic[_T]): def __init__(self, __predicate: Predicate[_T], __iterable: Iterable[_T]) -> None: ... - def __iter__(self) -> Iterator[_T]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T: ... -def tee(__iterable: Iterable[_T], __n: int = ...) -> Tuple[Iterator[_T], ...]: ... +def tee(__iterable: Iterable[_T], __n: int = ...) -> tuple[Iterator[_T], ...]: ... class zip_longest(Iterator[Any]): def __init__(self, *p: Iterable[Any], fillvalue: Any = ...) -> None: ... - def __iter__(self) -> Iterator[Any]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> Any: ... _T3 = TypeVar("_T3") @@ -126,15 +131,15 @@ _T6 = TypeVar("_T6") class product(Iterator[_T_co], Generic[_T_co]): @overload - def __new__(cls, __iter1: Iterable[_T1]) -> product[Tuple[_T1]]: ... + def __new__(cls, __iter1: Iterable[_T1]) -> product[tuple[_T1]]: ... @overload - def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> product[Tuple[_T1, _T2]]: ... + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> product[tuple[_T1, _T2]]: ... 
@overload - def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> product[Tuple[_T1, _T2, _T3]]: ... + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> product[tuple[_T1, _T2, _T3]]: ... @overload def __new__( cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] - ) -> product[Tuple[_T1, _T2, _T3, _T4]]: ... + ) -> product[tuple[_T1, _T2, _T3, _T4]]: ... @overload def __new__( cls, @@ -143,7 +148,7 @@ class product(Iterator[_T_co], Generic[_T_co]): __iter3: Iterable[_T3], __iter4: Iterable[_T4], __iter5: Iterable[_T5], - ) -> product[Tuple[_T1, _T2, _T3, _T4, _T5]]: ... + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5]]: ... @overload def __new__( cls, @@ -153,7 +158,7 @@ class product(Iterator[_T_co], Generic[_T_co]): __iter4: Iterable[_T4], __iter5: Iterable[_T5], __iter6: Iterable[_T6], - ) -> product[Tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... + ) -> product[tuple[_T1, _T2, _T3, _T4, _T5, _T6]]: ... @overload def __new__( cls, @@ -165,40 +170,40 @@ class product(Iterator[_T_co], Generic[_T_co]): __iter6: Iterable[Any], __iter7: Iterable[Any], *iterables: Iterable[Any], - ) -> product[Tuple[Any, ...]]: ... + ) -> product[tuple[Any, ...]]: ... @overload - def __new__(cls, *iterables: Iterable[_T1], repeat: int) -> product[Tuple[_T1, ...]]: ... + def __new__(cls, *iterables: Iterable[_T1], repeat: int) -> product[tuple[_T1, ...]]: ... @overload - def __new__(cls, *iterables: Iterable[Any], repeat: int = ...) -> product[Tuple[Any, ...]]: ... - def __iter__(self) -> Iterator[_T_co]: ... + def __new__(cls, *iterables: Iterable[Any], repeat: int = ...) -> product[tuple[Any, ...]]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T_co: ... -class permutations(Iterator[Tuple[_T, ...]], Generic[_T]): +class permutations(Iterator[tuple[_T, ...]], Generic[_T]): def __init__(self, iterable: Iterable[_T], r: int | None = ...) 
-> None: ... - def __iter__(self) -> Iterator[Tuple[_T, ...]]: ... - def __next__(self) -> Tuple[_T, ...]: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[_T, ...]: ... class combinations(Iterator[_T_co], Generic[_T_co]): @overload - def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[Tuple[_T, _T]]: ... + def __new__(cls, iterable: Iterable[_T], r: Literal[2]) -> combinations[tuple[_T, _T]]: ... @overload - def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> combinations[Tuple[_T, _T, _T]]: ... + def __new__(cls, iterable: Iterable[_T], r: Literal[3]) -> combinations[tuple[_T, _T, _T]]: ... @overload - def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> combinations[Tuple[_T, _T, _T, _T]]: ... + def __new__(cls, iterable: Iterable[_T], r: Literal[4]) -> combinations[tuple[_T, _T, _T, _T]]: ... @overload - def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[Tuple[_T, _T, _T, _T, _T]]: ... + def __new__(cls, iterable: Iterable[_T], r: Literal[5]) -> combinations[tuple[_T, _T, _T, _T, _T]]: ... @overload - def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[Tuple[_T, ...]]: ... - def __iter__(self) -> Iterator[_T_co]: ... + def __new__(cls, iterable: Iterable[_T], r: int) -> combinations[tuple[_T, ...]]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T_co: ... -class combinations_with_replacement(Iterator[Tuple[_T, ...]], Generic[_T]): +class combinations_with_replacement(Iterator[tuple[_T, ...]], Generic[_T]): def __init__(self, iterable: Iterable[_T], r: int) -> None: ... - def __iter__(self) -> Iterator[Tuple[_T, ...]]: ... - def __next__(self) -> Tuple[_T, ...]: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[_T, ...]: ... if sys.version_info >= (3, 10): class pairwise(Iterator[_T_co], Generic[_T_co]): - def __new__(cls, __iterable: Iterable[_T]) -> pairwise[Tuple[_T, _T]]: ... - def __iter__(self) -> Iterator[_T_co]: ... 
+ def __new__(cls, __iterable: Iterable[_T]) -> pairwise[tuple[_T, _T]]: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> _T_co: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/json/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/json/__init__.pyi index e37e68ca3b99..8e1a36756398 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/json/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/json/__init__.pyi @@ -1,9 +1,11 @@ from _typeshed import SupportsRead -from typing import IO, Any, Callable, Tuple, Type +from typing import IO, Any, Callable from .decoder import JSONDecodeError as JSONDecodeError, JSONDecoder as JSONDecoder from .encoder import JSONEncoder as JSONEncoder +__all__ = ["dump", "dumps", "load", "loads", "JSONDecoder", "JSONDecodeError", "JSONEncoder"] + def dumps( obj: Any, *, @@ -11,9 +13,9 @@ def dumps( ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., - cls: Type[JSONEncoder] | None = ..., + cls: type[JSONEncoder] | None = ..., indent: None | int | str = ..., - separators: Tuple[str, str] | None = ..., + separators: tuple[str, str] | None = ..., default: Callable[[Any], Any] | None = ..., sort_keys: bool = ..., **kwds: Any, @@ -26,9 +28,9 @@ def dump( ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., - cls: Type[JSONEncoder] | None = ..., + cls: type[JSONEncoder] | None = ..., indent: None | int | str = ..., - separators: Tuple[str, str] | None = ..., + separators: tuple[str, str] | None = ..., default: Callable[[Any], Any] | None = ..., sort_keys: bool = ..., **kwds: Any, @@ -36,23 +38,23 @@ def dump( def loads( s: str | bytes, *, - cls: Type[JSONDecoder] | None = ..., + cls: type[JSONDecoder] | None = ..., object_hook: Callable[[dict[Any, Any]], Any] | None = ..., parse_float: Callable[[str], Any] | None = ..., parse_int: Callable[[str], Any] | None = ..., parse_constant: Callable[[str], Any] | None = ..., - 
object_pairs_hook: Callable[[list[Tuple[Any, Any]]], Any] | None = ..., + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., **kwds: Any, ) -> Any: ... def load( fp: SupportsRead[str | bytes], *, - cls: Type[JSONDecoder] | None = ..., + cls: type[JSONDecoder] | None = ..., object_hook: Callable[[dict[Any, Any]], Any] | None = ..., parse_float: Callable[[str], Any] | None = ..., parse_int: Callable[[str], Any] | None = ..., parse_constant: Callable[[str], Any] | None = ..., - object_pairs_hook: Callable[[list[Tuple[Any, Any]]], Any] | None = ..., + object_pairs_hook: Callable[[list[tuple[Any, Any]]], Any] | None = ..., **kwds: Any, ) -> Any: ... def detect_encoding(b: bytes) -> str: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/json/decoder.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/json/decoder.pyi index 5135eb4e3e45..866836758545 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/json/decoder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/json/decoder.pyi @@ -1,4 +1,6 @@ -from typing import Any, Callable, Tuple +from typing import Any, Callable + +__all__ = ["JSONDecoder", "JSONDecodeError"] class JSONDecodeError(ValueError): msg: str @@ -14,7 +16,7 @@ class JSONDecoder: parse_int: Callable[[str], Any] parse_constant: Callable[[str], Any] strict: bool - object_pairs_hook: Callable[[list[Tuple[str, Any]]], Any] + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] def __init__( self, *, @@ -23,7 +25,7 @@ class JSONDecoder: parse_int: Callable[[str], Any] | None = ..., parse_constant: Callable[[str], Any] | None = ..., strict: bool = ..., - object_pairs_hook: Callable[[list[Tuple[str, Any]]], Any] | None = ..., + object_pairs_hook: Callable[[list[tuple[str, Any]]], Any] | None = ..., ) -> None: ... def decode(self, s: str, _w: Callable[..., Any] = ...) -> Any: ... # _w is undocumented - def raw_decode(self, s: str, idx: int = ...) -> Tuple[Any, int]: ... 
+ def raw_decode(self, s: str, idx: int = ...) -> tuple[Any, int]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/json/encoder.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/json/encoder.pyi index 36113ed229ca..6dd74896e5a0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/json/encoder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/json/encoder.pyi @@ -1,4 +1,10 @@ -from typing import Any, Callable, Iterator, Tuple +from typing import Any, Callable, Iterator, Pattern + +ESCAPE: Pattern[str] +ESCAPE_ASCII: Pattern[str] +HAS_UTF8: Pattern[bytes] +ESCAPE_DCT: dict[str, str] +INFINITY: float def py_encode_basestring(s: str) -> str: ... # undocumented def py_encode_basestring_ascii(s: str) -> str: ... # undocumented @@ -22,7 +28,7 @@ class JSONEncoder: allow_nan: bool = ..., sort_keys: bool = ..., indent: int | None = ..., - separators: Tuple[str, str] | None = ..., + separators: tuple[str, str] | None = ..., default: Callable[..., Any] | None = ..., ) -> None: ... def default(self, o: Any) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/keyword.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/keyword.pyi index ac052feeba58..e9a9877d57da 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/keyword.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/keyword.pyi @@ -1,6 +1,11 @@ import sys from typing import Sequence +if sys.version_info >= (3, 9): + __all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] +else: + __all__ = ["iskeyword", "kwlist"] + def iskeyword(s: str) -> bool: ... 
kwlist: Sequence[str] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi index a8159dccf037..4ecba031942c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/driver.pyi @@ -4,6 +4,8 @@ from lib2to3.pytree import _NL, _Convert from logging import Logger from typing import IO, Any, Iterable +__all__ = ["Driver", "load_grammar"] + class Driver: grammar: Grammar logger: Logger diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi index 48cb4eae916c..3c3b90c52f7c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/grammar.pyi @@ -1,10 +1,9 @@ -from _typeshed import StrPath -from typing import Dict, List, Optional, Tuple, TypeVar +from _typeshed import Self, StrPath +from typing import Optional -_P = TypeVar("_P") -_Label = Tuple[int, Optional[str]] -_DFA = List[List[Tuple[int, int]]] -_DFAS = Tuple[_DFA, Dict[int, int]] +_Label = tuple[int, Optional[str]] +_DFA = list[list[tuple[int, int]]] +_DFAS = tuple[_DFA, dict[int, int]] class Grammar: symbol2number: dict[str, int] @@ -19,7 +18,7 @@ class Grammar: def __init__(self) -> None: ... def dump(self, filename: StrPath) -> None: ... def load(self, filename: StrPath) -> None: ... - def copy(self: _P) -> _P: ... + def copy(self: Self) -> Self: ... def report(self) -> None: ... 
opmap_raw: str diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi index d6e1ec4b72b8..e776ed1e5a61 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/parse.pyi @@ -1,6 +1,6 @@ from lib2to3.pgen2.grammar import _DFAS, Grammar from lib2to3.pytree import _NL, _Convert, _RawNode -from typing import Any, Sequence, Set, Tuple +from typing import Any, Sequence _Context = Sequence[Any] @@ -14,9 +14,9 @@ class ParseError(Exception): class Parser: grammar: Grammar convert: _Convert - stack: list[Tuple[_DFAS, int, _RawNode]] + stack: list[tuple[_DFAS, int, _RawNode]] rootnode: _NL | None - used_names: Set[str] + used_names: set[str] def __init__(self, grammar: Grammar, convert: _Convert | None = ...) -> None: ... def setup(self, start: int | None = ...) -> None: ... def addtoken(self, type: int, value: str | None, context: _Context) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/pgen.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/pgen.pyi index 87b4a8aad71b..2628e1223fb4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/pgen.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/pgen.pyi @@ -1,7 +1,7 @@ from _typeshed import StrPath from lib2to3.pgen2 import grammar from lib2to3.pgen2.tokenize import _TokenInfo -from typing import IO, Any, Iterable, Iterator, NoReturn, Tuple +from typing import IO, Any, Iterable, Iterator, NoReturn class PgenGrammar(grammar.Grammar): ... @@ -16,21 +16,21 @@ class ParserGenerator: def make_label(self, c: PgenGrammar, label: str) -> int: ... def addfirstsets(self) -> None: ... def calcfirst(self, name: str) -> None: ... - def parse(self) -> Tuple[dict[str, list[DFAState]], str]: ... 
+ def parse(self) -> tuple[dict[str, list[DFAState]], str]: ... def make_dfa(self, start: NFAState, finish: NFAState) -> list[DFAState]: ... def dump_nfa(self, name: str, start: NFAState, finish: NFAState) -> list[DFAState]: ... def dump_dfa(self, name: str, dfa: Iterable[DFAState]) -> None: ... def simplify_dfa(self, dfa: list[DFAState]) -> None: ... - def parse_rhs(self) -> Tuple[NFAState, NFAState]: ... - def parse_alt(self) -> Tuple[NFAState, NFAState]: ... - def parse_item(self) -> Tuple[NFAState, NFAState]: ... - def parse_atom(self) -> Tuple[NFAState, NFAState]: ... + def parse_rhs(self) -> tuple[NFAState, NFAState]: ... + def parse_alt(self) -> tuple[NFAState, NFAState]: ... + def parse_item(self) -> tuple[NFAState, NFAState]: ... + def parse_atom(self) -> tuple[NFAState, NFAState]: ... def expect(self, type: int, value: Any | None = ...) -> str: ... def gettoken(self) -> None: ... def raise_error(self, msg: str, *args: Any) -> NoReturn: ... class NFAState: - arcs: list[Tuple[str | None, NFAState]] + arcs: list[tuple[str | None, NFAState]] def __init__(self) -> None: ... def addarc(self, next: NFAState, label: str | None = ...) -> None: ... @@ -41,6 +41,6 @@ class DFAState: def __init__(self, nfaset: dict[NFAState, Any], final: NFAState) -> None: ... def addarc(self, next: DFAState, label: str) -> None: ... def unifystate(self, old: DFAState, new: DFAState) -> None: ... - def __eq__(self, other: Any) -> bool: ... + def __eq__(self, other: DFAState) -> bool: ... # type: ignore[override] def generate_grammar(filename: StrPath = ...) -> PgenGrammar: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi index c4ab376eca64..2f944c40a02c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/token.pyi @@ -1,3 +1,5 @@ +import sys + ENDMARKER: int NAME: int NUMBER: int @@ -57,6 +59,8 @@ ATEQUAL: int AWAIT: int ASYNC: int ERRORTOKEN: int +if sys.version_info >= (3, 7): + COLONEQUAL: int N_TOKENS: int NT_OFFSET: int tok_name: dict[int, str] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi index 467fb0de25b9..c1b5a91df9e6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pgen2/tokenize.pyi @@ -1,9 +1,154 @@ -from lib2to3.pgen2.token import * # noqa -from typing import Callable, Iterable, Iterator, Tuple +import sys +from lib2to3.pgen2.token import * +from typing import Callable, Iterable, Iterator -_Coord = Tuple[int, int] +if sys.version_info >= (3, 8): + __all__ = [ + "AMPER", + "AMPEREQUAL", + "ASYNC", + "AT", + "ATEQUAL", + "AWAIT", + "BACKQUOTE", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COLONEQUAL", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + 
"RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", + "tokenize", + "generate_tokens", + "untokenize", + ] +else: + __all__ = [ + "AMPER", + "AMPEREQUAL", + "ASYNC", + "AT", + "ATEQUAL", + "AWAIT", + "BACKQUOTE", + "CIRCUMFLEX", + "CIRCUMFLEXEQUAL", + "COLON", + "COMMA", + "COMMENT", + "DEDENT", + "DOT", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "DOUBLESTAR", + "DOUBLESTAREQUAL", + "ENDMARKER", + "EQEQUAL", + "EQUAL", + "ERRORTOKEN", + "GREATER", + "GREATEREQUAL", + "INDENT", + "ISEOF", + "ISNONTERMINAL", + "ISTERMINAL", + "LBRACE", + "LEFTSHIFT", + "LEFTSHIFTEQUAL", + "LESS", + "LESSEQUAL", + "LPAR", + "LSQB", + "MINEQUAL", + "MINUS", + "NAME", + "NEWLINE", + "NL", + "NOTEQUAL", + "NT_OFFSET", + "NUMBER", + "N_TOKENS", + "OP", + "PERCENT", + "PERCENTEQUAL", + "PLUS", + "PLUSEQUAL", + "RARROW", + "RBRACE", + "RIGHTSHIFT", + "RIGHTSHIFTEQUAL", + "RPAR", + "RSQB", + "SEMI", + "SLASH", + "SLASHEQUAL", + "STAR", + "STAREQUAL", + "STRING", + "TILDE", + "VBAR", + "VBAREQUAL", + "tok_name", + "tokenize", + "generate_tokens", + "untokenize", + ] + +_Coord = tuple[int, int] _TokenEater = Callable[[int, str, _Coord, _Coord, str], None] -_TokenInfo = Tuple[int, str, _Coord, _Coord, str] +_TokenInfo = tuple[int, str, _Coord, _Coord, str] class TokenError(Exception): ... class StopTokenizing(Exception): ... @@ -17,7 +162,7 @@ class Untokenizer: def __init__(self) -> None: ... def add_whitespace(self, start: _Coord) -> None: ... def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ... - def compat(self, token: Tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... + def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ... def untokenize(iterable: Iterable[_TokenInfo]) -> str: ... def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pytree.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pytree.pyi index f926e6f7f8b3..23a0069db351 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pytree.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lib2to3/pytree.pyi @@ -1,11 +1,11 @@ +from _typeshed import Self from lib2to3.pgen2.grammar import Grammar -from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, TypeVar, Union +from typing import Any, Callable, Iterator, Optional, Union -_P = TypeVar("_P") _NL = Union[Node, Leaf] -_Context = Tuple[str, int, int] -_Results = Dict[str, _NL] -_RawNode = Tuple[int, str, _Context, Optional[List[_NL]]] +_Context = tuple[str, int, int] +_Results = dict[str, _NL] +_RawNode = tuple[int, str, _Context, Optional[list[_NL]]] _Convert = Callable[[Grammar, _RawNode], Any] HUGE: int @@ -19,9 +19,9 @@ class Base: children: list[_NL] was_changed: bool was_checked: bool - def __eq__(self, other: Any) -> bool: ... - def _eq(self: _P, other: _P) -> bool: ... - def clone(self: _P) -> _P: ... + def __eq__(self, other: object) -> bool: ... + def _eq(self: Self, other: Self) -> bool: ... + def clone(self: Self) -> Self: ... def post_order(self) -> Iterator[_NL]: ... def pre_order(self) -> Iterator[_NL]: ... def replace(self, new: _NL | list[_NL]) -> None: ... @@ -68,7 +68,7 @@ class BasePattern: def optimize(self) -> BasePattern: ... # sic, subclasses are free to optimize themselves into different patterns def match(self, node: _NL, results: _Results | None = ...) -> bool: ... def match_seq(self, nodes: list[_NL], results: _Results | None = ...) -> bool: ... - def generate_matches(self, nodes: list[_NL]) -> Iterator[Tuple[int, _Results]]: ... + def generate_matches(self, nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... 
class LeafPattern(BasePattern): def __init__(self, type: int | None = ..., content: str | None = ..., name: str | None = ...) -> None: ... @@ -85,4 +85,4 @@ class WildcardPattern(BasePattern): class NegatedPattern(BasePattern): def __init__(self, content: str | None = ...) -> None: ... -def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[Tuple[int, _Results]]: ... +def generate_matches(patterns: list[BasePattern], nodes: list[_NL]) -> Iterator[tuple[int, _Results]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/linecache.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/linecache.pyi index a66614bf6b37..6b3761f4a0c9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/linecache.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/linecache.pyi @@ -1,7 +1,13 @@ -from typing import Any, Dict, List, Protocol, Tuple +import sys +from typing import Any, Protocol -_ModuleGlobals = Dict[str, Any] -_ModuleMetadata = Tuple[int, float, List[str], str] +if sys.version_info >= (3, 9): + __all__ = ["getline", "clearcache", "checkcache", "lazycache"] +else: + __all__ = ["getline", "clearcache", "checkcache"] + +_ModuleGlobals = dict[str, Any] +_ModuleMetadata = tuple[int, float, list[str], str] class _SourceLoader(Protocol): def __call__(self) -> str | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi index da518575ac7c..899bf0000a2c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/locale.pyi @@ -1,16 +1,46 @@ import sys +from _typeshed import StrPath + +__all__ = [ + "getlocale", + "getdefaultlocale", + "getpreferredencoding", + "Error", + "setlocale", + "resetlocale", + "localeconv", + "strcoll", + "strxfrm", + "str", + "atof", + "atoi", + "format", + "format_string", + "currency", + "normalize", + "LC_CTYPE", + "LC_COLLATE", + "LC_MESSAGES", + "LC_TIME", + "LC_MONETARY", + "LC_NUMERIC", + "LC_ALL", + "CHAR_MAX", +] # This module defines a function "str()", which is why "str" can't be used # as a type annotation or type alias. from builtins import str as _str from decimal import Decimal -from typing import Any, Callable, Iterable, Mapping, Sequence, Tuple +from typing import Any, Callable, Iterable, Mapping, Sequence CODESET: int D_T_FMT: int D_FMT: int T_FMT: int T_FMT_AMPM: int +AM_STR: int +PM_STR: int DAY_1: int DAY_2: int @@ -80,13 +110,13 @@ class Error(Exception): ... def setlocale(category: int, locale: _str | Iterable[_str] | None = ...) -> _str: ... def localeconv() -> Mapping[_str, int | _str | list[int]]: ... def nl_langinfo(__key: int) -> _str: ... -def getdefaultlocale(envvars: Tuple[_str, ...] = ...) -> Tuple[_str | None, _str | None]: ... +def getdefaultlocale(envvars: tuple[_str, ...] = ...) -> tuple[_str | None, _str | None]: ... def getlocale(category: int = ...) -> Sequence[_str]: ... def getpreferredencoding(do_setlocale: bool = ...) -> _str: ... def normalize(localename: _str) -> _str: ... def resetlocale(category: int = ...) -> None: ... -def strcoll(string1: _str, string2: _str) -> int: ... -def strxfrm(string: _str) -> _str: ... +def strcoll(__os1: _str, __os2: _str) -> int: ... 
+def strxfrm(__string: _str) -> _str: ... def format(percent: _str, value: float | Decimal, grouping: bool = ..., monetary: bool = ..., *additional: Any) -> _str: ... if sys.version_info >= (3, 7): @@ -101,6 +131,17 @@ def atof(string: _str, func: Callable[[_str], float] = ...) -> float: ... def atoi(string: _str) -> int: ... def str(val: float) -> _str: ... +# native gettext functions +# https://docs.python.org/3/library/locale.html#access-to-message-catalogs +# https://github.com/python/cpython/blob/f4c03484da59049eb62a9bf7777b963e2267d187/Modules/_localemodule.c#L626 +if sys.platform == "linux" or sys.platform == "darwin": + def gettext(__msg: _str) -> _str: ... + def dgettext(__domain: _str | None, __msg: _str) -> _str: ... + def dcgettext(__domain: _str | None, __msg: _str, __category: int) -> _str: ... + def textdomain(__domain: _str | None) -> _str: ... + def bindtextdomain(__domain: _str, __dir: StrPath | None) -> _str: ... + def bind_textdomain_codeset(__domain: _str, __codeset: _str | None) -> _str | None: ... 
+ locale_alias: dict[_str, _str] # undocumented locale_encoding_alias: dict[_str, _str] # undocumented windows_locale: dict[int, _str] # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/logging/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/logging/__init__.pyi index c972559c9b60..c860938c31ea 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/logging/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/logging/__init__.pyi @@ -1,17 +1,62 @@ import sys import threading -from _typeshed import StrPath, SupportsWrite +from _typeshed import Self, StrPath, SupportsWrite from collections.abc import Callable, Iterable, Mapping, MutableMapping, Sequence from io import TextIOWrapper from string import Template from time import struct_time from types import FrameType, TracebackType -from typing import Any, ClassVar, Generic, Optional, Pattern, TextIO, Tuple, Type, TypeVar, Union, overload +from typing import Any, ClassVar, Generic, Optional, Pattern, TextIO, TypeVar, Union, overload from typing_extensions import Literal -_SysExcInfoType = Union[Tuple[Type[BaseException], BaseException, Optional[TracebackType]], Tuple[None, None, None]] +__all__ = [ + "BASIC_FORMAT", + "BufferingFormatter", + "CRITICAL", + "DEBUG", + "ERROR", + "FATAL", + "FileHandler", + "Filter", + "Formatter", + "Handler", + "INFO", + "LogRecord", + "Logger", + "LoggerAdapter", + "NOTSET", + "NullHandler", + "StreamHandler", + "WARN", + "WARNING", + "addLevelName", + "basicConfig", + "captureWarnings", + "critical", + "debug", + "disable", + "error", + "exception", + "fatal", + "getLevelName", + "getLogger", + "getLoggerClass", + "info", + "log", + "makeLogRecord", + "setLoggerClass", + "shutdown", + "warn", + "warning", + "getLogRecordFactory", + "setLogRecordFactory", + "lastResort", + "raiseExceptions", +] + +_SysExcInfoType = Union[tuple[type[BaseException], BaseException, Optional[TracebackType]], tuple[None, None, None]] 
_ExcInfoType = Union[None, bool, _SysExcInfoType, BaseException] -_ArgsType = Union[Tuple[object, ...], Mapping[str, object]] +_ArgsType = Union[tuple[object, ...], Mapping[str, object]] _FilterType = Union[Filter, Callable[[LogRecord], int]] _Level = Union[int, str] _FormatStyle = Literal["%", "{", "$"] @@ -27,23 +72,23 @@ def currentframe() -> FrameType: ... _levelToName: dict[int, str] _nameToLevel: dict[str, int] -class Filterer(object): +class Filterer: filters: list[Filter] def __init__(self) -> None: ... def addFilter(self, filter: _FilterType) -> None: ... def removeFilter(self, filter: _FilterType) -> None: ... def filter(self, record: LogRecord) -> bool: ... -class Manager(object): # undocumented +class Manager: # undocumented root: RootLogger disable: int emittedNoHandlerWarning: bool loggerDict: dict[str, Logger | PlaceHolder] - loggerClass: Type[Logger] | None + loggerClass: type[Logger] | None logRecordFactory: Callable[..., LogRecord] | None def __init__(self, rootnode: RootLogger) -> None: ... def getLogger(self, name: str) -> Logger: ... - def setLoggerClass(self, klass: Type[Logger]) -> None: ... + def setLoggerClass(self, klass: type[Logger]) -> None: ... def setLogRecordFactory(self, factory: Callable[..., LogRecord]) -> None: ... class Logger(Filterer): @@ -59,7 +104,7 @@ class Logger(Filterer): def setLevel(self, level: _Level) -> None: ... def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... - def getChild(self, suffix: str) -> Logger: ... + def getChild(self: Self, suffix: str) -> Self: ... # see python/typing#980 if sys.version_info >= (3, 8): def debug( self, @@ -227,6 +272,7 @@ class Logger(Filterer): def findCaller(self, stack_info: bool = ..., stacklevel: int = ...) -> tuple[str, int, str, str | None]: ... else: def findCaller(self, stack_info: bool = ...) -> tuple[str, int, str, str | None]: ... + def handle(self, record: LogRecord) -> None: ... 
def makeRecord( self, @@ -285,12 +331,23 @@ class Formatter: else: default_msec_format: str - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 10): + def __init__( + self, + fmt: str | None = ..., + datefmt: str | None = ..., + style: _FormatStyle = ..., + validate: bool = ..., + *, + defaults: Mapping[str, Any] | None = ..., + ) -> None: ... + elif sys.version_info >= (3, 8): def __init__( self, fmt: str | None = ..., datefmt: str | None = ..., style: _FormatStyle = ..., validate: bool = ... ) -> None: ... else: def __init__(self, fmt: str | None = ..., datefmt: str | None = ..., style: _FormatStyle = ...) -> None: ... + def format(self, record: LogRecord) -> str: ... def formatTime(self, record: LogRecord, datefmt: str | None = ...) -> str: ... def formatException(self, ei: _SysExcInfoType) -> str: ... @@ -326,6 +383,7 @@ class LogRecord: lineno: int module: str msecs: float + # Only created when logging.Formatter.format is called. See #6132. message: str msg: str name: str @@ -357,10 +415,11 @@ class LoggerAdapter(Generic[_L]): manager: Manager # undocumented if sys.version_info >= (3, 10): extra: Mapping[str, object] | None - def __init__(self, logger: _L, extra: Mapping[str, object] | None) -> None: ... + def __init__(self, logger: _L, extra: Mapping[str, object] | None = ...) -> None: ... else: extra: Mapping[str, object] def __init__(self, logger: _L, extra: Mapping[str, object]) -> None: ... + def process(self, msg: Any, kwargs: MutableMapping[str, Any]) -> tuple[Any, MutableMapping[str, Any]]: ... if sys.version_info >= (3, 8): def debug( @@ -518,6 +577,7 @@ class LoggerAdapter(Generic[_L]): extra: Mapping[str, object] | None = ..., **kwargs: object, ) -> None: ... + def isEnabledFor(self, level: int) -> bool: ... def getEffectiveLevel(self) -> int: ... def setLevel(self, level: _Level) -> None: ... @@ -535,7 +595,7 @@ class LoggerAdapter(Generic[_L]): def name(self) -> str: ... # undocumented def getLogger(name: str | None = ...) -> Logger: ... 
-def getLoggerClass() -> Type[Logger]: ... +def getLoggerClass() -> type[Logger]: ... def getLogRecordFactory() -> Callable[..., LogRecord]: ... if sys.version_info >= (3, 8): @@ -692,7 +752,7 @@ else: ) -> None: ... def shutdown(handlerList: Sequence[Any] = ...) -> None: ... # handlerList is undocumented -def setLoggerClass(klass: Type[Logger]) -> None: ... +def setLoggerClass(klass: type[Logger]) -> None: ... def captureWarnings(capture: bool) -> None: ... def setLogRecordFactory(factory: Callable[..., LogRecord]) -> None: ... @@ -722,6 +782,7 @@ class FileHandler(StreamHandler[TextIOWrapper]): ) -> None: ... else: def __init__(self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ...) -> None: ... + def _open(self) -> TextIOWrapper: ... # undocumented class NullHandler(Handler): ... @@ -738,17 +799,22 @@ class RootLogger(Logger): root: RootLogger -class PercentStyle(object): # undocumented +class PercentStyle: # undocumented default_format: str asctime_format: str asctime_search: str if sys.version_info >= (3, 8): validation_pattern: Pattern[str] _fmt: str - def __init__(self, fmt: str) -> None: ... + if sys.version_info >= (3, 10): + def __init__(self, fmt: str, *, defaults: Mapping[str, Any] | None = ...) -> None: ... + else: + def __init__(self, fmt: str) -> None: ... + def usesTime(self) -> bool: ... if sys.version_info >= (3, 8): def validate(self) -> None: ... + def format(self, record: Any) -> str: ... 
class StrFormatStyle(PercentStyle): # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/logging/config.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/logging/config.pyi index f84865e35f92..8ee9e7b339b5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/logging/config.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/logging/config.pyi @@ -3,12 +3,14 @@ from _typeshed import StrOrBytesPath, StrPath from collections.abc import Callable from configparser import RawConfigParser from threading import Thread -from typing import IO, Any, Pattern +from typing import IO, Any, Pattern, Sequence + +from . import _Level if sys.version_info >= (3, 8): - from typing import Literal + from typing import Literal, TypedDict else: - from typing_extensions import Literal + from typing_extensions import Literal, TypedDict if sys.version_info >= (3, 7): _Path = StrOrBytesPath @@ -19,7 +21,34 @@ DEFAULT_LOGGING_CONFIG_PORT: int RESET_ERROR: int # undocumented IDENTIFIER: Pattern[str] # undocumented -def dictConfig(config: dict[str, Any]) -> None: ... 
+class _RootLoggerConfiguration(TypedDict, total=False): + level: _Level + filters: Sequence[str] + handlers: Sequence[str] + +class _LoggerConfiguration(_RootLoggerConfiguration, TypedDict, total=False): + propagate: bool + +class _OptionalDictConfigArgs(TypedDict, total=False): + # these two can have custom factories (key: `()`) which can have extra keys + formatters: dict[str, dict[str, Any]] + filters: dict[str, dict[str, Any]] + # type checkers would warn about extra keys if this was a TypedDict + handlers: dict[str, dict[str, Any]] + loggers: dict[str, _LoggerConfiguration] + root: _RootLoggerConfiguration | None + incremental: bool + disable_existing_loggers: bool + +class _DictConfigArgs(_OptionalDictConfigArgs, TypedDict): + version: Literal[1] + +# Accept dict[str, Any] to avoid false positives if called with a dict +# type, since dict types are not compatible with TypedDicts. +# +# Also accept a TypedDict type, to allow callers to use TypedDict +# types, and for somewhat stricter type checking of dict literals. +def dictConfig(config: _DictConfigArgs | dict[str, Any]) -> None: ... if sys.version_info >= (3, 10): def fileConfig( diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/logging/handlers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/logging/handlers.pyi index 762359bc17bb..06cddd94fc69 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/logging/handlers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/logging/handlers.pyi @@ -29,6 +29,7 @@ class WatchedFileHandler(FileHandler): ) -> None: ... else: def __init__(self, filename: StrPath, mode: str = ..., encoding: str | None = ..., delay: bool = ...) -> None: ... + def _statstream(self) -> None: ... # undocumented def reopenIfNeeded(self) -> None: ... @@ -41,12 +42,13 @@ class BaseRotatingHandler(FileHandler): ) -> None: ... else: def __init__(self, filename: StrPath, mode: str, encoding: str | None = ..., delay: bool = ...) -> None: ... 
+ def rotation_filename(self, default_name: str) -> str: ... def rotate(self, source: str, dest: str) -> None: ... class RotatingFileHandler(BaseRotatingHandler): maxBytes: str # undocumented - backupCount: str # undocumented + backupCount: int # undocumented if sys.version_info >= (3, 9): def __init__( self, @@ -68,12 +70,13 @@ class RotatingFileHandler(BaseRotatingHandler): encoding: str | None = ..., delay: bool = ..., ) -> None: ... + def doRollover(self) -> None: ... def shouldRollover(self, record: LogRecord) -> int: ... # undocumented class TimedRotatingFileHandler(BaseRotatingHandler): when: str # undocumented - backupCount: str # undocumented + backupCount: int # undocumented utc: bool # undocumented atTime: datetime.datetime | None # undocumented interval: int # undocumented @@ -106,6 +109,7 @@ class TimedRotatingFileHandler(BaseRotatingHandler): utc: bool = ..., atTime: datetime.datetime | None = ..., ) -> None: ... + def doRollover(self) -> None: ... def shouldRollover(self, record: LogRecord) -> int: ... # undocumented def computeRollover(self, currentTime: int) -> int: ... # undocumented @@ -128,7 +132,7 @@ class SocketHandler(Handler): def createSocket(self) -> None: ... class DatagramHandler(SocketHandler): - def makeSocket(self) -> socket: ... # type: ignore + def makeSocket(self) -> socket: ... # type: ignore[override] class SysLogHandler(Handler): LOG_EMERG: int @@ -250,11 +254,12 @@ class QueueHandler(Handler): else: queue: Queue[Any] # undocumented def __init__(self, queue: Queue[Any]) -> None: ... + def prepare(self, record: LogRecord) -> Any: ... def enqueue(self, record: LogRecord) -> None: ... class QueueListener: - handlers: tuple[Handler] # undocumented + handlers: tuple[Handler, ...] 
# undocumented respect_handler_level: bool # undocumented if sys.version_info >= (3, 7): queue: SimpleQueue[Any] | Queue[Any] # undocumented @@ -264,6 +269,7 @@ class QueueListener: else: queue: Queue[Any] # undocumented def __init__(self, queue: Queue[Any], *handlers: Handler, respect_handler_level: bool = ...) -> None: ... + def dequeue(self, block: bool) -> LogRecord: ... def prepare(self, record: LogRecord) -> Any: ... def start(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/lzma.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/lzma.pyi index 7a26d15292d4..c469c218abcb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/lzma.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/lzma.pyi @@ -1,7 +1,46 @@ import io from _typeshed import ReadableBuffer, Self, StrOrBytesPath from typing import IO, Any, Mapping, Sequence, TextIO, Union, overload -from typing_extensions import Literal +from typing_extensions import Literal, final + +__all__ = [ + "CHECK_NONE", + "CHECK_CRC32", + "CHECK_CRC64", + "CHECK_SHA256", + "CHECK_ID_MAX", + "CHECK_UNKNOWN", + "FILTER_LZMA1", + "FILTER_LZMA2", + "FILTER_DELTA", + "FILTER_X86", + "FILTER_IA64", + "FILTER_ARM", + "FILTER_ARMTHUMB", + "FILTER_POWERPC", + "FILTER_SPARC", + "FORMAT_AUTO", + "FORMAT_XZ", + "FORMAT_ALONE", + "FORMAT_RAW", + "MF_HC3", + "MF_HC4", + "MF_BT2", + "MF_BT3", + "MF_BT4", + "MODE_FAST", + "MODE_NORMAL", + "PRESET_DEFAULT", + "PRESET_EXTREME", + "LZMACompressor", + "LZMADecompressor", + "LZMAFile", + "LZMAError", + "open", + "compress", + "decompress", + "is_check_supported", +] _OpenBinaryWritingMode = Literal["w", "wb", "x", "xb", "a", "ab"] _OpenTextWritingMode = Literal["wt", "xt", "at"] @@ -10,37 +49,38 @@ _PathOrFile = Union[StrOrBytesPath, IO[bytes]] _FilterChain = Sequence[Mapping[str, Any]] -FORMAT_AUTO: int -FORMAT_XZ: int -FORMAT_ALONE: int -FORMAT_RAW: int -CHECK_NONE: int -CHECK_CRC32: int -CHECK_CRC64: int -CHECK_SHA256: int 
-CHECK_ID_MAX: int -CHECK_UNKNOWN: int -FILTER_LZMA1: int -FILTER_LZMA2: int -FILTER_DELTA: int -FILTER_X86: int -FILTER_IA64: int -FILTER_ARM: int -FILTER_ARMTHUMB: int -FILTER_SPARC: int -FILTER_POWERPC: int -MF_HC3: int -MF_HC4: int -MF_BT2: int -MF_BT3: int -MF_BT4: int -MODE_FAST: int -MODE_NORMAL: int -PRESET_DEFAULT: int -PRESET_EXTREME: int +FORMAT_AUTO: Literal[0] +FORMAT_XZ: Literal[1] +FORMAT_ALONE: Literal[2] +FORMAT_RAW: Literal[3] +CHECK_NONE: Literal[0] +CHECK_CRC32: Literal[1] +CHECK_CRC64: Literal[4] +CHECK_SHA256: Literal[10] +CHECK_ID_MAX: Literal[15] +CHECK_UNKNOWN: Literal[16] +FILTER_LZMA1: int # v big number +FILTER_LZMA2: Literal[33] +FILTER_DELTA: Literal[3] +FILTER_X86: Literal[4] +FILTER_IA64: Literal[6] +FILTER_ARM: Literal[7] +FILTER_ARMTHUMB: Literal[8] +FILTER_SPARC: Literal[9] +FILTER_POWERPC: Literal[5] +MF_HC3: Literal[3] +MF_HC4: Literal[4] +MF_BT2: Literal[18] +MF_BT3: Literal[19] +MF_BT4: Literal[20] +MODE_FAST: Literal[1] +MODE_NORMAL: Literal[2] +PRESET_DEFAULT: Literal[6] +PRESET_EXTREME: int # v big number # from _lzma.c -class LZMADecompressor(object): +@final +class LZMADecompressor: def __init__(self, format: int | None = ..., memlimit: int | None = ..., filters: _FilterChain | None = ...) -> None: ... def decompress(self, data: bytes, max_length: int = ...) -> bytes: ... @property @@ -53,7 +93,8 @@ class LZMADecompressor(object): def needs_input(self) -> bool: ... # from _lzma.c -class LZMACompressor(object): +@final +class LZMACompressor: def __init__( self, format: int | None = ..., check: int = ..., preset: int | None = ..., filters: _FilterChain | None = ... ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/macpath.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/macpath.pyi index 5657adde9a84..2512e086b735 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/macpath.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/macpath.pyi @@ -32,7 +32,7 @@ from posixpath import ( splitext as splitext, supports_unicode_filenames as supports_unicode_filenames, ) -from typing import AnyStr, Tuple, overload +from typing import AnyStr, overload altsep: str | None @@ -64,6 +64,6 @@ def join(s: StrPath, *paths: StrPath) -> str: ... @overload def join(s: BytesPath, *paths: BytesPath) -> bytes: ... @overload -def split(s: PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... +def split(s: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def split(s: AnyStr) -> Tuple[AnyStr, AnyStr]: ... +def split(s: AnyStr) -> tuple[AnyStr, AnyStr]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/mailbox.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/mailbox.pyi index 544760cc5e8c..3fafd8891840 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/mailbox.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/mailbox.pyi @@ -1,38 +1,43 @@ import email.message import sys from _typeshed import Self, StrOrBytesPath +from abc import ABCMeta, abstractmethod from types import TracebackType -from typing import ( - IO, - Any, - AnyStr, - Callable, - Generic, - Iterable, - Iterator, - Mapping, - Protocol, - Sequence, - Tuple, - Type, - TypeVar, - Union, - overload, -) +from typing import IO, Any, AnyStr, Callable, Generic, Iterable, Iterator, Mapping, Protocol, Sequence, TypeVar, Union, overload from typing_extensions import Literal if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = [ + "Mailbox", + "Maildir", + "mbox", + "MH", + "Babyl", + "MMDF", + "Message", + "MaildirMessage", + "mboxMessage", + "MHMessage", + "BabylMessage", + "MMDFMessage", + 
"Error", + "NoSuchMailboxError", + "NotEmptyError", + "ExternalClashError", + "FormatError", +] + _T = TypeVar("_T") _MessageT = TypeVar("_MessageT", bound=Message) _MessageData = Union[email.message.Message, bytes, str, IO[str], IO[bytes]] class _HasIteritems(Protocol): - def iteritems(self) -> Iterator[Tuple[str, _MessageData]]: ... + def iteritems(self) -> Iterator[tuple[str, _MessageData]]: ... class _HasItems(Protocol): - def items(self) -> Iterator[Tuple[str, _MessageData]]: ... + def items(self) -> Iterator[tuple[str, _MessageData]]: ... linesep: bytes @@ -43,40 +48,53 @@ class Mailbox(Generic[_MessageT]): def __init__( self, path: StrOrBytesPath, factory: Callable[[IO[Any]], _MessageT] | None = ..., create: bool = ... ) -> None: ... + @abstractmethod def add(self, message: _MessageData) -> str: ... + @abstractmethod def remove(self, key: str) -> None: ... def __delitem__(self, key: str) -> None: ... def discard(self, key: str) -> None: ... + @abstractmethod def __setitem__(self, key: str, message: _MessageData) -> None: ... @overload def get(self, key: str, default: None = ...) -> _MessageT | None: ... @overload def get(self, key: str, default: _T) -> _MessageT | _T: ... def __getitem__(self, key: str) -> _MessageT: ... + @abstractmethod def get_message(self, key: str) -> _MessageT: ... def get_string(self, key: str) -> str: ... + @abstractmethod def get_bytes(self, key: str) -> bytes: ... # As '_ProxyFile' doesn't implement the full IO spec, and BytesIO is incompatible with it, get_file return is Any here + @abstractmethod def get_file(self, key: str) -> Any: ... + @abstractmethod def iterkeys(self) -> Iterator[str]: ... def keys(self) -> list[str]: ... def itervalues(self) -> Iterator[_MessageT]: ... def __iter__(self) -> Iterator[_MessageT]: ... def values(self) -> list[_MessageT]: ... - def iteritems(self) -> Iterator[Tuple[str, _MessageT]]: ... - def items(self) -> list[Tuple[str, _MessageT]]: ... 
+ def iteritems(self) -> Iterator[tuple[str, _MessageT]]: ... + def items(self) -> list[tuple[str, _MessageT]]: ... + @abstractmethod def __contains__(self, key: str) -> bool: ... + @abstractmethod def __len__(self) -> int: ... def clear(self) -> None: ... @overload def pop(self, key: str, default: None = ...) -> _MessageT | None: ... @overload def pop(self, key: str, default: _T = ...) -> _MessageT | _T: ... - def popitem(self) -> Tuple[str, _MessageT]: ... - def update(self, arg: _HasIteritems | _HasItems | Iterable[Tuple[str, _MessageData]] | None = ...) -> None: ... + def popitem(self) -> tuple[str, _MessageT]: ... + def update(self, arg: _HasIteritems | _HasItems | Iterable[tuple[str, _MessageData]] | None = ...) -> None: ... + @abstractmethod def flush(self) -> None: ... + @abstractmethod def lock(self) -> None: ... + @abstractmethod def unlock(self) -> None: ... + @abstractmethod def close(self) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -87,7 +105,19 @@ class Maildir(Mailbox[MaildirMessage]): def __init__( self, dirname: StrOrBytesPath, factory: Callable[[IO[Any]], MaildirMessage] | None = ..., create: bool = ... ) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MaildirMessage: ... + def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> _ProxyFile[bytes]: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... def list_folders(self) -> list[str]: ... def get_folder(self, folder: str) -> Maildir: ... def add_folder(self, folder: str) -> Maildir: ... 
@@ -95,9 +125,20 @@ class Maildir(Mailbox[MaildirMessage]): def clean(self) -> None: ... def next(self) -> str | None: ... -class _singlefileMailbox(Mailbox[_MessageT]): ... +class _singlefileMailbox(Mailbox[_MessageT], metaclass=ABCMeta): + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def flush(self) -> None: ... + def close(self) -> None: ... class _mboxMMDF(_singlefileMailbox[_MessageT]): + def get_message(self, key: str) -> _MessageT: ... def get_file(self, key: str, from_: bool = ...) -> _PartialFile[bytes]: ... def get_bytes(self, key: str, from_: bool = ...) -> bytes: ... def get_string(self, key: str, from_: bool = ...) -> str: ... @@ -116,7 +157,19 @@ class MH(Mailbox[MHMessage]): def __init__( self, path: StrOrBytesPath, factory: Callable[[IO[Any]], MHMessage] | None = ..., create: bool = ... ) -> None: ... + def add(self, message: _MessageData) -> str: ... + def remove(self, key: str) -> None: ... + def __setitem__(self, key: str, message: _MessageData) -> None: ... + def get_message(self, key: str) -> MHMessage: ... + def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> _ProxyFile[bytes]: ... + def iterkeys(self) -> Iterator[str]: ... + def __contains__(self, key: str) -> bool: ... + def __len__(self) -> int: ... + def flush(self) -> None: ... + def lock(self) -> None: ... + def unlock(self) -> None: ... + def close(self) -> None: ... def list_folders(self) -> list[str]: ... def get_folder(self, folder: StrOrBytesPath) -> MH: ... def add_folder(self, folder: StrOrBytesPath) -> MH: ... 
@@ -129,6 +182,8 @@ class Babyl(_singlefileMailbox[BabylMessage]): def __init__( self, path: StrOrBytesPath, factory: Callable[[IO[Any]], BabylMessage] | None = ..., create: bool = ... ) -> None: ... + def get_message(self, key: str) -> BabylMessage: ... + def get_bytes(self, key: str) -> bytes: ... def get_file(self, key: str) -> IO[bytes]: ... def get_labels(self) -> list[str]: ... @@ -149,7 +204,7 @@ class MaildirMessage(Message): class _mboxMMDFMessage(Message): def get_from(self) -> str: ... - def set_from(self, from_: str, time_: bool | Tuple[int, int, int, int, int, int, int, int, int] | None = ...) -> None: ... + def set_from(self, from_: str, time_: bool | tuple[int, int, int, int, int, int, int, int, int] | None = ...) -> None: ... def get_flags(self) -> str: ... def set_flags(self, flags: Iterable[str]) -> None: ... def add_flag(self, flag: str) -> None: ... @@ -185,7 +240,7 @@ class _ProxyFile(Generic[AnyStr]): def seek(self, offset: int, whence: int = ...) -> None: ... def close(self) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, exc_type: Type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def __exit__(self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def seekable(self) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/mailcap.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/mailcap.pyi index 2d4008f48c6c..7bf2ebd1bdfe 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/mailcap.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/mailcap.pyi @@ -1,8 +1,10 @@ -from typing import Dict, Mapping, Sequence, Tuple, Union +from typing import Mapping, Sequence, Union -_Cap = Dict[str, Union[str, int]] +_Cap = dict[str, Union[str, int]] + +__all__ = ["getcaps", "findmatch"] def findmatch( caps: Mapping[str, list[_Cap]], MIMEtype: str, key: str = ..., filename: str = ..., plist: Sequence[str] = ... -) -> Tuple[str | None, _Cap | None]: ... +) -> tuple[str | None, _Cap | None]: ... def getcaps() -> dict[str, list[_Cap]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/math.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/math.pyi index 1046676bec4a..2218f68174d1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/math.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/math.pyi @@ -1,6 +1,12 @@ import sys from _typeshed import SupportsTrunc -from typing import Iterable, SupportsFloat, SupportsInt, Tuple, overload +from typing import Iterable, SupportsFloat, Union, overload +from typing_extensions import SupportsIndex + +if sys.version_info >= (3, 8): + _SupportsFloatOrIndex = Union[SupportsFloat, SupportsIndex] +else: + _SupportsFloatOrIndex = SupportsFloat e: float pi: float @@ -8,94 +14,114 @@ inf: float nan: float tau: float -def acos(__x: SupportsFloat) -> float: ... -def acosh(__x: SupportsFloat) -> float: ... -def asin(__x: SupportsFloat) -> float: ... -def asinh(__x: SupportsFloat) -> float: ... -def atan(__x: SupportsFloat) -> float: ... -def atan2(__y: SupportsFloat, __x: SupportsFloat) -> float: ... -def atanh(__x: SupportsFloat) -> float: ... -def ceil(__x: SupportsFloat) -> int: ... +def acos(__x: _SupportsFloatOrIndex) -> float: ... 
+def acosh(__x: _SupportsFloatOrIndex) -> float: ... +def asin(__x: _SupportsFloatOrIndex) -> float: ... +def asinh(__x: _SupportsFloatOrIndex) -> float: ... +def atan(__x: _SupportsFloatOrIndex) -> float: ... +def atan2(__y: _SupportsFloatOrIndex, __x: _SupportsFloatOrIndex) -> float: ... +def atanh(__x: _SupportsFloatOrIndex) -> float: ... + +if sys.version_info >= (3, 11): + def cbrt(__x: _SupportsFloatOrIndex) -> float: ... + +def ceil(__x: _SupportsFloatOrIndex) -> int: ... + +if sys.version_info >= (3, 8): + def comb(__n: SupportsIndex, __k: SupportsIndex) -> int: ... + +def copysign(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... +def cos(__x: _SupportsFloatOrIndex) -> float: ... +def cosh(__x: _SupportsFloatOrIndex) -> float: ... +def degrees(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): - def comb(__n: int, __k: int) -> int: ... + def dist(__p: Iterable[_SupportsFloatOrIndex], __q: Iterable[_SupportsFloatOrIndex]) -> float: ... + +def erf(__x: _SupportsFloatOrIndex) -> float: ... +def erfc(__x: _SupportsFloatOrIndex) -> float: ... +def exp(__x: _SupportsFloatOrIndex) -> float: ... -def copysign(__x: SupportsFloat, __y: SupportsFloat) -> float: ... -def cos(__x: SupportsFloat) -> float: ... -def cosh(__x: SupportsFloat) -> float: ... -def degrees(__x: SupportsFloat) -> float: ... +if sys.version_info >= (3, 11): + def exp2(__x: _SupportsFloatOrIndex) -> float: ... + +def expm1(__x: _SupportsFloatOrIndex) -> float: ... +def fabs(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): - def dist(__p: Iterable[SupportsFloat], __q: Iterable[SupportsFloat]) -> float: ... - -def erf(__x: SupportsFloat) -> float: ... -def erfc(__x: SupportsFloat) -> float: ... -def exp(__x: SupportsFloat) -> float: ... -def expm1(__x: SupportsFloat) -> float: ... -def fabs(__x: SupportsFloat) -> float: ... -def factorial(__x: SupportsInt) -> int: ... -def floor(__x: SupportsFloat) -> int: ... 
-def fmod(__x: SupportsFloat, __y: SupportsFloat) -> float: ... -def frexp(__x: SupportsFloat) -> Tuple[float, int]: ... -def fsum(__seq: Iterable[float]) -> float: ... -def gamma(__x: SupportsFloat) -> float: ... + def factorial(__x: SupportsIndex) -> int: ... + +else: + def factorial(__x: int) -> int: ... + +def floor(__x: _SupportsFloatOrIndex) -> int: ... +def fmod(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... +def frexp(__x: _SupportsFloatOrIndex) -> tuple[float, int]: ... +def fsum(__seq: Iterable[_SupportsFloatOrIndex]) -> float: ... +def gamma(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 9): - def gcd(*integers: int) -> int: ... + def gcd(*integers: SupportsIndex) -> int: ... else: - def gcd(__x: int, __y: int) -> int: ... + def gcd(__x: SupportsIndex, __y: SupportsIndex) -> int: ... if sys.version_info >= (3, 8): - def hypot(*coordinates: SupportsFloat) -> float: ... + def hypot(*coordinates: _SupportsFloatOrIndex) -> float: ... else: - def hypot(__x: SupportsFloat, __y: SupportsFloat) -> float: ... - -def isclose(a: SupportsFloat, b: SupportsFloat, *, rel_tol: SupportsFloat = ..., abs_tol: SupportsFloat = ...) -> bool: ... -def isinf(__x: SupportsFloat) -> bool: ... -def isfinite(__x: SupportsFloat) -> bool: ... -def isnan(__x: SupportsFloat) -> bool: ... + def hypot(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... + +def isclose( + a: _SupportsFloatOrIndex, + b: _SupportsFloatOrIndex, + *, + rel_tol: _SupportsFloatOrIndex = ..., + abs_tol: _SupportsFloatOrIndex = ..., +) -> bool: ... +def isinf(__x: _SupportsFloatOrIndex) -> bool: ... +def isfinite(__x: _SupportsFloatOrIndex) -> bool: ... +def isnan(__x: _SupportsFloatOrIndex) -> bool: ... if sys.version_info >= (3, 8): - def isqrt(__n: int) -> int: ... + def isqrt(__n: SupportsIndex) -> int: ... if sys.version_info >= (3, 9): - def lcm(*integers: int) -> int: ... + def lcm(*integers: SupportsIndex) -> int: ... 
-def ldexp(__x: SupportsFloat, __i: int) -> float: ... -def lgamma(__x: SupportsFloat) -> float: ... -def log(x: SupportsFloat, base: SupportsFloat = ...) -> float: ... -def log10(__x: SupportsFloat) -> float: ... -def log1p(__x: SupportsFloat) -> float: ... -def log2(__x: SupportsFloat) -> float: ... -def modf(__x: SupportsFloat) -> Tuple[float, float]: ... +def ldexp(__x: _SupportsFloatOrIndex, __i: int) -> float: ... +def lgamma(__x: _SupportsFloatOrIndex) -> float: ... +def log(x: _SupportsFloatOrIndex, base: _SupportsFloatOrIndex = ...) -> float: ... +def log10(__x: _SupportsFloatOrIndex) -> float: ... +def log1p(__x: _SupportsFloatOrIndex) -> float: ... +def log2(__x: _SupportsFloatOrIndex) -> float: ... +def modf(__x: _SupportsFloatOrIndex) -> tuple[float, float]: ... if sys.version_info >= (3, 9): - def nextafter(__x: SupportsFloat, __y: SupportsFloat) -> float: ... + def nextafter(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): - def perm(__n: int, __k: int | None = ...) -> int: ... + def perm(__n: SupportsIndex, __k: SupportsIndex | None = ...) -> int: ... -def pow(__x: SupportsFloat, __y: SupportsFloat) -> float: ... +def pow(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 8): @overload - def prod(__iterable: Iterable[int], *, start: int = ...) -> int: ... # type: ignore + def prod(__iterable: Iterable[SupportsIndex], *, start: SupportsIndex = ...) -> int: ... # type: ignore[misc] @overload - def prod(__iterable: Iterable[SupportsFloat], *, start: SupportsFloat = ...) -> float: ... + def prod(__iterable: Iterable[_SupportsFloatOrIndex], *, start: _SupportsFloatOrIndex = ...) -> float: ... -def radians(__x: SupportsFloat) -> float: ... +def radians(__x: _SupportsFloatOrIndex) -> float: ... if sys.version_info >= (3, 7): - def remainder(__x: SupportsFloat, __y: SupportsFloat) -> float: ... 
+ def remainder(__x: _SupportsFloatOrIndex, __y: _SupportsFloatOrIndex) -> float: ... -def sin(__x: SupportsFloat) -> float: ... -def sinh(__x: SupportsFloat) -> float: ... -def sqrt(__x: SupportsFloat) -> float: ... -def tan(__x: SupportsFloat) -> float: ... -def tanh(__x: SupportsFloat) -> float: ... +def sin(__x: _SupportsFloatOrIndex) -> float: ... +def sinh(__x: _SupportsFloatOrIndex) -> float: ... +def sqrt(__x: _SupportsFloatOrIndex) -> float: ... +def tan(__x: _SupportsFloatOrIndex) -> float: ... +def tanh(__x: _SupportsFloatOrIndex) -> float: ... def trunc(__x: SupportsTrunc) -> int: ... if sys.version_info >= (3, 9): - def ulp(__x: SupportsFloat) -> float: ... + def ulp(__x: _SupportsFloatOrIndex) -> float: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi index 5a3ec91acbcd..e51b7cdf37bd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/mimetypes.pyi @@ -1,12 +1,28 @@ import sys from _typeshed import StrPath -from typing import IO, Sequence, Tuple +from typing import IO, Sequence + +__all__ = [ + "knownfiles", + "inited", + "MimeTypes", + "guess_type", + "guess_all_extensions", + "guess_extension", + "add_type", + "init", + "read_mime_types", + "suffix_map", + "encodings_map", + "types_map", + "common_types", +] if sys.version_info >= (3, 8): - def guess_type(url: StrPath, strict: bool = ...) -> Tuple[str | None, str | None]: ... + def guess_type(url: StrPath, strict: bool = ...) -> tuple[str | None, str | None]: ... else: - def guess_type(url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ... + def guess_type(url: str, strict: bool = ...) -> tuple[str | None, str | None]: ... def guess_all_extensions(type: str, strict: bool = ...) -> list[str]: ... def guess_extension(type: str, strict: bool = ...) -> str | None: ... 
@@ -24,11 +40,15 @@ common_types: dict[str, str] class MimeTypes: suffix_map: dict[str, str] encodings_map: dict[str, str] - types_map: Tuple[dict[str, str], dict[str, str]] - types_map_inv: Tuple[dict[str, str], dict[str, str]] - def __init__(self, filenames: Tuple[str, ...] = ..., strict: bool = ...) -> None: ... + types_map: tuple[dict[str, str], dict[str, str]] + types_map_inv: tuple[dict[str, str], dict[str, str]] + def __init__(self, filenames: tuple[str, ...] = ..., strict: bool = ...) -> None: ... def guess_extension(self, type: str, strict: bool = ...) -> str | None: ... - def guess_type(self, url: str, strict: bool = ...) -> Tuple[str | None, str | None]: ... + if sys.version_info >= (3, 8): + def guess_type(self, url: StrPath, strict: bool = ...) -> tuple[str | None, str | None]: ... + else: + def guess_type(self, url: str, strict: bool = ...) -> tuple[str | None, str | None]: ... + def guess_all_extensions(self, type: str, strict: bool = ...) -> list[str]: ... def read(self, filename: str, strict: bool = ...) -> None: ... def readfp(self, fp: IO[str], strict: bool = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi index 4dd8f8cd2cdd..5dd266e84607 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/mmap.pyi @@ -1,6 +1,7 @@ import sys from _typeshed import ReadableBuffer -from typing import ContextManager, Iterable, Iterator, NoReturn, Sized, overload +from contextlib import AbstractContextManager +from typing import Iterable, Iterator, NoReturn, Sized, overload ACCESS_DEFAULT: int ACCESS_READ: int @@ -12,6 +13,8 @@ ALLOCATIONGRANULARITY: int if sys.platform == "linux": MAP_DENYWRITE: int MAP_EXECUTABLE: int + if sys.version_info >= (3, 10): + MAP_POPULATE: int if sys.platform != "win32": MAP_ANON: int @@ -24,18 +27,20 @@ if sys.platform != "win32": PAGESIZE: int -class mmap(ContextManager[mmap], Iterable[int], Sized): +class mmap(AbstractContextManager[mmap], Iterable[int], Sized): if sys.platform == "win32": def __init__(self, fileno: int, length: int, tagname: str | None = ..., access: int = ..., offset: int = ...) -> None: ... else: def __init__( self, fileno: int, length: int, flags: int = ..., prot: int = ..., access: int = ..., offset: int = ... ) -> None: ... + def close(self) -> None: ... if sys.version_info >= (3, 8): def flush(self, offset: int = ..., size: int = ...) -> None: ... else: def flush(self, offset: int = ..., size: int = ...) -> int: ... + def move(self, dest: int, src: int, count: int) -> None: ... def read_byte(self) -> int: ... def readline(self) -> bytes: ... @@ -48,19 +53,20 @@ class mmap(ContextManager[mmap], Iterable[int], Sized): closed: bool if sys.version_info >= (3, 8) and sys.platform != "win32": def madvise(self, option: int, start: int = ..., length: int = ...) -> None: ... + def find(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) -> int: ... def rfind(self, sub: ReadableBuffer, start: int = ..., stop: int = ...) 
-> int: ... def read(self, n: int | None = ...) -> bytes: ... def write(self, bytes: ReadableBuffer) -> int: ... @overload - def __getitem__(self, index: int) -> int: ... + def __getitem__(self, __index: int) -> int: ... @overload - def __getitem__(self, index: slice) -> bytes: ... - def __delitem__(self, index: int | slice) -> NoReturn: ... + def __getitem__(self, __index: slice) -> bytes: ... + def __delitem__(self, __index: int | slice) -> NoReturn: ... @overload - def __setitem__(self, index: int, object: int) -> None: ... + def __setitem__(self, __index: int, __object: int) -> None: ... @overload - def __setitem__(self, index: slice, object: ReadableBuffer) -> None: ... + def __setitem__(self, __index: slice, __object: ReadableBuffer) -> None: ... # Doesn't actually exist, but the object is actually iterable because it has __getitem__ and # __len__, so we claim that there is also an __iter__ to help type checkers. def __iter__(self) -> Iterator[int]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/modulefinder.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/modulefinder.pyi index 89f8bd45f106..9efe032cfd29 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/modulefinder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/modulefinder.pyi @@ -1,12 +1,12 @@ import sys from types import CodeType -from typing import IO, Any, Container, Iterable, Iterator, Sequence, Tuple +from typing import IO, Any, Container, Iterable, Iterator, Sequence LOAD_CONST: int # undocumented IMPORT_NAME: int # undocumented STORE_NAME: int # undocumented STORE_GLOBAL: int # undocumented -STORE_OPS: Tuple[int, int] # undocumented +STORE_OPS: tuple[int, int] # undocumented EXTENDED_ARG: int # undocumented packagePathMap: dict[str, list[str]] # undocumented @@ -19,7 +19,6 @@ def ReplacePackage(oldname: str, newname: str) -> None: ... class Module: # undocumented def __init__(self, name: str, file: str | None = ..., path: str | None = ...) 
-> None: ... - def __repr__(self) -> str: ... class ModuleFinder: @@ -29,7 +28,7 @@ class ModuleFinder: debug: int # undocumented indent: int # undocumented excludes: Container[str] # undocumented - replace_paths: Sequence[Tuple[str, str]] # undocumented + replace_paths: Sequence[tuple[str, str]] # undocumented if sys.version_info >= (3, 8): def __init__( @@ -37,7 +36,7 @@ class ModuleFinder: path: list[str] | None = ..., debug: int = ..., excludes: Container[str] | None = ..., - replace_paths: Sequence[Tuple[str, str]] | None = ..., + replace_paths: Sequence[tuple[str, str]] | None = ..., ) -> None: ... else: def __init__( @@ -45,8 +44,9 @@ class ModuleFinder: path: list[str] | None = ..., debug: int = ..., excludes: Container[str] = ..., - replace_paths: Sequence[Tuple[str, str]] = ..., + replace_paths: Sequence[tuple[str, str]] = ..., ) -> None: ... + def msg(self, level: int, str: str, *args: Any) -> None: ... # undocumented def msgin(self, *args: Any) -> None: ... # undocumented def msgout(self, *args: Any) -> None: ... # undocumented @@ -56,20 +56,20 @@ class ModuleFinder: self, name: str, caller: Module | None = ..., fromlist: list[str] | None = ..., level: int = ... ) -> Module | None: ... # undocumented def determine_parent(self, caller: Module | None, level: int = ...) -> Module | None: ... # undocumented - def find_head_package(self, parent: Module, name: str) -> Tuple[Module, str]: ... # undocumented + def find_head_package(self, parent: Module, name: str) -> tuple[Module, str]: ... # undocumented def load_tail(self, q: Module, tail: str) -> Module: ... # undocumented def ensure_fromlist(self, m: Module, fromlist: Iterable[str], recursive: int = ...) -> None: ... # undocumented def find_all_submodules(self, m: Module) -> Iterable[str]: ... # undocumented def import_module(self, partname: str, fqname: str, parent: Module) -> Module | None: ... 
# undocumented - def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: Tuple[str, str, str]) -> Module: ... # undocumented - def scan_opcodes(self, co: CodeType) -> Iterator[Tuple[str, Tuple[Any, ...]]]: ... # undocumented + def load_module(self, fqname: str, fp: IO[str], pathname: str, file_info: tuple[str, str, str]) -> Module: ... # undocumented + def scan_opcodes(self, co: CodeType) -> Iterator[tuple[str, tuple[Any, ...]]]: ... # undocumented def scan_code(self, co: CodeType, m: Module) -> None: ... # undocumented def load_package(self, fqname: str, pathname: str) -> Module: ... # undocumented def add_module(self, fqname: str) -> Module: ... # undocumented def find_module( self, name: str, path: str | None, parent: Module | None = ... - ) -> Tuple[IO[Any] | None, str | None, Tuple[str, str, int]]: ... # undocumented + ) -> tuple[IO[Any] | None, str | None, tuple[str, str, int]]: ... # undocumented def report(self) -> None: ... def any_missing(self) -> list[str]: ... # undocumented - def any_missing_maybe(self) -> Tuple[list[str], list[str]]: ... # undocumented + def any_missing_maybe(self) -> tuple[list[str], list[str]]: ... # undocumented def replace_paths_in_code(self, co: CodeType) -> CodeType: ... 
# undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/__init__.pyi index ca05ee6f4309..db6f27126247 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/__init__.pyi @@ -1,6 +1,6 @@ import sys from types import ModuleType -from typing import Any, Container, Iterable, Sequence, Set, Tuple, Type +from typing import Any, Container, Iterable, Sequence from typing_extensions import Literal if sys.platform == "win32": @@ -22,22 +22,25 @@ if sys.platform == "win32": type_nullable: Literal[0x1000] type_key: Literal[0x2000] knownbits: Literal[0x3FFF] + class Table: name: str - fields: list[Tuple[int, str, int]] + fields: list[tuple[int, str, int]] def __init__(self, name: str) -> None: ... def add_field(self, index: int, name: str, type: int) -> None: ... def sql(self) -> str: ... def create(self, db: _Database) -> None: ... + class _Unspecified: ... + def change_sequence( - seq: Sequence[Tuple[str, str | None, int]], + seq: Sequence[tuple[str, str | None, int]], action: str, - seqno: int | Type[_Unspecified] = ..., - cond: str | Type[_Unspecified] = ..., + seqno: int | type[_Unspecified] = ..., + cond: str | type[_Unspecified] = ..., ) -> None: ... - def add_data(db: _Database, table: str, values: Iterable[Tuple[Any, ...]]) -> None: ... + def add_data(db: _Database, table: str, values: Iterable[tuple[Any, ...]]) -> None: ... def add_stream(db: _Database, name: str, path: str) -> None: ... def init_database( name: str, schema: ModuleType, ProductName: str, ProductCode: str, ProductVersion: str, Manufacturer: str @@ -45,17 +48,19 @@ if sys.platform == "win32": def add_tables(db: _Database, module: ModuleType) -> None: ... def make_id(str: str) -> str: ... def gen_uuid() -> str: ... 
+ class CAB: name: str - files: list[Tuple[str, str]] - filenames: Set[str] + files: list[tuple[str, str]] + filenames: set[str] index: int def __init__(self, name: str) -> None: ... def gen_id(self, file: str) -> str: ... - def append(self, full: str, file: str, logical: str) -> Tuple[int, str]: ... + def append(self, full: str, file: str, logical: str) -> tuple[int, str]: ... def commit(self, db: _Database) -> None: ... - _directories: Set[str] + _directories: set[str] + class Directory: db: _Database @@ -64,8 +69,8 @@ if sys.platform == "win32": physical: str logical: str component: str | None - short_names: Set[str] - ids: Set[str] + short_names: set[str] + ids: set[str] keyfiles: dict[str, str] componentflags: int | None absolute: str @@ -91,11 +96,12 @@ if sys.platform == "win32": def add_file(self, file: str, src: str | None = ..., version: str | None = ..., language: str | None = ...) -> str: ... def glob(self, pattern: str, exclude: Container[str] | None = ...) -> list[str]: ... def remove_pyc(self) -> None: ... + class Binary: name: str def __init__(self, fname: str) -> None: ... - def __repr__(self) -> str: ... + class Feature: id: str @@ -112,6 +118,7 @@ if sys.platform == "win32": attributes: int = ..., ) -> None: ... def set_current(self) -> None: ... + class Control: dlg: Dialog @@ -120,12 +127,14 @@ if sys.platform == "win32": def event(self, event: str, argument: str, condition: str = ..., ordering: int | None = ...) -> None: ... def mapping(self, event: str, attribute: str) -> None: ... def condition(self, action: str, condition: str) -> None: ... + class RadioButtonGroup(Control): property: str index: int def __init__(self, dlg: Dialog, name: str, property: str) -> None: ... def add(self, name: str, x: int, y: int, w: int, h: int, text: str, value: str | None = ...) -> None: ... 
+ class Dialog: db: _Database diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/schema.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/schema.pyi index df57ade15a2f..4ad9a1783fcd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/schema.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/schema.pyi @@ -1,5 +1,4 @@ import sys -from typing import Tuple if sys.platform == "win32": from . import Table @@ -92,4 +91,4 @@ if sys.platform == "win32": tables: list[Table] - _Validation_records: list[Tuple[str, str, str, int | None, int | None, str | None, int | None, str | None, str | None, str]] + _Validation_records: list[tuple[str, str, str, int | None, int | None, str | None, int | None, str | None, str | None, str]] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/sequence.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/sequence.pyi index 123d232886f7..87dff754009d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/sequence.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/sequence.pyi @@ -1,9 +1,9 @@ import sys -from typing import List, Optional, Tuple +from typing import Optional if sys.platform == "win32": - _SequenceType = List[Tuple[str, Optional[str], int]] + _SequenceType = list[tuple[str, Optional[str], int]] AdminExecuteSequence: _SequenceType AdminUISequence: _SequenceType diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/text.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/text.pyi index fe2dc23830e0..879429ecea85 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/msilib/text.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/msilib/text.pyi @@ -1,9 +1,8 @@ import sys -from typing import Tuple if sys.platform == "win32": - ActionText: list[Tuple[str, str, str | None]] - UIText: list[Tuple[str, str | None]] + ActionText: list[tuple[str, str, str | None]] 
+ UIText: list[tuple[str, str | None]] tables: list[str] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/msvcrt.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/msvcrt.pyi index 0441ed8acd24..35841c62f67a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/msvcrt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/msvcrt.pyi @@ -1,12 +1,13 @@ import sys +from typing_extensions import Literal # This module is only available on Windows if sys.platform == "win32": - LK_LOCK: int - LK_NBLCK: int - LK_NBRLCK: int - LK_RLCK: int - LK_UNLCK: int + LK_UNLCK: Literal[0] + LK_LOCK: Literal[1] + LK_NBLCK: Literal[2] + LK_RLCK: Literal[3] + LK_NBRLCK: Literal[4] def locking(__fd: int, __mode: int, __nbytes: int) -> None: ... def setmode(__fd: int, __mode: int) -> int: ... def open_osfhandle(__handle: int, __flags: int) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/__init__.pyi index 53b0c983fbcf..3f489ca43dba 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/__init__.pyi @@ -1,7 +1,7 @@ import sys from collections.abc import Callable, Iterable from logging import Logger -from multiprocessing import connection, context, pool, synchronize +from multiprocessing import connection, context, pool, reduction as reducer, synchronize from multiprocessing.context import ( AuthenticationError as AuthenticationError, BaseContext, @@ -29,6 +29,86 @@ if sys.version_info >= (3, 8): if sys.platform != "win32": from multiprocessing.context import ForkContext, ForkServerContext +if sys.version_info >= (3, 8): + __all__ = [ + "Array", + "AuthenticationError", + "Barrier", + "BoundedSemaphore", + "BufferTooShort", + "Condition", + "Event", + "JoinableQueue", + "Lock", + "Manager", + "Pipe", + "Pool", + "Process", 
+ "ProcessError", + "Queue", + "RLock", + "RawArray", + "RawValue", + "Semaphore", + "SimpleQueue", + "TimeoutError", + "Value", + "active_children", + "allow_connection_pickling", + "cpu_count", + "current_process", + "freeze_support", + "get_all_start_methods", + "get_context", + "get_logger", + "get_start_method", + "parent_process", + "log_to_stderr", + "reducer", + "set_executable", + "set_forkserver_preload", + "set_start_method", + ] +else: + __all__ = [ + "Array", + "AuthenticationError", + "Barrier", + "BoundedSemaphore", + "BufferTooShort", + "Condition", + "Event", + "JoinableQueue", + "Lock", + "Manager", + "Pipe", + "Pool", + "Process", + "ProcessError", + "Queue", + "RLock", + "RawArray", + "RawValue", + "Semaphore", + "SimpleQueue", + "TimeoutError", + "Value", + "active_children", + "allow_connection_pickling", + "cpu_count", + "current_process", + "freeze_support", + "get_all_start_methods", + "get_context", + "get_logger", + "get_start_method", + "log_to_stderr", + "reducer", + "set_executable", + "set_forkserver_preload", + "set_start_method", + ] + # The following type aliases can be used to annotate the return values of # the corresponding functions. They are not defined at runtime. # @@ -53,7 +133,7 @@ _SemaphoreType = synchronize.Semaphore # multiprocessing.context.BaseContext's methods, so the two signatures should # be identical (modulo self). 
-# Sychronization primitives +# Synchronization primitives _LockLike = Union[synchronize.Lock, synchronize.RLock] RawValue = context._default_context.RawValue RawArray = context._default_context.RawArray diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/connection.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/connection.pyi index 4f6dc1ba7efa..5db6fa4cda7e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/connection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/connection.pyi @@ -2,19 +2,16 @@ import socket import sys import types from _typeshed import Self -from typing import Any, Iterable, Tuple, Type, Union +from typing import Any, Iterable, Union +from typing_extensions import SupportsIndex -if sys.version_info >= (3, 8): - from typing import SupportsIndex +__all__ = ["Client", "Listener", "Pipe", "wait"] # https://docs.python.org/3/library/multiprocessing.html#address-formats -_Address = Union[str, Tuple[str, int]] +_Address = Union[str, tuple[str, int]] class _ConnectionBase: - if sys.version_info >= (3, 8): - def __init__(self, handle: SupportsIndex, readable: bool = ..., writable: bool = ...) -> None: ... - else: - def __init__(self, handle: int, readable: bool = ..., writable: bool = ...) -> None: ... + def __init__(self, handle: SupportsIndex, readable: bool = ..., writable: bool = ...) -> None: ... @property def closed(self) -> bool: ... # undocumented @property @@ -31,7 +28,7 @@ class _ConnectionBase: def poll(self, timeout: float | None = ...) -> bool: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... class Connection(_ConnectionBase): ... 
@@ -51,7 +48,7 @@ class Listener: def last_accepted(self) -> _Address | None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... def deliver_challenge(connection: Connection, authkey: bytes) -> None: ... @@ -60,4 +57,4 @@ def wait( object_list: Iterable[Connection | socket.socket | int], timeout: float | None = ... ) -> list[Connection | socket.socket | int]: ... def Client(address: _Address, family: str | None = ..., authkey: bytes | None = ...) -> Connection: ... -def Pipe(duplex: bool = ...) -> Tuple[Connection, Connection]: ... +def Pipe(duplex: bool = ...) -> tuple[Connection, Connection]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/context.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/context.pyi index 59ff0afadbc4..28ec0b140566 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/context.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/context.pyi @@ -5,11 +5,17 @@ from collections.abc import Callable, Iterable, Sequence from ctypes import _CData from logging import Logger from multiprocessing import queues, synchronize +from multiprocessing.pool import Pool as _Pool from multiprocessing.process import BaseProcess from multiprocessing.sharedctypes import SynchronizedArray, SynchronizedBase -from typing import Any, Type, TypeVar, Union, overload +from typing import Any, TypeVar, Union, overload from typing_extensions import Literal +if sys.version_info >= (3, 8): + __all__ = () +else: + __all__: list[str] = [] + _LockLike = Union[synchronize.Lock, synchronize.RLock] _CT = TypeVar("_CT", bound=_CData) @@ -18,12 +24,12 @@ class BufferTooShort(ProcessError): ... 
class TimeoutError(ProcessError): ... class AuthenticationError(ProcessError): ... -class BaseContext(object): - Process: Type[BaseProcess] - ProcessError: Type[Exception] - BufferTooShort: Type[Exception] - TimeoutError: Type[Exception] - AuthenticationError: Type[Exception] +class BaseContext: + Process: type[BaseProcess] + ProcessError: type[Exception] + BufferTooShort: type[Exception] + TimeoutError: type[Exception] + AuthenticationError: type[Exception] # N.B. The methods below are applied at runtime to generate # multiprocessing.*, so the signatures should be identical (modulo self). @@ -32,6 +38,7 @@ class BaseContext(object): if sys.version_info >= (3, 8): @staticmethod def parent_process() -> BaseProcess | None: ... + @staticmethod def active_children() -> list[BaseProcess]: ... def cpu_count(self) -> int: ... @@ -57,28 +64,28 @@ class BaseContext(object): initializer: Callable[..., Any] | None = ..., initargs: Iterable[Any] = ..., maxtasksperchild: int | None = ..., - ) -> multiprocessing.pool.Pool: ... + ) -> _Pool: ... @overload - def RawValue(self, typecode_or_type: Type[_CT], *args: Any) -> _CT: ... + def RawValue(self, typecode_or_type: type[_CT], *args: Any) -> _CT: ... @overload def RawValue(self, typecode_or_type: str, *args: Any) -> Any: ... @overload - def RawArray(self, typecode_or_type: Type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... + def RawArray(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... @overload def RawArray(self, typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload - def Value(self, typecode_or_type: Type[_CT], *args: Any, lock: Literal[False]) -> _CT: ... + def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[False]) -> _CT: ... @overload - def Value(self, typecode_or_type: Type[_CT], *args: Any, lock: Literal[True] | _LockLike) -> SynchronizedBase[_CT]: ... 
+ def Value(self, typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike) -> SynchronizedBase[_CT]: ... @overload def Value(self, typecode_or_type: str, *args: Any, lock: Literal[True] | _LockLike) -> SynchronizedBase[Any]: ... @overload - def Value(self, typecode_or_type: str | Type[_CData], *args: Any, lock: bool | _LockLike = ...) -> Any: ... + def Value(self, typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = ...) -> Any: ... @overload - def Array(self, typecode_or_type: Type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False]) -> _CT: ... + def Array(self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False]) -> _CT: ... @overload def Array( - self, typecode_or_type: Type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike + self, typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike ) -> SynchronizedArray[_CT]: ... @overload def Array( @@ -86,7 +93,7 @@ class BaseContext(object): ) -> SynchronizedArray[Any]: ... @overload def Array( - self, typecode_or_type: str | Type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = ... + self, typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = ... ) -> Any: ... def freeze_support(self) -> None: ... def get_logger(self) -> Logger: ... @@ -112,6 +119,7 @@ class BaseContext(object): def get_context(self, method: Literal["spawn"]) -> SpawnContext: ... @overload def get_context(self, method: str) -> BaseContext: ... + def get_start_method(self, allow_none: bool = ...) -> str: ... def set_start_method(self, method: str | None, force: bool = ...) -> None: ... @property @@ -126,7 +134,7 @@ class Process(BaseProcess): def _Popen(process_obj: BaseProcess) -> DefaultContext: ... 
class DefaultContext(BaseContext): - Process: Type[multiprocessing.Process] + Process: type[multiprocessing.Process] def __init__(self, context: BaseContext) -> None: ... def set_start_method(self, method: str | None, force: bool = ...) -> None: ... def get_start_method(self, allow_none: bool = ...) -> str: ... @@ -139,32 +147,38 @@ if sys.platform != "win32": _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> Any: ... + class SpawnProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> SpawnProcess: ... + class ForkServerProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> Any: ... + class ForkContext(BaseContext): _name: str - Process: Type[ForkProcess] + Process: type[ForkProcess] + class SpawnContext(BaseContext): _name: str - Process: Type[SpawnProcess] + Process: type[SpawnProcess] + class ForkServerContext(BaseContext): _name: str - Process: Type[ForkServerProcess] + Process: type[ForkServerProcess] else: class SpawnProcess(BaseProcess): _start_method: str @staticmethod def _Popen(process_obj: BaseProcess) -> Any: ... + class SpawnContext(BaseContext): _name: str - Process: Type[SpawnProcess] + Process: type[SpawnProcess] def _force_start_method(method: str) -> None: ... def get_spawning_popen() -> Any | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi index b4d1c8404d8d..1c29afb8f643 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/__init__.pyi @@ -4,6 +4,27 @@ import weakref from queue import Queue as Queue from typing import Any, Callable, Iterable, Mapping, Sequence +from .connection import Pipe as Pipe + +__all__ = [ + "Process", + "current_process", + "active_children", + "freeze_support", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Condition", + "Event", + "Barrier", + "Queue", + "Manager", + "Pipe", + "Pool", + "JoinableQueue", +] + JoinableQueue = Queue Barrier = threading.Barrier BoundedSemaphore = threading.BoundedSemaphore diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/connection.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/connection.pyi index 71e9a50fab21..d1ff83961f9a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/connection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/dummy/connection.pyi @@ -1,13 +1,15 @@ from _typeshed import Self from queue import Queue from types import TracebackType -from typing import Any, Tuple, Type, Union +from typing import Any, Union + +__all__ = ["Client", "Listener", "Pipe"] families: list[None] -_Address = Union[str, Tuple[str, int]] +_Address = Union[str, tuple[str, int]] -class Connection(object): +class Connection: _in: Any _out: Any recv: Any @@ -16,23 +18,23 @@ class Connection(object): send_bytes: Any def __enter__(self: Self) -> Self: ... 
def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __init__(self, _in: Any, _out: Any) -> None: ... def close(self) -> None: ... def poll(self, timeout: float = ...) -> bool: ... -class Listener(object): +class Listener: _backlog_queue: Queue[Any] | None @property def address(self) -> Queue[Any] | None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __init__(self, address: _Address | None = ..., family: int | None = ..., backlog: int = ...) -> None: ... def accept(self) -> Connection: ... def close(self) -> None: ... def Client(address: _Address) -> Connection: ... -def Pipe(duplex: bool = ...) -> Tuple[Connection, Connection]: ... +def Pipe(duplex: bool = ...) -> tuple[Connection, Connection]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi index 568845bfb9e4..fef3e1e21eb2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/managers.pyi @@ -1,9 +1,9 @@ -# NOTE: These are incomplete! 
- import queue import sys import threading -from typing import Any, AnyStr, Callable, ContextManager, Generic, Iterable, Mapping, Sequence, Tuple, TypeVar +from _typeshed import Self +from types import TracebackType +from typing import Any, AnyStr, Callable, Generic, Iterable, Mapping, Sequence, TypeVar from .connection import Connection from .context import BaseContext @@ -11,8 +11,12 @@ from .context import BaseContext if sys.version_info >= (3, 8): from .shared_memory import _SLT, ShareableList, SharedMemory + __all__ = ["BaseManager", "SyncManager", "BaseProxy", "Token", "SharedMemoryManager"] + _SharedMemory = SharedMemory _ShareableList = ShareableList +else: + __all__ = ["BaseManager", "SyncManager", "BaseProxy", "Token"] if sys.version_info >= (3, 9): from types import GenericAlias @@ -28,16 +32,15 @@ class Namespace: _Namespace = Namespace -class Token(object): +class Token: typeid: str | bytes | None - address: Tuple[str | bytes, int] + address: tuple[str | bytes, int] id: str | bytes | int | None - def __init__(self, typeid: bytes | str | None, address: Tuple[str | bytes, int], id: str | bytes | int | None) -> None: ... - def __repr__(self) -> str: ... - def __getstate__(self) -> Tuple[str | bytes | None, Tuple[str | bytes, int], str | bytes | int | None]: ... - def __setstate__(self, state: Tuple[str | bytes | None, Tuple[str | bytes, int], str | bytes | int | None]) -> None: ... + def __init__(self, typeid: bytes | str | None, address: tuple[str | bytes, int], id: str | bytes | int | None) -> None: ... + def __getstate__(self) -> tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]: ... + def __setstate__(self, state: tuple[str | bytes | None, tuple[str | bytes, int], str | bytes | int | None]) -> None: ... -class BaseProxy(object): +class BaseProxy: _address_to_local: dict[Any, Any] _mutex: Any def __init__( @@ -51,9 +54,9 @@ class BaseProxy(object): manager_owned: bool = ..., ) -> None: ... 
def __deepcopy__(self, memo: Any | None) -> Any: ... - def _callmethod(self, methodname: str, args: Tuple[Any, ...] = ..., kwds: dict[Any, Any] = ...) -> None: ... + def _callmethod(self, methodname: str, args: tuple[Any, ...] = ..., kwds: dict[Any, Any] = ...) -> None: ... def _getvalue(self) -> Any: ... - def __reduce__(self) -> Tuple[Any, Tuple[Any, Any, str, dict[Any, Any]]]: ... + def __reduce__(self) -> tuple[Any, tuple[Any, Any, str, dict[Any, Any]]]: ... class ValueProxy(BaseProxy, Generic[_T]): def get(self) -> _T: ... @@ -66,12 +69,12 @@ class ValueProxy(BaseProxy, Generic[_T]): class Server: address: Any def __init__( - self, registry: dict[str, Tuple[Callable[..., Any], Any, Any, Any]], address: Any, authkey: bytes, serializer: str + self, registry: dict[str, tuple[Callable[..., Any], Any, Any, Any]], address: Any, authkey: bytes, serializer: str ) -> None: ... def serve_forever(self) -> None: ... def accept_connection(self, c: Connection, name: str) -> None: ... -class BaseManager(ContextManager[BaseManager]): +class BaseManager: def __init__( self, address: Any | None = ..., authkey: bytes | None = ..., serializer: str = ..., ctx: BaseContext | None = ... ) -> None: ... @@ -92,12 +95,14 @@ class BaseManager(ContextManager[BaseManager]): method_to_typeid: Mapping[str, str] | None = ..., create_method: bool = ..., ) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, exc_type: type[BaseException], exc_val: BaseException, exc_tb: TracebackType) -> None: ... # Conflicts with method names _dict = dict _list = list -class SyncManager(BaseManager, ContextManager[SyncManager]): +class SyncManager(BaseManager): def BoundedSemaphore(self, value: Any = ...) -> threading.BoundedSemaphore: ... def Condition(self, lock: Any = ...) -> threading.Condition: ... def Event(self) -> threading.Event: ... @@ -115,6 +120,7 @@ class RemoteError(Exception): ... if sys.version_info >= (3, 8): class SharedMemoryServer(Server): ... 
+ class SharedMemoryManager(BaseManager): def get_server(self) -> SharedMemoryServer: ... def SharedMemory(self, size: int) -> _SharedMemory: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/pool.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/pool.pyi index 20cfc59633d5..5e38e0161834 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/pool.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/pool.pyi @@ -1,11 +1,14 @@ import sys from _typeshed import Self -from typing import Any, Callable, ContextManager, Dict, Generic, Iterable, Iterator, List, Mapping, TypeVar +from types import TracebackType +from typing import Any, Callable, Generic, Iterable, Iterator, Mapping, TypeVar +from typing_extensions import Literal if sys.version_info >= (3, 9): from types import GenericAlias -_PT = TypeVar("_PT", bound=Pool) +__all__ = ["Pool", "ThreadPool"] + _S = TypeVar("_S") _T = TypeVar("_T") @@ -17,10 +20,11 @@ class ApplyResult(Generic[_T]): else: def __init__( self, - cache: Dict[int, ApplyResult[Any]], + cache: dict[int, ApplyResult[Any]], callback: Callable[[_T], None] | None, error_callback: Callable[[BaseException], None] | None, ) -> None: ... + def get(self, timeout: float | None = ...) -> _T: ... def wait(self, timeout: float | None = ...) -> None: ... def ready(self) -> bool: ... @@ -31,23 +35,23 @@ class ApplyResult(Generic[_T]): # alias created during issue #17805 AsyncResult = ApplyResult -class MapResult(ApplyResult[List[_T]]): +class MapResult(ApplyResult[list[_T]]): if sys.version_info >= (3, 8): def __init__( self, pool: Pool, chunksize: int, length: int, - callback: Callable[[List[_T]], None] | None, + callback: Callable[[list[_T]], None] | None, error_callback: Callable[[BaseException], None] | None, ) -> None: ... 
else: def __init__( self, - cache: Dict[int, ApplyResult[Any]], + cache: dict[int, ApplyResult[Any]], chunksize: int, length: int, - callback: Callable[[List[_T]], None] | None, + callback: Callable[[list[_T]], None] | None, error_callback: Callable[[BaseException], None] | None, ) -> None: ... @@ -55,14 +59,15 @@ class IMapIterator(Iterator[_T]): if sys.version_info >= (3, 8): def __init__(self, pool: Pool) -> None: ... else: - def __init__(self, cache: Dict[int, IMapIterator[Any]]) -> None: ... - def __iter__(self: _S) -> _S: ... + def __init__(self, cache: dict[int, IMapIterator[Any]]) -> None: ... + + def __iter__(self: Self) -> Self: ... def next(self, timeout: float | None = ...) -> _T: ... def __next__(self, timeout: float | None = ...) -> _T: ... class IMapUnorderedIterator(IMapIterator[_T]): ... -class Pool(ContextManager[Pool]): +class Pool: def __init__( self, processes: int | None = ..., @@ -106,19 +111,22 @@ class Pool(ContextManager[Pool]): def terminate(self) -> None: ... def join(self) -> None: ... def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... -class ThreadPool(Pool, ContextManager[ThreadPool]): +class ThreadPool(Pool): def __init__( self, processes: int | None = ..., initializer: Callable[..., Any] | None = ..., initargs: Iterable[Any] = ... ) -> None: ... 
# undocumented if sys.version_info >= (3, 8): - INIT: str - RUN: str - CLOSE: str - TERMINATE: str + INIT: Literal["INIT"] + RUN: Literal["RUN"] + CLOSE: Literal["CLOSE"] + TERMINATE: Literal["TERMINATE"] else: - RUN: int - CLOSE: int - TERMINATE: int + RUN: Literal[0] + CLOSE: Literal[1] + TERMINATE: Literal[2] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi index 32c22d19f6e5..f4464c08c9bd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/process.pyi @@ -1,17 +1,22 @@ import sys -from typing import Any, Callable, Mapping, Tuple +from typing import Any, Callable, Mapping + +if sys.version_info >= (3, 8): + __all__ = ["BaseProcess", "current_process", "active_children", "parent_process"] +else: + __all__ = ["BaseProcess", "current_process", "active_children"] class BaseProcess: name: str daemon: bool authkey: bytes - _identity: Tuple[int, ...] # undocumented + _identity: tuple[int, ...] # undocumented def __init__( self, group: None = ..., target: Callable[..., Any] | None = ..., name: str | None = ..., - args: Tuple[Any, ...] = ..., + args: tuple[Any, ...] = ..., kwargs: Mapping[str, Any] = ..., *, daemon: bool | None = ..., @@ -22,6 +27,7 @@ class BaseProcess: if sys.version_info >= (3, 7): def kill(self) -> None: ... def close(self) -> None: ... + def join(self, timeout: float | None = ...) -> None: ... def is_alive(self) -> bool: ... 
@property diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/queues.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/queues.pyi index 1c9d76917292..1d31fa694c45 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/queues.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/queues.pyi @@ -5,6 +5,8 @@ from typing import Any, Generic, TypeVar if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = ["Queue", "SimpleQueue", "JoinableQueue"] + _T = TypeVar("_T") class Queue(queue.Queue[_T]): @@ -30,6 +32,7 @@ class SimpleQueue(Generic[_T]): def __init__(self, *, ctx: Any = ...) -> None: ... if sys.version_info >= (3, 9): def close(self) -> None: ... + def empty(self) -> bool: ... def get(self) -> _T: ... def put(self, item: _T) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/reduction.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/reduction.pyi new file mode 100644 index 000000000000..9e7387da64a5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/reduction.pyi @@ -0,0 +1,84 @@ +import pickle +import sys +from abc import ABCMeta +from copyreg import _DispatchTableType +from typing import Any +from typing_extensions import Literal + +if sys.platform == "win32": + __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupHandle", "duplicate", "steal_handle"] +else: + __all__ = ["send_handle", "recv_handle", "ForkingPickler", "register", "dump", "DupFd", "sendfds", "recvfds"] + +class ForkingPickler(pickle.Pickler): + dispatch_table: _DispatchTableType + def __init__(self, *args) -> None: ... + @classmethod + def register(cls, type, reduce) -> None: ... + @classmethod + def dumps(cls, obj, protocol: Any | None = ...): ... 
+ loads = pickle.loads + +register = ForkingPickler.register + +def dump(obj, file, protocol: Any | None = ...) -> None: ... + +if sys.platform == "win32": + if sys.version_info >= (3, 8): + def duplicate(handle, target_process: Any | None = ..., inheritable: bool = ..., *, source_process: Any | None = ...): ... + else: + def duplicate(handle, target_process: Any | None = ..., inheritable: bool = ...): ... + + def steal_handle(source_pid, handle): ... + def send_handle(conn, handle, destination_pid) -> None: ... + def recv_handle(conn): ... + + class DupHandle: + def __init__(self, handle, access, pid: Any | None = ...) -> None: ... + def detach(self): ... + +else: + if sys.platform == "darwin": + ACKNOWLEDGE: Literal[True] + else: + ACKNOWLEDGE: Literal[False] + + def recvfds(sock, size): ... + def send_handle(conn, handle, destination_pid) -> None: ... + def recv_handle(conn) -> None: ... + def sendfds(sock, fds) -> None: ... + def DupFd(fd): ... + +# These aliases are to work around pyright complaints. +# Pyright doesn't like it when a class object is defined as an alias +# of a global object with the same name. +_ForkingPickler = ForkingPickler +_register = register +_dump = dump +_send_handle = send_handle +_recv_handle = recv_handle + +if sys.platform == "win32": + _steal_handle = steal_handle + _duplicate = duplicate + _DupHandle = DupHandle +else: + _sendfds = sendfds + _recvfds = recvfds + _DupFd = DupFd + +class AbstractReducer(metaclass=ABCMeta): + ForkingPickler = _ForkingPickler + register = _register + dump = _dump + send_handle = _send_handle + recv_handle = _recv_handle + if sys.platform == "win32": + steal_handle = _steal_handle + duplicate = _duplicate + DupHandle = _DupHandle + else: + sendfds = _sendfds + recvfds = _recvfds + DupFd = _DupFd + def __init__(self, *args) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi index 47669aa4bcb5..a4c4fd071c5f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/shared_memory.pyi @@ -1,33 +1,35 @@ import sys -from typing import Any, Generic, Iterable, Tuple, TypeVar +from _typeshed import Self +from typing import Any, Generic, Iterable, TypeVar if sys.version_info >= (3, 9): from types import GenericAlias -_S = TypeVar("_S") +__all__ = ["SharedMemory", "ShareableList"] + _SLT = TypeVar("_SLT", int, float, bool, str, bytes, None) -if sys.version_info >= (3, 8): - class SharedMemory: - def __init__(self, name: str | None = ..., create: bool = ..., size: int = ...) -> None: ... - @property - def buf(self) -> memoryview: ... - @property - def name(self) -> str: ... - @property - def size(self) -> int: ... - def close(self) -> None: ... - def unlink(self) -> None: ... - class ShareableList(Generic[_SLT]): - shm: SharedMemory - def __init__(self, sequence: Iterable[_SLT] | None = ..., *, name: str | None = ...) -> None: ... - def __getitem__(self, position: int) -> _SLT: ... - def __setitem__(self, position: int, value: _SLT) -> None: ... - def __reduce__(self: _S) -> Tuple[_S, Tuple[_SLT, ...]]: ... - def __len__(self) -> int: ... - @property - def format(self) -> str: ... - def count(self, value: _SLT) -> int: ... - def index(self, value: _SLT) -> int: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... +class SharedMemory: + def __init__(self, name: str | None = ..., create: bool = ..., size: int = ...) -> None: ... + @property + def buf(self) -> memoryview: ... + @property + def name(self) -> str: ... + @property + def size(self) -> int: ... + def close(self) -> None: ... 
+ def unlink(self) -> None: ... + +class ShareableList(Generic[_SLT]): + shm: SharedMemory + def __init__(self, sequence: Iterable[_SLT] | None = ..., *, name: str | None = ...) -> None: ... + def __getitem__(self, position: int) -> _SLT: ... + def __setitem__(self, position: int, value: _SLT) -> None: ... + def __reduce__(self: Self) -> tuple[Self, tuple[_SLT, ...]]: ... + def __len__(self) -> int: ... + @property + def format(self) -> str: ... + def count(self, value: _SLT) -> int: ... + def index(self, value: _SLT) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi index bd9d8f089875..9b0407e2dfff 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/sharedctypes.pyi @@ -3,25 +3,27 @@ from collections.abc import Callable, Iterable, Sequence from ctypes import _CData, _SimpleCData, c_char from multiprocessing.context import BaseContext from multiprocessing.synchronize import _LockLike -from typing import Any, Generic, Protocol, Type, TypeVar, overload +from typing import Any, Generic, Protocol, TypeVar, overload from typing_extensions import Literal +__all__ = ["RawValue", "RawArray", "Value", "Array", "copy", "synchronized"] + _T = TypeVar("_T") _CT = TypeVar("_CT", bound=_CData) @overload -def RawValue(typecode_or_type: Type[_CT], *args: Any) -> _CT: ... +def RawValue(typecode_or_type: type[_CT], *args: Any) -> _CT: ... @overload def RawValue(typecode_or_type: str, *args: Any) -> Any: ... @overload -def RawArray(typecode_or_type: Type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... 
+def RawArray(typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any]) -> ctypes.Array[_CT]: ... @overload def RawArray(typecode_or_type: str, size_or_initializer: int | Sequence[Any]) -> Any: ... @overload -def Value(typecode_or_type: Type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = ...) -> _CT: ... +def Value(typecode_or_type: type[_CT], *args: Any, lock: Literal[False], ctx: BaseContext | None = ...) -> _CT: ... @overload def Value( - typecode_or_type: Type[_CT], *args: Any, lock: Literal[True] | _LockLike, ctx: BaseContext | None = ... + typecode_or_type: type[_CT], *args: Any, lock: Literal[True] | _LockLike, ctx: BaseContext | None = ... ) -> SynchronizedBase[_CT]: ... @overload def Value( @@ -29,15 +31,15 @@ def Value( ) -> SynchronizedBase[Any]: ... @overload def Value( - typecode_or_type: str | Type[_CData], *args: Any, lock: bool | _LockLike = ..., ctx: BaseContext | None = ... + typecode_or_type: str | type[_CData], *args: Any, lock: bool | _LockLike = ..., ctx: BaseContext | None = ... ) -> Any: ... @overload def Array( - typecode_or_type: Type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = ... + typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[False], ctx: BaseContext | None = ... ) -> _CT: ... @overload def Array( - typecode_or_type: Type[_CT], + typecode_or_type: type[_CT], size_or_initializer: int | Sequence[Any], *, lock: Literal[True] | _LockLike, @@ -53,7 +55,7 @@ def Array( ) -> SynchronizedArray[Any]: ... @overload def Array( - typecode_or_type: str | Type[_CData], + typecode_or_type: str | type[_CData], size_or_initializer: int | Sequence[Any], *, lock: bool | _LockLike = ..., @@ -76,7 +78,7 @@ class SynchronizedBase(Generic[_CT]): acquire: _AcquireFunc release: Callable[[], None] def __init__(self, obj: Any, lock: _LockLike | None = ..., ctx: Any | None = ...) -> None: ... 
- def __reduce__(self) -> tuple[Callable[..., Any], tuple[Any, _LockLike]]: ... + def __reduce__(self) -> tuple[Callable[[Any, _LockLike], SynchronizedBase[Any]], tuple[Any, _LockLike]]: ... def get_obj(self) -> _CT: ... def get_lock(self) -> _LockLike: ... def __enter__(self) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/spawn.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/spawn.pyi index 34c7322e0d46..4f981ea467c4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/spawn.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/spawn.pyi @@ -1,6 +1,16 @@ from types import ModuleType from typing import Any, Mapping, Sequence +__all__ = [ + "_main", + "freeze_support", + "set_executable", + "get_executable", + "get_preparation_data", + "get_command_line", + "import_main_path", +] + WINEXE: bool WINSERVICE: bool diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi index 1741885f13bf..6764b7666152 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/multiprocessing/synchronize.pyi @@ -1,7 +1,10 @@ import sys import threading +from contextlib import AbstractContextManager from multiprocessing.context import BaseContext -from typing import Any, Callable, ContextManager, Union +from typing import Any, Callable, Union + +__all__ = ["Lock", "RLock", "Semaphore", "BoundedSemaphore", "Condition", "Event"] _LockLike = Union[Lock, RLock] @@ -13,19 +16,20 @@ class Barrier(threading.Barrier): class BoundedSemaphore(Semaphore): def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... 
-class Condition(ContextManager[bool]): +class Condition(AbstractContextManager[bool]): def __init__(self, lock: _LockLike | None = ..., *, ctx: BaseContext) -> None: ... if sys.version_info >= (3, 7): def notify(self, n: int = ...) -> None: ... else: def notify(self) -> None: ... + def notify_all(self) -> None: ... def wait(self, timeout: float | None = ...) -> bool: ... def wait_for(self, predicate: Callable[[], bool], timeout: float | None = ...) -> bool: ... def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... def release(self) -> None: ... -class Event(ContextManager[bool]): +class Event(AbstractContextManager[bool]): def __init__(self, lock: _LockLike | None = ..., *, ctx: BaseContext) -> None: ... def is_set(self) -> bool: ... def set(self) -> None: ... @@ -42,6 +46,6 @@ class Semaphore(SemLock): def __init__(self, value: int = ..., *, ctx: BaseContext) -> None: ... # Not part of public API -class SemLock(ContextManager[bool]): +class SemLock(AbstractContextManager[bool]): def acquire(self, block: bool = ..., timeout: float | None = ...) -> bool: ... def release(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/netrc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/netrc.pyi index b8eac307740a..b1c3f5ec5024 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/netrc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/netrc.pyi @@ -1,5 +1,7 @@ from _typeshed import StrOrBytesPath -from typing import Optional, Tuple +from typing import Optional + +__all__ = ["netrc", "NetrcParseError"] class NetrcParseError(Exception): filename: str | None @@ -8,7 +10,7 @@ class NetrcParseError(Exception): def __init__(self, msg: str, filename: StrOrBytesPath | None = ..., lineno: int | None = ...) -> None: ... 
# (login, account, password) tuple -_NetrcTuple = Tuple[str, Optional[str], Optional[str]] +_NetrcTuple = tuple[str, Optional[str], Optional[str]] class netrc: hosts: dict[str, _NetrcTuple] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/nis.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/nis.pyi index b762ae46241c..10eef2336a83 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/nis.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/nis.pyi @@ -5,4 +5,5 @@ if sys.platform != "win32": def get_default_domain() -> str: ... def maps(domain: str = ...) -> list[str]: ... def match(key: str, map: str, domain: str = ...) -> str: ... + class error(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/nntplib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/nntplib.pyi index 4acb8b210a7a..77cab1e154e8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/nntplib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/nntplib.pyi @@ -3,7 +3,20 @@ import socket import ssl import sys from _typeshed import Self -from typing import IO, Any, Iterable, NamedTuple, Tuple, Union +from typing import IO, Any, Iterable, NamedTuple, Union +from typing_extensions import Literal + +__all__ = [ + "NNTP", + "NNTPError", + "NNTPReplyError", + "NNTPTemporaryError", + "NNTPPermanentError", + "NNTPProtocolError", + "NNTPDataError", + "decode_header", + "NNTP_SSL", +] _File = Union[IO[bytes], bytes, str, None] @@ -16,8 +29,8 @@ class NNTPPermanentError(NNTPError): ... class NNTPProtocolError(NNTPError): ... class NNTPDataError(NNTPError): ... -NNTP_PORT: int -NNTP_SSL_PORT: int +NNTP_PORT: Literal[119] +NNTP_SSL_PORT: Literal[563] class GroupInfo(NamedTuple): group: str @@ -34,11 +47,13 @@ def decode_header(header_str: str) -> str: ... 
_list = list # conflicts with a method named "list" -class _NNTPBase: +class NNTP: encoding: str errors: str host: str + port: int + sock: socket.socket file: IO[bytes] debugging: int welcome: str @@ -47,59 +62,56 @@ class _NNTPBase: authenticated: bool nntp_implementation: str nntp_version: int - def __init__(self, file: IO[bytes], host: str, readermode: bool | None = ..., timeout: float = ...) -> None: ... + def __init__( + self, + host: str, + port: int = ..., + user: str | None = ..., + password: str | None = ..., + readermode: bool | None = ..., + usenetrc: bool = ..., + timeout: float = ..., + ) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... def getwelcome(self) -> str: ... def getcapabilities(self) -> dict[str, _list[str]]: ... def set_debuglevel(self, level: int) -> None: ... def debug(self, level: int) -> None: ... - def capabilities(self) -> Tuple[str, dict[str, _list[str]]]: ... - def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = ...) -> Tuple[str, _list[str]]: ... - def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = ...) -> Tuple[str, _list[str]]: ... - def list(self, group_pattern: str | None = ..., *, file: _File = ...) -> Tuple[str, _list[str]]: ... + def capabilities(self) -> tuple[str, dict[str, _list[str]]]: ... + def newgroups(self, date: datetime.date | datetime.datetime, *, file: _File = ...) -> tuple[str, _list[str]]: ... + def newnews(self, group: str, date: datetime.date | datetime.datetime, *, file: _File = ...) -> tuple[str, _list[str]]: ... + def list(self, group_pattern: str | None = ..., *, file: _File = ...) -> tuple[str, _list[str]]: ... def description(self, group: str) -> str: ... - def descriptions(self, group_pattern: str) -> Tuple[str, dict[str, str]]: ... - def group(self, name: str) -> Tuple[str, int, int, int, str]: ... - def help(self, *, file: _File = ...) -> Tuple[str, _list[str]]: ... 
- def stat(self, message_spec: Any = ...) -> Tuple[str, int, str]: ... - def next(self) -> Tuple[str, int, str]: ... - def last(self) -> Tuple[str, int, str]: ... - def head(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... - def body(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... - def article(self, message_spec: Any = ..., *, file: _File = ...) -> Tuple[str, ArticleInfo]: ... + def descriptions(self, group_pattern: str) -> tuple[str, dict[str, str]]: ... + def group(self, name: str) -> tuple[str, int, int, int, str]: ... + def help(self, *, file: _File = ...) -> tuple[str, _list[str]]: ... + def stat(self, message_spec: Any = ...) -> tuple[str, int, str]: ... + def next(self) -> tuple[str, int, str]: ... + def last(self) -> tuple[str, int, str]: ... + def head(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... + def body(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... + def article(self, message_spec: Any = ..., *, file: _File = ...) -> tuple[str, ArticleInfo]: ... def slave(self) -> str: ... - def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> Tuple[str, _list[str]]: ... - def xover(self, start: int, end: int, *, file: _File = ...) -> Tuple[str, _list[Tuple[int, dict[str, str]]]]: ... + def xhdr(self, hdr: str, str: Any, *, file: _File = ...) -> tuple[str, _list[str]]: ... + def xover(self, start: int, end: int, *, file: _File = ...) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... def over( - self, message_spec: None | str | _list[Any] | Tuple[Any, ...], *, file: _File = ... - ) -> Tuple[str, _list[Tuple[int, dict[str, str]]]]: ... + self, message_spec: None | str | _list[Any] | tuple[Any, ...], *, file: _File = ... + ) -> tuple[str, _list[tuple[int, dict[str, str]]]]: ... if sys.version_info < (3, 9): - def xgtitle(self, group: str, *, file: _File = ...) -> Tuple[str, _list[Tuple[str, str]]]: ... 
- def xpath(self, id: Any) -> Tuple[str, str]: ... - def date(self) -> Tuple[str, datetime.datetime]: ... + def xgtitle(self, group: str, *, file: _File = ...) -> tuple[str, _list[tuple[str, str]]]: ... + def xpath(self, id: Any) -> tuple[str, str]: ... + + def date(self) -> tuple[str, datetime.datetime]: ... def post(self, data: bytes | Iterable[bytes]) -> str: ... def ihave(self, message_id: Any, data: bytes | Iterable[bytes]) -> str: ... def quit(self) -> str: ... def login(self, user: str | None = ..., password: str | None = ..., usenetrc: bool = ...) -> None: ... def starttls(self, context: ssl.SSLContext | None = ...) -> None: ... -class NNTP(_NNTPBase): - port: int - sock: socket.socket - def __init__( - self, - host: str, - port: int = ..., - user: str | None = ..., - password: str | None = ..., - readermode: bool | None = ..., - usenetrc: bool = ..., - timeout: float = ..., - ) -> None: ... - -class NNTP_SSL(_NNTPBase): - sock: socket.socket +class NNTP_SSL(NNTP): + ssl_context: ssl.SSLContext | None + sock: ssl.SSLSocket def __init__( self, host: str, diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ntpath.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ntpath.pyi index d454d7f93fbf..ffe5cc1e5a2d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ntpath.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ntpath.pyi @@ -42,17 +42,103 @@ from posixpath import ( splitext as splitext, supports_unicode_filenames as supports_unicode_filenames, ) -from typing import AnyStr, Tuple, overload +from typing import AnyStr, overload + +if sys.version_info >= (3, 7) or sys.platform != "win32": + __all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "curdir", + 
"pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "samefile", + "sameopenfile", + "samestat", + "commonpath", + ] +else: + __all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "splitunc", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "samefile", + "sameopenfile", + "samestat", + "commonpath", + ] + + def splitunc(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... # deprecated altsep: str -if sys.version_info < (3, 7) and sys.platform == "win32": - def splitunc(p: AnyStr) -> Tuple[AnyStr, AnyStr]: ... # deprecated -# Similar to posixpath, but have slightly different argument names +# First parameter is not actually pos-only, +# but must be defined as pos-only in the stub or cross-platform code doesn't type-check, +# as the parameter name is different in posixpath.join() @overload -def join(path: StrPath, *paths: StrPath) -> str: ... +def join(__path: StrPath, *paths: StrPath) -> str: ... @overload -def join(path: BytesPath, *paths: BytesPath) -> bytes: ... +def join(__path: BytesPath, *paths: BytesPath) -> bytes: ... 
if sys.platform == "win32": if sys.version_info >= (3, 10): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/numbers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/numbers.pyi index eaf53d25347a..7c0c95853741 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/numbers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/numbers.pyi @@ -4,6 +4,8 @@ from abc import ABCMeta, abstractmethod from typing import Any, SupportsFloat, overload +__all__ = ["Number", "Complex", "Real", "Rational", "Integral"] + class Number(metaclass=ABCMeta): @abstractmethod def __hash__(self) -> int: ... @@ -42,7 +44,7 @@ class Complex(Number): def __rpow__(self, base: Any) -> Any: ... def __abs__(self) -> Real: ... def conjugate(self) -> Any: ... - def __eq__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> bool: ... class Real(Complex, SupportsFloat): @abstractmethod diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/opcode.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/opcode.pyi index 982ddee43a63..402dbb74cf58 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/opcode.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/opcode.pyi @@ -1,7 +1,40 @@ import sys -from typing import Sequence +from typing_extensions import Literal -cmp_op: Sequence[str] +__all__ = [ + "cmp_op", + "hasconst", + "hasname", + "hasjrel", + "hasjabs", + "haslocal", + "hascompare", + "hasfree", + "opname", + "opmap", + "HAVE_ARGUMENT", + "EXTENDED_ARG", + "hasnargs", + "stack_effect", +] + +if sys.version_info >= (3, 9): + cmp_op: tuple[Literal["<"], Literal["<="], Literal["=="], Literal["!="], Literal[">"], Literal[">="]] +else: + cmp_op: tuple[ + Literal["<"], + Literal["<="], + Literal["=="], + Literal["!="], + Literal[">"], + Literal[">="], + Literal["in"], + Literal["not in"], + Literal["is"], + Literal["is not"], + Literal["exception match"], + Literal["BAD"], + ] hasconst: list[int] hasname: list[int] 
hasjrel: list[int] @@ -12,8 +45,8 @@ hasfree: list[int] opname: list[str] opmap: dict[str, int] -HAVE_ARGUMENT: int -EXTENDED_ARG: int +HAVE_ARGUMENT: Literal[90] +EXTENDED_ARG: Literal[144] if sys.version_info >= (3, 8): def stack_effect(__opcode: int, __oparg: int | None = ..., *, jump: bool | None = ...) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/operator.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/operator.pyi index c9fe47c4a70f..603e15ebc7be 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/operator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/operator.pyi @@ -1,176 +1,167 @@ -from typing import ( - Any, - Container, - Generic, - Mapping, - MutableMapping, - MutableSequence, - Sequence, - SupportsAbs, - Tuple, - TypeVar, - overload, -) +import sys -_T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) -_K = TypeVar("_K") -_V = TypeVar("_V") +from _operator import * -def lt(__a: Any, __b: Any) -> Any: ... -def le(__a: Any, __b: Any) -> Any: ... -def eq(__a: Any, __b: Any) -> Any: ... -def ne(__a: Any, __b: Any) -> Any: ... -def ge(__a: Any, __b: Any) -> Any: ... -def gt(__a: Any, __b: Any) -> Any: ... -def __lt__(a: Any, b: Any) -> Any: ... -def __le__(a: Any, b: Any) -> Any: ... -def __eq__(a: Any, b: Any) -> Any: ... -def __ne__(a: Any, b: Any) -> Any: ... -def __ge__(a: Any, b: Any) -> Any: ... -def __gt__(a: Any, b: Any) -> Any: ... -def not_(__a: Any) -> bool: ... -def __not__(a: Any) -> bool: ... -def truth(__a: Any) -> bool: ... -def is_(__a: Any, __b: Any) -> bool: ... -def is_not(__a: Any, __b: Any) -> bool: ... -def abs(__a: SupportsAbs[_T]) -> _T: ... -def __abs__(a: SupportsAbs[_T]) -> _T: ... -def add(__a: Any, __b: Any) -> Any: ... -def __add__(a: Any, b: Any) -> Any: ... -def and_(__a: Any, __b: Any) -> Any: ... -def __and__(a: Any, b: Any) -> Any: ... -def floordiv(__a: Any, __b: Any) -> Any: ... -def __floordiv__(a: Any, b: Any) -> Any: ... 
-def index(__a: Any) -> int: ... -def __index__(a: Any) -> int: ... -def inv(__a: Any) -> Any: ... -def invert(__a: Any) -> Any: ... -def __inv__(a: Any) -> Any: ... -def __invert__(a: Any) -> Any: ... -def lshift(__a: Any, __b: Any) -> Any: ... -def __lshift__(a: Any, b: Any) -> Any: ... -def mod(__a: Any, __b: Any) -> Any: ... -def __mod__(a: Any, b: Any) -> Any: ... -def mul(__a: Any, __b: Any) -> Any: ... -def __mul__(a: Any, b: Any) -> Any: ... -def matmul(__a: Any, __b: Any) -> Any: ... -def __matmul__(a: Any, b: Any) -> Any: ... -def neg(__a: Any) -> Any: ... -def __neg__(a: Any) -> Any: ... -def or_(__a: Any, __b: Any) -> Any: ... -def __or__(a: Any, b: Any) -> Any: ... -def pos(__a: Any) -> Any: ... -def __pos__(a: Any) -> Any: ... -def pow(__a: Any, __b: Any) -> Any: ... -def __pow__(a: Any, b: Any) -> Any: ... -def rshift(__a: Any, __b: Any) -> Any: ... -def __rshift__(a: Any, b: Any) -> Any: ... -def sub(__a: Any, __b: Any) -> Any: ... -def __sub__(a: Any, b: Any) -> Any: ... -def truediv(__a: Any, __b: Any) -> Any: ... -def __truediv__(a: Any, b: Any) -> Any: ... -def xor(__a: Any, __b: Any) -> Any: ... -def __xor__(a: Any, b: Any) -> Any: ... -def concat(__a: Sequence[_T], __b: Sequence[_T]) -> Sequence[_T]: ... -def __concat__(a: Sequence[_T], b: Sequence[_T]) -> Sequence[_T]: ... -def contains(__a: Container[Any], __b: Any) -> bool: ... -def __contains__(a: Container[Any], b: Any) -> bool: ... -def countOf(__a: Container[Any], __b: Any) -> int: ... -@overload -def delitem(__a: MutableSequence[Any], __b: int) -> None: ... -@overload -def delitem(__a: MutableSequence[Any], __b: slice) -> None: ... -@overload -def delitem(__a: MutableMapping[_K, Any], __b: _K) -> None: ... -@overload -def __delitem__(a: MutableSequence[Any], b: int) -> None: ... -@overload -def __delitem__(a: MutableSequence[Any], b: slice) -> None: ... -@overload -def __delitem__(a: MutableMapping[_K, Any], b: _K) -> None: ... 
-@overload -def getitem(__a: Sequence[_T], __b: int) -> _T: ... -@overload -def getitem(__a: Sequence[_T], __b: slice) -> Sequence[_T]: ... -@overload -def getitem(__a: Mapping[_K, _V], __b: _K) -> _V: ... -@overload -def __getitem__(a: Sequence[_T], b: int) -> _T: ... -@overload -def __getitem__(a: Sequence[_T], b: slice) -> Sequence[_T]: ... -@overload -def __getitem__(a: Mapping[_K, _V], b: _K) -> _V: ... -def indexOf(__a: Sequence[_T], __b: _T) -> int: ... -@overload -def setitem(__a: MutableSequence[_T], __b: int, __c: _T) -> None: ... -@overload -def setitem(__a: MutableSequence[_T], __b: slice, __c: Sequence[_T]) -> None: ... -@overload -def setitem(__a: MutableMapping[_K, _V], __b: _K, __c: _V) -> None: ... -@overload -def __setitem__(a: MutableSequence[_T], b: int, c: _T) -> None: ... -@overload -def __setitem__(a: MutableSequence[_T], b: slice, c: Sequence[_T]) -> None: ... -@overload -def __setitem__(a: MutableMapping[_K, _V], b: _K, c: _V) -> None: ... -def length_hint(__obj: Any, __default: int = ...) -> int: ... 
+if sys.version_info >= (3, 11): + __all__ = [ + "abs", + "add", + "and_", + "attrgetter", + "call", + "concat", + "contains", + "countOf", + "delitem", + "eq", + "floordiv", + "ge", + "getitem", + "gt", + "iadd", + "iand", + "iconcat", + "ifloordiv", + "ilshift", + "imatmul", + "imod", + "imul", + "index", + "indexOf", + "inv", + "invert", + "ior", + "ipow", + "irshift", + "is_", + "is_not", + "isub", + "itemgetter", + "itruediv", + "ixor", + "le", + "length_hint", + "lshift", + "lt", + "matmul", + "methodcaller", + "mod", + "mul", + "ne", + "neg", + "not_", + "or_", + "pos", + "pow", + "rshift", + "setitem", + "sub", + "truediv", + "truth", + "xor", + ] +else: + __all__ = [ + "abs", + "add", + "and_", + "attrgetter", + "concat", + "contains", + "countOf", + "delitem", + "eq", + "floordiv", + "ge", + "getitem", + "gt", + "iadd", + "iand", + "iconcat", + "ifloordiv", + "ilshift", + "imatmul", + "imod", + "imul", + "index", + "indexOf", + "inv", + "invert", + "ior", + "ipow", + "irshift", + "is_", + "is_not", + "isub", + "itemgetter", + "itruediv", + "ixor", + "le", + "length_hint", + "lshift", + "lt", + "matmul", + "methodcaller", + "mod", + "mul", + "ne", + "neg", + "not_", + "or_", + "pos", + "pow", + "rshift", + "setitem", + "sub", + "truediv", + "truth", + "xor", + ] -class attrgetter(Generic[_T_co]): - @overload - def __new__(cls, attr: str) -> attrgetter[Any]: ... - @overload - def __new__(cls, attr: str, __attr2: str) -> attrgetter[Tuple[Any, Any]]: ... - @overload - def __new__(cls, attr: str, __attr2: str, __attr3: str) -> attrgetter[Tuple[Any, Any, Any]]: ... - @overload - def __new__(cls, attr: str, __attr2: str, __attr3: str, __attr4: str) -> attrgetter[Tuple[Any, Any, Any, Any]]: ... - @overload - def __new__(cls, attr: str, *attrs: str) -> attrgetter[Tuple[Any, ...]]: ... - def __call__(self, obj: Any) -> _T_co: ... - -class itemgetter(Generic[_T_co]): - @overload - def __new__(cls, item: Any) -> itemgetter[Any]: ... 
- @overload - def __new__(cls, item: Any, __item2: Any) -> itemgetter[Tuple[Any, Any]]: ... - @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any) -> itemgetter[Tuple[Any, Any, Any]]: ... - @overload - def __new__(cls, item: Any, __item2: Any, __item3: Any, __item4: Any) -> itemgetter[Tuple[Any, Any, Any, Any]]: ... - @overload - def __new__(cls, item: Any, *items: Any) -> itemgetter[Tuple[Any, ...]]: ... - def __call__(self, obj: Any) -> _T_co: ... - -class methodcaller: - def __init__(self, __name: str, *args: Any, **kwargs: Any) -> None: ... - def __call__(self, obj: Any) -> Any: ... - -def iadd(__a: Any, __b: Any) -> Any: ... -def __iadd__(a: Any, b: Any) -> Any: ... -def iand(__a: Any, __b: Any) -> Any: ... -def __iand__(a: Any, b: Any) -> Any: ... -def iconcat(__a: Any, __b: Any) -> Any: ... -def __iconcat__(a: Any, b: Any) -> Any: ... -def ifloordiv(__a: Any, __b: Any) -> Any: ... -def __ifloordiv__(a: Any, b: Any) -> Any: ... -def ilshift(__a: Any, __b: Any) -> Any: ... -def __ilshift__(a: Any, b: Any) -> Any: ... -def imod(__a: Any, __b: Any) -> Any: ... -def __imod__(a: Any, b: Any) -> Any: ... -def imul(__a: Any, __b: Any) -> Any: ... -def __imul__(a: Any, b: Any) -> Any: ... -def imatmul(__a: Any, __b: Any) -> Any: ... -def __imatmul__(a: Any, b: Any) -> Any: ... -def ior(__a: Any, __b: Any) -> Any: ... -def __ior__(a: Any, b: Any) -> Any: ... -def ipow(__a: Any, __b: Any) -> Any: ... -def __ipow__(a: Any, b: Any) -> Any: ... -def irshift(__a: Any, __b: Any) -> Any: ... -def __irshift__(a: Any, b: Any) -> Any: ... -def isub(__a: Any, __b: Any) -> Any: ... -def __isub__(a: Any, b: Any) -> Any: ... -def itruediv(__a: Any, __b: Any) -> Any: ... -def __itruediv__(a: Any, b: Any) -> Any: ... -def ixor(__a: Any, __b: Any) -> Any: ... -def __ixor__(a: Any, b: Any) -> Any: ... 
+__lt__ = lt +__le__ = le +__eq__ = eq +__ne__ = ne +__ge__ = ge +__gt__ = gt +__not__ = not_ +__abs__ = abs +__add__ = add +__and__ = and_ +__floordiv__ = floordiv +__index__ = index +__inv__ = inv +__invert__ = invert +__lshift__ = lshift +__mod__ = mod +__mul__ = mul +__matmul__ = matmul +__neg__ = neg +__or__ = or_ +__pos__ = pos +__pow__ = pow +__rshift__ = rshift +__sub__ = sub +__truediv__ = truediv +__xor__ = xor +__concat__ = concat +__contains__ = contains +__delitem__ = delitem +__getitem__ = getitem +__setitem__ = setitem +__iadd__ = iadd +__iand__ = iand +__iconcat__ = iconcat +__ifloordiv__ = ifloordiv +__ilshift__ = ilshift +__imod__ = imod +__imul__ = imul +__imatmul__ = imatmul +__ior__ = ior +__ipow__ = ipow +__irshift__ = irshift +__isub__ = isub +__itruediv__ = itruediv +__ixor__ = ixor +if sys.version_info >= (3, 11): + __call__ = call diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi index 38c7746a9ff8..7aedf583e556 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/optparse.pyi @@ -1,11 +1,32 @@ -from typing import IO, Any, AnyStr, Callable, Iterable, Mapping, Sequence, Tuple, Type, overload - -NO_DEFAULT: Tuple[str, ...] +from abc import abstractmethod +from typing import IO, Any, AnyStr, Callable, Iterable, Mapping, Sequence, overload + +__all__ = [ + "Option", + "make_option", + "SUPPRESS_HELP", + "SUPPRESS_USAGE", + "Values", + "OptionContainer", + "OptionGroup", + "OptionParser", + "HelpFormatter", + "IndentedHelpFormatter", + "TitledHelpFormatter", + "OptParseError", + "OptionError", + "OptionConflictError", + "OptionValueError", + "BadOptionError", + "check_choice", +] + +NO_DEFAULT: tuple[str, ...] SUPPRESS_HELP: str SUPPRESS_USAGE: str def check_builtin(option: Option, opt: Any, value: str) -> Any: ... 
-def check_choice(option: Option, opt: Any, value: str) -> Any: ... +def check_choice(option: Option, opt: Any, value: str) -> str: ... class OptParseError(Exception): msg: str @@ -47,9 +68,11 @@ class HelpFormatter: def expand_default(self, option: Option) -> str: ... def format_description(self, description: str) -> str: ... def format_epilog(self, epilog: str) -> str: ... + @abstractmethod def format_heading(self, heading: Any) -> str: ... def format_option(self, option: Option) -> str: ... def format_option_strings(self, option: Option) -> str: ... + @abstractmethod def format_usage(self, usage: Any) -> str: ... def indent(self) -> None: ... def set_long_opt_delimiter(self, delim: str) -> None: ... @@ -72,14 +95,14 @@ class TitledHelpFormatter(HelpFormatter): def format_usage(self, usage: str) -> str: ... class Option: - ACTIONS: Tuple[str, ...] - ALWAYS_TYPED_ACTIONS: Tuple[str, ...] + ACTIONS: tuple[str, ...] + ALWAYS_TYPED_ACTIONS: tuple[str, ...] ATTRS: list[str] CHECK_METHODS: list[Callable[..., Any]] | None - CONST_ACTIONS: Tuple[str, ...] - STORE_ACTIONS: Tuple[str, ...] - TYPED_ACTIONS: Tuple[str, ...] - TYPES: Tuple[str, ...] + CONST_ACTIONS: tuple[str, ...] + STORE_ACTIONS: tuple[str, ...] + TYPED_ACTIONS: tuple[str, ...] + TYPES: tuple[str, ...] TYPE_CHECKER: dict[str, Callable[..., Any]] _long_opts: list[str] _short_opts: list[str] @@ -89,7 +112,7 @@ class Option: nargs: int type: Any callback: Callable[..., Any] | None - callback_args: Tuple[Any, ...] | None + callback_args: tuple[Any, ...] | None callback_kwargs: dict[str, Any] | None help: str | None metavar: str | None @@ -119,8 +142,8 @@ class OptionContainer: conflict_handler: str defaults: dict[str, Any] description: Any - option_class: Type[Option] - def __init__(self, option_class: Type[Option], conflict_handler: Any, description: Any) -> None: ... + option_class: type[Option] + def __init__(self, option_class: type[Option], conflict_handler: Any, description: Any) -> None: ... 
def _check_conflict(self, option: Any) -> None: ... def _create_option_mappings(self) -> None: ... def _share_option_mappings(self, parser: OptionParser) -> None: ... @@ -157,7 +180,8 @@ class Values: def read_file(self, filename: str, mode: str = ...) -> None: ... def read_module(self, modname: str, mode: str = ...) -> None: ... def __getattr__(self, name: str) -> Any: ... - def __setattr__(self, name: str, value: Any) -> None: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __eq__(self, other: object) -> bool: ... class OptionParser(OptionContainer): allow_interspersed_args: bool @@ -177,7 +201,7 @@ class OptionParser(OptionContainer): self, usage: str | None = ..., option_list: Iterable[Option] | None = ..., - option_class: Type[Option] = ..., + option_class: type[Option] = ..., version: str | None = ..., conflict_handler: str = ..., description: str | None = ..., @@ -201,7 +225,7 @@ class OptionParser(OptionContainer): def add_option_group(self, __opt_group: OptionGroup) -> OptionGroup: ... @overload def add_option_group(self, *args: Any, **kwargs: Any) -> OptionGroup: ... - def check_values(self, values: Values, args: list[str]) -> Tuple[Values, list[str]]: ... + def check_values(self, values: Values, args: list[str]) -> tuple[Values, list[str]]: ... def disable_interspersed_args(self) -> None: ... def enable_interspersed_args(self) -> None: ... def error(self, msg: str) -> None: ... @@ -215,7 +239,7 @@ class OptionParser(OptionContainer): def get_prog_name(self) -> str: ... def get_usage(self) -> str: ... def get_version(self) -> str: ... - def parse_args(self, args: Sequence[AnyStr] | None = ..., values: Values | None = ...) -> Tuple[Values, list[AnyStr]]: ... + def parse_args(self, args: Sequence[AnyStr] | None = ..., values: Values | None = ...) -> tuple[Values, list[AnyStr]]: ... def print_usage(self, file: IO[str] | None = ...) -> None: ... def print_help(self, file: IO[str] | None = ...) -> None: ... 
def print_version(self, file: IO[str] | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi index 9af9aa34a6ca..d38148e7921f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/os/__init__.pyi @@ -1,5 +1,6 @@ import sys from _typeshed import ( + BytesPath, FileDescriptorLike, OpenBinaryMode, OpenBinaryModeReading, @@ -9,10 +10,11 @@ from _typeshed import ( Self, StrOrBytesPath, StrPath, + structseq, ) from builtins import OSError +from contextlib import AbstractContextManager from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper as _TextIOWrapper -from posix import listdir as listdir, times_result from subprocess import Popen from typing import ( IO, @@ -20,24 +22,20 @@ from typing import ( AnyStr, BinaryIO, Callable, - ContextManager, Generic, Iterable, Iterator, - List, Mapping, MutableMapping, NoReturn, Protocol, Sequence, - Set, - Tuple, TypeVar, Union, overload, runtime_checkable, ) -from typing_extensions import Literal +from typing_extensions import Literal, final from . 
import path as _path @@ -48,6 +46,8 @@ if sys.version_info >= (3, 9): path = _path _T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") _AnyStr_co = TypeVar("_AnyStr_co", str, bytes, covariant=True) # ----- os variables ----- @@ -56,10 +56,10 @@ error = OSError supports_bytes_environ: bool -supports_dir_fd: Set[Callable[..., Any]] -supports_fd: Set[Callable[..., Any]] -supports_effective_ids: Set[Callable[..., Any]] -supports_follow_symlinks: Set[Callable[..., Any]] +supports_dir_fd: set[Callable[..., Any]] +supports_fd: set[Callable[..., Any]] +supports_effective_ids: set[Callable[..., Any]] +supports_follow_symlinks: set[Callable[..., Any]] if sys.platform != "win32": # Unix only @@ -93,6 +93,9 @@ if sys.platform != "win32": P_PGID: int P_ALL: int + if sys.platform == "linux" and sys.version_info >= (3, 9): + P_PIDFD: int + WEXITED: int WSTOPPED: int WNOWAIT: int @@ -102,6 +105,12 @@ if sys.platform != "win32": CLD_TRAPPED: int CLD_CONTINUED: int + if sys.version_info >= (3, 9): + CLD_KILLED: int + CLD_STOPPED: int + + # TODO: SCHED_RESET_ON_FORK not available on darwin? + # TODO: SCHED_BATCH and SCHED_IDLE are linux only? SCHED_OTHER: int # some flavors of Unix SCHED_BATCH: int # some flavors of Unix SCHED_IDLE: int # some flavors of Unix @@ -120,6 +129,8 @@ if sys.platform != "win32": if sys.platform == "linux": RTLD_DEEPBIND: int + GRND_NONBLOCK: int + GRND_RANDOM: int SEEK_SET: int SEEK_CUR: int @@ -162,6 +173,24 @@ O_NOATIME: int # Gnu extension if in C library O_PATH: int # Gnu extension if in C library O_TMPFILE: int # Gnu extension if in C library O_LARGEFILE: int # Gnu extension if in C library +O_ACCMODE: int # TODO: when does this exist? 
+ +if sys.platform != "win32" and sys.platform != "darwin": + # posix, but apparently missing on macos + ST_APPEND: int + ST_MANDLOCK: int + ST_NOATIME: int + ST_NODEV: int + ST_NODIRATIME: int + ST_NOEXEC: int + ST_RELATIME: int + ST_SYNCHRONOUS: int + ST_WRITE: int + +if sys.platform != "win32": + NGROUPS_MAX: int + ST_NOSUID: int + ST_RDONLY: int curdir: str pardir: str @@ -211,13 +240,24 @@ class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): putenv: Callable[[AnyStr, AnyStr], None], unsetenv: Callable[[AnyStr, AnyStr], None], ) -> None: ... - def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... # type: ignore + + def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... # type: ignore[override] def copy(self) -> dict[AnyStr, AnyStr]: ... def __delitem__(self, key: AnyStr) -> None: ... def __getitem__(self, key: AnyStr) -> AnyStr: ... def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... def __len__(self) -> int: ... + if sys.version_info >= (3, 9): + def __or__(self, value: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + def __ror__(self, value: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + # We use @overload instead of a Union for reasons similar to those given for + # overloading MutableMapping.update in stdlib/typing.pyi + # The type: ignore is needed due to incompatible __or__/__ior__ signatures + @overload # type: ignore[misc] + def __ior__(self: Self, value: Mapping[AnyStr, AnyStr]) -> Self: ... + @overload + def __ior__(self: Self, value: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... 
environ: _Environ[str] if sys.platform != "win32": @@ -262,60 +302,89 @@ if sys.platform != "win32": TMP_MAX: int # Undocumented, but used by tempfile # ----- os classes (structures) ----- -class stat_result: - # For backward compatibility, the return value of stat() is also - # accessible as a tuple of at least 10 integers giving the most important - # (and portable) members of the stat structure, in the order st_mode, - # st_ino, st_dev, st_nlink, st_uid, st_gid, st_size, st_atime, st_mtime, - # st_ctime. More items may be added at the end by some implementations. - - st_mode: int # protection bits, - st_ino: int # inode number, - st_dev: int # device, - st_nlink: int # number of hard links, - st_uid: int # user id of owner, - st_gid: int # group id of owner, - st_size: int # size of file, in bytes, - st_atime: float # time of most recent access, - st_mtime: float # time of most recent content modification, - st_ctime: float # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) - st_atime_ns: int # time of most recent access, in nanoseconds - st_mtime_ns: int # time of most recent content modification in nanoseconds - st_ctime_ns: int # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds - if sys.version_info >= (3, 8) and sys.platform == "win32": - st_reparse_tag: int +@final +class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): + # The constructor of this class takes an iterable of variable length (though it must be at least 10). + # + # However, this class behaves like a tuple of 10 elements, + # no matter how long the iterable supplied to the constructor is. + # https://github.com/python/typeshed/pull/6560#discussion_r767162532 + # + # The 10 elements always present are st_mode, st_ino, st_dev, st_nlink, + # st_uid, st_gid, st_size, st_atime, st_mtime, st_ctime. 
+ # + # More items may be added at the end by some implementations. + @property + def st_mode(self) -> int: ... # protection bits, + @property + def st_ino(self) -> int: ... # inode number, + @property + def st_dev(self) -> int: ... # device, + @property + def st_nlink(self) -> int: ... # number of hard links, + @property + def st_uid(self) -> int: ... # user id of owner, + @property + def st_gid(self) -> int: ... # group id of owner, + @property + def st_size(self) -> int: ... # size of file, in bytes, + @property + def st_atime(self) -> float: ... # time of most recent access, + @property + def st_mtime(self) -> float: ... # time of most recent content modification, + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) + @property + def st_ctime(self) -> float: ... + @property + def st_atime_ns(self) -> int: ... # time of most recent access, in nanoseconds + @property + def st_mtime_ns(self) -> int: ... # time of most recent content modification in nanoseconds + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds + @property + def st_ctime_ns(self) -> int: ... if sys.platform == "win32": - st_file_attributes: int - def __getitem__(self, i: int) -> int: ... - # not documented - def __init__(self, tuple: Tuple[int, ...]) -> None: ... 
- # On some Unix systems (such as Linux), the following attributes may also - # be available: - st_blocks: int # number of blocks allocated for file - st_blksize: int # filesystem blocksize - st_rdev: int # type of device if an inode device - st_flags: int # user defined flags for file - - # On other Unix systems (such as FreeBSD), the following attributes may be - # available (but may be only filled out if root tries to use them): - st_gen: int # file generation number - st_birthtime: int # time of file creation - - # On Mac OS systems, the following attributes may also be available: - st_rsize: int - st_creator: int - st_type: int + @property + def st_file_attributes(self) -> int: ... + if sys.version_info >= (3, 8): + @property + def st_reparse_tag(self) -> int: ... + else: + @property + def st_blocks(self) -> int: ... # number of blocks allocated for file + @property + def st_blksize(self) -> int: ... # filesystem blocksize + @property + def st_rdev(self) -> int: ... # type of device if an inode device + if sys.platform != "linux": + # These properties are available on MacOS, but not on Windows or Ubuntu. + # On other Unix systems (such as FreeBSD), the following attributes may be + # available (but may be only filled out if root tries to use them): + @property + def st_gen(self) -> int: ... # file generation number + @property + def st_birthtime(self) -> int: ... # time of file creation + if sys.platform == "darwin": + @property + def st_flags(self) -> int: ... # user defined flags for file + # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. + # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 @runtime_checkable class PathLike(Protocol[_AnyStr_co]): def __fspath__(self) -> _AnyStr_co: ... - if sys.version_info >= (3, 9): - def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@overload +def listdir(path: StrPath | None = ...) -> list[str]: ... 
+@overload +def listdir(path: BytesPath) -> list[bytes]: ... +@overload +def listdir(path: int) -> list[str]: ... _FdOrAnyPath = Union[int, StrOrBytesPath] +@final class DirEntry(Generic[AnyStr]): - # This is what the scandir interator yields + # This is what the scandir iterator yields # The constructor is hidden name: AnyStr @@ -329,44 +398,36 @@ class DirEntry(Generic[AnyStr]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -if sys.platform != "win32": - _Tuple10Int = Tuple[int, int, int, int, int, int, int, int, int, int] - _Tuple11Int = Tuple[int, int, int, int, int, int, int, int, int, int, int] +if sys.version_info >= (3, 7): + _StatVfsTuple = tuple[int, int, int, int, int, int, int, int, int, int, int] +else: + _StatVfsTuple = tuple[int, int, int, int, int, int, int, int, int, int] + +@final +class statvfs_result(structseq[int], _StatVfsTuple): + @property + def f_bsize(self) -> int: ... + @property + def f_frsize(self) -> int: ... + @property + def f_blocks(self) -> int: ... + @property + def f_bfree(self) -> int: ... + @property + def f_bavail(self) -> int: ... + @property + def f_files(self) -> int: ... + @property + def f_ffree(self) -> int: ... + @property + def f_favail(self) -> int: ... + @property + def f_flag(self) -> int: ... + @property + def f_namemax(self) -> int: ... if sys.version_info >= (3, 7): - # f_fsid was added in https://github.com/python/cpython/pull/4571 - class statvfs_result(_Tuple10Int): # Unix only - def __new__(cls, seq: _Tuple10Int | _Tuple11Int, dict: dict[str, int] = ...) -> statvfs_result: ... 
- n_fields: int - n_sequence_fields: int - n_unnamed_fields: int - - f_bsize: int - f_frsize: int - f_blocks: int - f_bfree: int - f_bavail: int - f_files: int - f_ffree: int - f_favail: int - f_flag: int - f_namemax: int - f_fsid: int - else: - class statvfs_result(_Tuple10Int): # Unix only - n_fields: int - n_sequence_fields: int - n_unnamed_fields: int - - f_bsize: int - f_frsize: int - f_blocks: int - f_bfree: int - f_bavail: int - f_files: int - f_ffree: int - f_favail: int - f_flag: int - f_namemax: int + @property + def f_fsid(self) -> int: ... # ----- os function stubs ----- def fsencode(filename: StrOrBytesPath) -> bytes: ... @@ -383,9 +444,20 @@ def getpid() -> int: ... def getppid() -> int: ... def strerror(__code: int) -> str: ... def umask(__mask: int) -> int: ... +@final +class uname_result(structseq[str], tuple[str, str, str, str, str]): + @property + def sysname(self) -> str: ... + @property + def nodename(self) -> str: ... + @property + def release(self) -> str: ... + @property + def version(self) -> str: ... + @property + def machine(self) -> str: ... if sys.platform != "win32": - # Unix only def ctermid() -> str: ... def getegid() -> int: ... def geteuid() -> int: ... @@ -398,8 +470,9 @@ if sys.platform != "win32": def getpriority(which: int, who: int) -> int: ... def setpriority(which: int, who: int, priority: int) -> None: ... if sys.platform != "darwin": - def getresuid() -> Tuple[int, int, int]: ... - def getresgid() -> Tuple[int, int, int]: ... + def getresuid() -> tuple[int, int, int]: ... + def getresgid() -> tuple[int, int, int]: ... + def getuid() -> int: ... def setegid(__egid: int) -> None: ... def seteuid(__euid: int) -> None: ... @@ -411,11 +484,11 @@ if sys.platform != "win32": if sys.platform != "darwin": def setresgid(rgid: int, egid: int, sgid: int) -> None: ... def setresuid(ruid: int, euid: int, suid: int) -> None: ... + def setreuid(__ruid: int, __euid: int) -> None: ... def getsid(__pid: int) -> int: ... 
def setsid() -> None: ... def setuid(__uid: int) -> None: ... - from posix import uname_result def uname() -> uname_result: ... @overload @@ -431,7 +504,7 @@ if sys.platform != "win32": def putenv(__name: bytes | str, __value: bytes | str) -> None: ... -if sys.platform != "win32": +if sys.platform != "win32" or sys.version_info >= (3, 9): def unsetenv(__name: bytes | str) -> None: ... _Opener = Callable[[str, int], int] @@ -525,32 +598,42 @@ else: def dup2(fd: int, fd2: int, inheritable: bool = ...) -> None: ... def fstat(fd: int) -> stat_result: ... +def ftruncate(__fd: int, __length: int) -> None: ... def fsync(fd: FileDescriptorLike) -> None: ... +def isatty(__fd: int) -> bool: ... def lseek(__fd: int, __position: int, __how: int) -> int: ... def open(path: StrOrBytesPath, flags: int, mode: int = ..., *, dir_fd: int | None = ...) -> int: ... -def pipe() -> Tuple[int, int]: ... +def pipe() -> tuple[int, int]: ... def read(__fd: int, __length: int) -> bytes: ... if sys.platform != "win32": # Unix only def fchmod(fd: int, mode: int) -> None: ... def fchown(fd: int, uid: int, gid: int) -> None: ... - if sys.platform != "darwin": - def fdatasync(fd: FileDescriptorLike) -> None: ... # Unix only, not Mac def fpathconf(__fd: int, __name: str | int) -> int: ... def fstatvfs(__fd: int) -> statvfs_result: ... - def ftruncate(__fd: int, __length: int) -> None: ... def get_blocking(__fd: int) -> bool: ... def set_blocking(__fd: int, __blocking: bool) -> None: ... - def isatty(__fd: int) -> bool: ... def lockf(__fd: int, __command: int, __length: int) -> None: ... - def openpty() -> Tuple[int, int]: ... # some flavors of Unix + def openpty() -> tuple[int, int]: ... # some flavors of Unix if sys.platform != "darwin": - def pipe2(flags: int) -> Tuple[int, int]: ... # some flavors of Unix + def fdatasync(fd: FileDescriptorLike) -> None: ... + def pipe2(__flags: int) -> tuple[int, int]: ... # some flavors of Unix def posix_fallocate(fd: int, offset: int, length: int) -> None: ... 
def posix_fadvise(fd: int, offset: int, length: int, advice: int) -> None: ... + def pread(__fd: int, __length: int, __offset: int) -> bytes: ... def pwrite(__fd: int, __buffer: bytes, __offset: int) -> int: ... + if sys.platform != "darwin": + if sys.version_info >= (3, 10): + RWF_APPEND: int # docs say available on 3.7+, stubtest says otherwise + if sys.version_info >= (3, 7): + def preadv(__fd: int, __buffers: Iterable[bytes], __offset: int, __flags: int = ...) -> int: ... + def pwritev(__fd: int, __buffers: Iterable[bytes], __offset: int, __flags: int = ...) -> int: ... + RWF_DSYNC: int + RWF_SYNC: int + RWF_HIPRI: int + RWF_NOWAIT: int @overload def sendfile(out_fd: int, in_fd: int, offset: int | None, count: int) -> int: ... @overload @@ -566,9 +649,12 @@ if sys.platform != "win32": def readv(__fd: int, __buffers: Sequence[bytearray]) -> int: ... def writev(__fd: int, __buffers: Sequence[bytes]) -> int: ... -class terminal_size(Tuple[int, int]): - columns: int - lines: int +@final +class terminal_size(structseq[int], tuple[int, int]): + @property + def columns(self) -> int: ... + @property + def lines(self) -> int: ... def get_terminal_size(fd: int = ...) -> terminal_size: ... def get_inheritable(__fd: int) -> bool: ... @@ -593,17 +679,14 @@ def getcwd() -> str: ... def getcwdb() -> bytes: ... def chmod(path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... -if sys.platform != "win32": +if sys.platform != "win32" and sys.platform != "linux": def chflags(path: StrOrBytesPath, flags: int, follow_symlinks: bool = ...) -> None: ... # some flavors of Unix - def chown( - path: _FdOrAnyPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ... - ) -> None: ... # Unix only + def lchflags(path: StrOrBytesPath, flags: int) -> None: ... + def lchmod(path: StrOrBytesPath, mode: int) -> None: ... if sys.platform != "win32": - # Unix only def chroot(path: StrOrBytesPath) -> None: ... 
- def lchflags(path: StrOrBytesPath, flags: int) -> None: ... - def lchmod(path: StrOrBytesPath, mode: int) -> None: ... + def chown(path: _FdOrAnyPath, uid: int, gid: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> None: ... def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... def link( @@ -637,7 +720,7 @@ def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... def replace(src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ...) -> None: ... def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... -class _ScandirIterator(Iterator[DirEntry[AnyStr]], ContextManager[_ScandirIterator[AnyStr]]): +class _ScandirIterator(Iterator[DirEntry[AnyStr]], AbstractContextManager[_ScandirIterator[AnyStr]]): def __next__(self) -> DirEntry[AnyStr]: ... def close(self) -> None: ... @@ -675,9 +758,9 @@ def truncate(path: _FdOrAnyPath, length: int) -> None: ... # Unix only up to ve def unlink(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... def utime( path: _FdOrAnyPath, - times: Tuple[int, int] | Tuple[float, float] | None = ..., + times: tuple[int, int] | tuple[float, float] | None = ..., *, - ns: Tuple[int, int] = ..., + ns: tuple[int, int] = ..., dir_fd: int | None = ..., follow_symlinks: bool = ..., ) -> None: ... @@ -686,7 +769,7 @@ _OnError = Callable[[OSError], Any] def walk( top: AnyStr | PathLike[AnyStr], topdown: bool = ..., onerror: _OnError | None = ..., followlinks: bool = ... -) -> Iterator[Tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... +) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... if sys.platform != "win32": if sys.version_info >= (3, 7): @@ -698,7 +781,7 @@ if sys.platform != "win32": *, follow_symlinks: bool = ..., dir_fd: int | None = ..., - ) -> Iterator[Tuple[str, list[str], list[str], int]]: ... + ) -> Iterator[tuple[str, list[str], list[str], int]]: ... 
@overload def fwalk( top: bytes, @@ -707,7 +790,7 @@ if sys.platform != "win32": *, follow_symlinks: bool = ..., dir_fd: int | None = ..., - ) -> Iterator[Tuple[bytes, list[bytes], list[bytes], int]]: ... + ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... else: def fwalk( top: StrPath = ..., @@ -716,7 +799,7 @@ if sys.platform != "win32": *, follow_symlinks: bool = ..., dir_fd: int | None = ..., - ) -> Iterator[Tuple[str, list[str], list[str], int]]: ... + ) -> Iterator[tuple[str, list[str], list[str], int]]: ... if sys.platform == "linux": def getxattr(path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ...) -> bytes: ... def listxattr(path: _FdOrAnyPath | None = ..., *, follow_symlinks: bool = ...) -> list[str]: ... @@ -741,14 +824,14 @@ def execlpe(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: Any) -> NoRetur # in practice, and doing so would explode the number of combinations in this already long union. # All these combinations are necessary due to list being invariant. _ExecVArgs = Union[ - Tuple[StrOrBytesPath, ...], - List[bytes], - List[str], - List[PathLike[Any]], - List[Union[bytes, str]], - List[Union[bytes, PathLike[Any]]], - List[Union[str, PathLike[Any]]], - List[Union[bytes, str, PathLike[Any]]], + tuple[StrOrBytesPath, ...], + list[bytes], + list[str], + list[PathLike[Any]], + list[Union[bytes, str]], + list[Union[bytes, PathLike[Any]]], + list[Union[str, PathLike[Any]]], + list[Union[bytes, str, PathLike[Any]]], ] _ExecEnv = Union[Mapping[bytes, Union[bytes, str]], Mapping[str, Union[bytes, str]]] @@ -762,7 +845,7 @@ def kill(__pid: int, __signal: int) -> None: ... if sys.platform != "win32": # Unix only def fork() -> int: ... - def forkpty() -> Tuple[int, int]: ... # some flavors of Unix + def forkpty() -> tuple[int, int]: ... # some flavors of Unix def killpg(__pgid: int, __signal: int) -> None: ... def nice(__increment: int) -> int: ... 
if sys.platform != "darwin": @@ -770,7 +853,7 @@ if sys.platform != "win32": class _wrap_close(_TextIOWrapper): def __init__(self, stream: _TextIOWrapper, proc: Popen[str]) -> None: ... - def close(self) -> int | None: ... # type: ignore + def close(self) -> int | None: ... # type: ignore[override] def popen(cmd: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ... def spawnl(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... @@ -785,24 +868,49 @@ else: def spawnve(__mode: int, __path: StrOrBytesPath, __argv: _ExecVArgs, __env: _ExecEnv) -> int: ... def system(command: StrOrBytesPath) -> int: ... +@final +class times_result(structseq[float], tuple[float, float, float, float, float]): + @property + def user(self) -> float: ... + @property + def system(self) -> float: ... + @property + def children_user(self) -> float: ... + @property + def children_system(self) -> float: ... + @property + def elapsed(self) -> float: ... + def times() -> times_result: ... -def waitpid(__pid: int, __options: int) -> Tuple[int, int]: ... +def waitpid(__pid: int, __options: int) -> tuple[int, int]: ... if sys.platform == "win32": def startfile(path: StrOrBytesPath, operation: str | None = ...) -> None: ... else: - # Unix only def spawnlp(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath) -> int: ... def spawnlpe(mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any) -> int: ... # Imprecise signature def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... - def wait() -> Tuple[int, int]: ... # Unix only + def wait() -> tuple[int, int]: ... # Unix only if sys.platform != "darwin": - from posix import waitid_result + @final + class waitid_result(structseq[int], tuple[int, int, int, int, int]): + @property + def si_pid(self) -> int: ... + @property + def si_uid(self) -> int: ... 
+ @property + def si_signo(self) -> int: ... + @property + def si_status(self) -> int: ... + @property + def si_code(self) -> int: ... + def waitid(idtype: int, ident: int, options: int) -> waitid_result: ... - def wait3(options: int) -> Tuple[int, int, Any]: ... - def wait4(pid: int, options: int) -> Tuple[int, int, Any]: ... + + def wait3(options: int) -> tuple[int, int, Any]: ... + def wait4(pid: int, options: int) -> tuple[int, int, Any]: ... def WCOREDUMP(__status: int) -> bool: ... def WIFCONTINUED(status: int) -> bool: ... def WIFSTOPPED(status: int) -> bool: ... @@ -812,10 +920,43 @@ else: def WSTOPSIG(status: int) -> int: ... def WTERMSIG(status: int) -> int: ... if sys.version_info >= (3, 8): - from posix import posix_spawn as posix_spawn, posix_spawnp as posix_spawnp + def posix_spawn( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + def posix_spawnp( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + POSIX_SPAWN_OPEN: int + POSIX_SPAWN_CLOSE: int + POSIX_SPAWN_DUP2: int if sys.platform != "win32": - from posix import sched_param + @final + class sched_param(structseq[int], tuple[int]): + def __new__(cls: type[Self], sched_priority: int) -> Self: ... + @property + def sched_priority(self) -> int: ... + def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix def sched_yield() -> None: ... 
# some flavors of Unix @@ -826,14 +967,14 @@ if sys.platform != "win32": def sched_setparam(pid: int, param: sched_param) -> None: ... # some flavors of Unix def sched_getparam(pid: int) -> sched_param: ... # some flavors of Unix def sched_setaffinity(pid: int, mask: Iterable[int]) -> None: ... # some flavors of Unix - def sched_getaffinity(pid: int) -> Set[int]: ... # some flavors of Unix + def sched_getaffinity(pid: int) -> set[int]: ... # some flavors of Unix def cpu_count() -> int | None: ... if sys.platform != "win32": # Unix only def confstr(__name: str | int) -> str | None: ... - def getloadavg() -> Tuple[float, float, float]: ... + def getloadavg() -> tuple[float, float, float]: ... def sysconf(__name: str | int) -> int: ... if sys.platform == "linux": @@ -857,6 +998,7 @@ if sys.version_info >= (3, 8): def close(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... + def add_dll_directory(path: str) -> _AddedDllDirectory: ... if sys.platform == "linux": MFD_CLOEXEC: int @@ -877,3 +1019,6 @@ if sys.version_info >= (3, 8): MFD_HUGE_2GB: int MFD_HUGE_16GB: int def memfd_create(name: str, flags: int = ...) -> int: ... + +if sys.version_info >= (3, 9): + def waitstatus_to_exitcode(status: int) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/os/path.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/os/path.pyi index 4533738983f7..dc688a9f877f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/os/path.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/os/path.pyi @@ -2,5 +2,7 @@ import sys if sys.platform == "win32": from ntpath import * + from ntpath import __all__ as __all__ else: from posixpath import * + from posixpath import __all__ as __all__ diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ossaudiodev.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ossaudiodev.pyi index f221c95b8036..d956a89729fd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ossaudiodev.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ossaudiodev.pyi @@ -1,131 +1,132 @@ +import sys from typing import Any, overload from typing_extensions import Literal -AFMT_AC3: int -AFMT_A_LAW: int -AFMT_IMA_ADPCM: int -AFMT_MPEG: int -AFMT_MU_LAW: int -AFMT_QUERY: int -AFMT_S16_BE: int -AFMT_S16_LE: int -AFMT_S16_NE: int -AFMT_S8: int -AFMT_U16_BE: int -AFMT_U16_LE: int -AFMT_U8: int -SNDCTL_COPR_HALT: int -SNDCTL_COPR_LOAD: int -SNDCTL_COPR_RCODE: int -SNDCTL_COPR_RCVMSG: int -SNDCTL_COPR_RDATA: int -SNDCTL_COPR_RESET: int -SNDCTL_COPR_RUN: int -SNDCTL_COPR_SENDMSG: int -SNDCTL_COPR_WCODE: int -SNDCTL_COPR_WDATA: int -SNDCTL_DSP_BIND_CHANNEL: int -SNDCTL_DSP_CHANNELS: int -SNDCTL_DSP_GETBLKSIZE: int -SNDCTL_DSP_GETCAPS: int -SNDCTL_DSP_GETCHANNELMASK: int -SNDCTL_DSP_GETFMTS: int -SNDCTL_DSP_GETIPTR: int -SNDCTL_DSP_GETISPACE: int -SNDCTL_DSP_GETODELAY: int -SNDCTL_DSP_GETOPTR: int -SNDCTL_DSP_GETOSPACE: int -SNDCTL_DSP_GETSPDIF: int -SNDCTL_DSP_GETTRIGGER: int -SNDCTL_DSP_MAPINBUF: int -SNDCTL_DSP_MAPOUTBUF: int -SNDCTL_DSP_NONBLOCK: int -SNDCTL_DSP_POST: int -SNDCTL_DSP_PROFILE: int -SNDCTL_DSP_RESET: int -SNDCTL_DSP_SAMPLESIZE: int -SNDCTL_DSP_SETDUPLEX: int -SNDCTL_DSP_SETFMT: int 
-SNDCTL_DSP_SETFRAGMENT: int -SNDCTL_DSP_SETSPDIF: int -SNDCTL_DSP_SETSYNCRO: int -SNDCTL_DSP_SETTRIGGER: int -SNDCTL_DSP_SPEED: int -SNDCTL_DSP_STEREO: int -SNDCTL_DSP_SUBDIVIDE: int -SNDCTL_DSP_SYNC: int -SNDCTL_FM_4OP_ENABLE: int -SNDCTL_FM_LOAD_INSTR: int -SNDCTL_MIDI_INFO: int -SNDCTL_MIDI_MPUCMD: int -SNDCTL_MIDI_MPUMODE: int -SNDCTL_MIDI_PRETIME: int -SNDCTL_SEQ_CTRLRATE: int -SNDCTL_SEQ_GETINCOUNT: int -SNDCTL_SEQ_GETOUTCOUNT: int -SNDCTL_SEQ_GETTIME: int -SNDCTL_SEQ_NRMIDIS: int -SNDCTL_SEQ_NRSYNTHS: int -SNDCTL_SEQ_OUTOFBAND: int -SNDCTL_SEQ_PANIC: int -SNDCTL_SEQ_PERCMODE: int -SNDCTL_SEQ_RESET: int -SNDCTL_SEQ_RESETSAMPLES: int -SNDCTL_SEQ_SYNC: int -SNDCTL_SEQ_TESTMIDI: int -SNDCTL_SEQ_THRESHOLD: int -SNDCTL_SYNTH_CONTROL: int -SNDCTL_SYNTH_ID: int -SNDCTL_SYNTH_INFO: int -SNDCTL_SYNTH_MEMAVL: int -SNDCTL_SYNTH_REMOVESAMPLE: int -SNDCTL_TMR_CONTINUE: int -SNDCTL_TMR_METRONOME: int -SNDCTL_TMR_SELECT: int -SNDCTL_TMR_SOURCE: int -SNDCTL_TMR_START: int -SNDCTL_TMR_STOP: int -SNDCTL_TMR_TEMPO: int -SNDCTL_TMR_TIMEBASE: int -SOUND_MIXER_ALTPCM: int -SOUND_MIXER_BASS: int -SOUND_MIXER_CD: int -SOUND_MIXER_DIGITAL1: int -SOUND_MIXER_DIGITAL2: int -SOUND_MIXER_DIGITAL3: int -SOUND_MIXER_IGAIN: int -SOUND_MIXER_IMIX: int -SOUND_MIXER_LINE: int -SOUND_MIXER_LINE1: int -SOUND_MIXER_LINE2: int -SOUND_MIXER_LINE3: int -SOUND_MIXER_MIC: int -SOUND_MIXER_MONITOR: int -SOUND_MIXER_NRDEVICES: int -SOUND_MIXER_OGAIN: int -SOUND_MIXER_PCM: int -SOUND_MIXER_PHONEIN: int -SOUND_MIXER_PHONEOUT: int -SOUND_MIXER_RADIO: int -SOUND_MIXER_RECLEV: int -SOUND_MIXER_SPEAKER: int -SOUND_MIXER_SYNTH: int -SOUND_MIXER_TREBLE: int -SOUND_MIXER_VIDEO: int -SOUND_MIXER_VOLUME: int +if sys.platform != "win32" and sys.platform != "darwin": + AFMT_AC3: int + AFMT_A_LAW: int + AFMT_IMA_ADPCM: int + AFMT_MPEG: int + AFMT_MU_LAW: int + AFMT_QUERY: int + AFMT_S16_BE: int + AFMT_S16_LE: int + AFMT_S16_NE: int + AFMT_S8: int + AFMT_U16_BE: int + AFMT_U16_LE: int + AFMT_U8: int + 
SNDCTL_COPR_HALT: int + SNDCTL_COPR_LOAD: int + SNDCTL_COPR_RCODE: int + SNDCTL_COPR_RCVMSG: int + SNDCTL_COPR_RDATA: int + SNDCTL_COPR_RESET: int + SNDCTL_COPR_RUN: int + SNDCTL_COPR_SENDMSG: int + SNDCTL_COPR_WCODE: int + SNDCTL_COPR_WDATA: int + SNDCTL_DSP_BIND_CHANNEL: int + SNDCTL_DSP_CHANNELS: int + SNDCTL_DSP_GETBLKSIZE: int + SNDCTL_DSP_GETCAPS: int + SNDCTL_DSP_GETCHANNELMASK: int + SNDCTL_DSP_GETFMTS: int + SNDCTL_DSP_GETIPTR: int + SNDCTL_DSP_GETISPACE: int + SNDCTL_DSP_GETODELAY: int + SNDCTL_DSP_GETOPTR: int + SNDCTL_DSP_GETOSPACE: int + SNDCTL_DSP_GETSPDIF: int + SNDCTL_DSP_GETTRIGGER: int + SNDCTL_DSP_MAPINBUF: int + SNDCTL_DSP_MAPOUTBUF: int + SNDCTL_DSP_NONBLOCK: int + SNDCTL_DSP_POST: int + SNDCTL_DSP_PROFILE: int + SNDCTL_DSP_RESET: int + SNDCTL_DSP_SAMPLESIZE: int + SNDCTL_DSP_SETDUPLEX: int + SNDCTL_DSP_SETFMT: int + SNDCTL_DSP_SETFRAGMENT: int + SNDCTL_DSP_SETSPDIF: int + SNDCTL_DSP_SETSYNCRO: int + SNDCTL_DSP_SETTRIGGER: int + SNDCTL_DSP_SPEED: int + SNDCTL_DSP_STEREO: int + SNDCTL_DSP_SUBDIVIDE: int + SNDCTL_DSP_SYNC: int + SNDCTL_FM_4OP_ENABLE: int + SNDCTL_FM_LOAD_INSTR: int + SNDCTL_MIDI_INFO: int + SNDCTL_MIDI_MPUCMD: int + SNDCTL_MIDI_MPUMODE: int + SNDCTL_MIDI_PRETIME: int + SNDCTL_SEQ_CTRLRATE: int + SNDCTL_SEQ_GETINCOUNT: int + SNDCTL_SEQ_GETOUTCOUNT: int + SNDCTL_SEQ_GETTIME: int + SNDCTL_SEQ_NRMIDIS: int + SNDCTL_SEQ_NRSYNTHS: int + SNDCTL_SEQ_OUTOFBAND: int + SNDCTL_SEQ_PANIC: int + SNDCTL_SEQ_PERCMODE: int + SNDCTL_SEQ_RESET: int + SNDCTL_SEQ_RESETSAMPLES: int + SNDCTL_SEQ_SYNC: int + SNDCTL_SEQ_TESTMIDI: int + SNDCTL_SEQ_THRESHOLD: int + SNDCTL_SYNTH_CONTROL: int + SNDCTL_SYNTH_ID: int + SNDCTL_SYNTH_INFO: int + SNDCTL_SYNTH_MEMAVL: int + SNDCTL_SYNTH_REMOVESAMPLE: int + SNDCTL_TMR_CONTINUE: int + SNDCTL_TMR_METRONOME: int + SNDCTL_TMR_SELECT: int + SNDCTL_TMR_SOURCE: int + SNDCTL_TMR_START: int + SNDCTL_TMR_STOP: int + SNDCTL_TMR_TEMPO: int + SNDCTL_TMR_TIMEBASE: int + SOUND_MIXER_ALTPCM: int + SOUND_MIXER_BASS: int + 
SOUND_MIXER_CD: int + SOUND_MIXER_DIGITAL1: int + SOUND_MIXER_DIGITAL2: int + SOUND_MIXER_DIGITAL3: int + SOUND_MIXER_IGAIN: int + SOUND_MIXER_IMIX: int + SOUND_MIXER_LINE: int + SOUND_MIXER_LINE1: int + SOUND_MIXER_LINE2: int + SOUND_MIXER_LINE3: int + SOUND_MIXER_MIC: int + SOUND_MIXER_MONITOR: int + SOUND_MIXER_NRDEVICES: int + SOUND_MIXER_OGAIN: int + SOUND_MIXER_PCM: int + SOUND_MIXER_PHONEIN: int + SOUND_MIXER_PHONEOUT: int + SOUND_MIXER_RADIO: int + SOUND_MIXER_RECLEV: int + SOUND_MIXER_SPEAKER: int + SOUND_MIXER_SYNTH: int + SOUND_MIXER_TREBLE: int + SOUND_MIXER_VIDEO: int + SOUND_MIXER_VOLUME: int -control_labels: list[str] -control_names: list[str] + control_labels: list[str] + control_names: list[str] -# TODO: oss_audio_device return type -@overload -def open(mode: Literal["r", "w", "rw"]) -> Any: ... -@overload -def open(device: str, mode: Literal["r", "w", "rw"]) -> Any: ... + # TODO: oss_audio_device return type + @overload + def open(mode: Literal["r", "w", "rw"]) -> Any: ... + @overload + def open(device: str, mode: Literal["r", "w", "rw"]) -> Any: ... -# TODO: oss_mixer_device return type -def openmixer(device: str = ...) -> Any: ... + # TODO: oss_mixer_device return type + def openmixer(device: str = ...) -> Any: ... -class OSSAudioError(Exception): ... - -error = OSSAudioError + class OSSAudioError(Exception): ... + error = OSSAudioError diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi index cefcad5b08f1..95e770b57256 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/parser.pyi @@ -1,22 +1,24 @@ from _typeshed import StrOrBytesPath from types import CodeType -from typing import Any, Sequence, Tuple +from typing import Any, Sequence +from typing_extensions import final def expr(source: str) -> STType: ... def suite(source: str) -> STType: ... 
def sequence2st(sequence: Sequence[Any]) -> STType: ... def tuple2st(sequence: Sequence[Any]) -> STType: ... def st2list(st: STType, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... -def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> Tuple[Any]: ... +def st2tuple(st: STType, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... def compilest(st: STType, filename: StrOrBytesPath = ...) -> CodeType: ... def isexpr(st: STType) -> bool: ... def issuite(st: STType) -> bool: ... class ParserError(Exception): ... +@final class STType: def compile(self, filename: StrOrBytesPath = ...) -> CodeType: ... def isexpr(self) -> bool: ... def issuite(self) -> bool: ... def tolist(self, line_info: bool = ..., col_info: bool = ...) -> list[Any]: ... - def totuple(self, line_info: bool = ..., col_info: bool = ...) -> Tuple[Any]: ... + def totuple(self, line_info: bool = ..., col_info: bool = ...) -> tuple[Any, ...]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pathlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pathlib.pyi index db1dff36bd8c..891f8fc8bdc9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pathlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pathlib.pyi @@ -11,16 +11,16 @@ from _typeshed import ( from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOWrapper from os import PathLike, stat_result from types import TracebackType -from typing import IO, Any, BinaryIO, Generator, Sequence, Tuple, Type, TypeVar, overload +from typing import IO, Any, BinaryIO, Generator, Sequence, overload from typing_extensions import Literal if sys.version_info >= (3, 9): from types import GenericAlias -_P = TypeVar("_P", bound=PurePath) +__all__ = ["PurePath", "PurePosixPath", "PureWindowsPath", "Path", "PosixPath", "WindowsPath"] class PurePath(PathLike[str]): - parts: Tuple[str, ...] + parts: tuple[str, ...] 
drive: str root: str anchor: str @@ -28,14 +28,15 @@ class PurePath(PathLike[str]): suffix: str suffixes: list[str] stem: str - def __new__(cls: Type[_P], *args: StrPath) -> _P: ... + def __new__(cls: type[Self], *args: StrPath) -> Self: ... def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... def __lt__(self, other: PurePath) -> bool: ... def __le__(self, other: PurePath) -> bool: ... def __gt__(self, other: PurePath) -> bool: ... def __ge__(self, other: PurePath) -> bool: ... - def __truediv__(self: _P, key: StrPath) -> _P: ... - def __rtruediv__(self: _P, key: StrPath) -> _P: ... + def __truediv__(self: Self, key: StrPath) -> Self: ... + def __rtruediv__(self: Self, key: StrPath) -> Self: ... def __bytes__(self) -> bytes: ... def as_posix(self) -> str: ... def as_uri(self) -> str: ... @@ -43,51 +44,56 @@ class PurePath(PathLike[str]): def is_reserved(self) -> bool: ... if sys.version_info >= (3, 9): def is_relative_to(self, *other: StrPath) -> bool: ... + def match(self, path_pattern: str) -> bool: ... - def relative_to(self: _P, *other: StrPath) -> _P: ... - def with_name(self: _P, name: str) -> _P: ... + def relative_to(self: Self, *other: StrPath) -> Self: ... + def with_name(self: Self, name: str) -> Self: ... if sys.version_info >= (3, 9): - def with_stem(self: _P, stem: str) -> _P: ... - def with_suffix(self: _P, suffix: str) -> _P: ... - def joinpath(self: _P, *other: StrPath) -> _P: ... + def with_stem(self: Self, stem: str) -> Self: ... + + def with_suffix(self: Self, suffix: str) -> Self: ... + def joinpath(self: Self, *other: StrPath) -> Self: ... @property - def parents(self: _P) -> Sequence[_P]: ... + def parents(self: Self) -> Sequence[Self]: ... @property - def parent(self: _P) -> _P: ... - if sys.version_info >= (3, 9): + def parent(self: Self) -> Self: ... + if sys.version_info >= (3, 9) and sys.version_info < (3, 11): def __class_getitem__(cls, type: Any) -> GenericAlias: ... class PurePosixPath(PurePath): ... 
class PureWindowsPath(PurePath): ... class Path(PurePath): - def __new__(cls: Type[_P], *args: StrPath, **kwargs: Any) -> _P: ... + def __new__(cls: type[Self], *args: StrPath, **kwargs: Any) -> Self: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> bool | None: ... + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + ) -> None: ... @classmethod - def cwd(cls: Type[_P]) -> _P: ... - def stat(self) -> stat_result: ... - def chmod(self, mode: int) -> None: ... + def cwd(cls: type[Self]) -> Self: ... + if sys.version_info >= (3, 10): + def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... + def chmod(self, mode: int, *, follow_symlinks: bool = ...) -> None: ... + else: + def stat(self) -> stat_result: ... + def chmod(self, mode: int) -> None: ... + def exists(self) -> bool: ... - def glob(self: _P, pattern: str) -> Generator[_P, None, None]: ... - def group(self) -> str: ... + def glob(self: Self, pattern: str) -> Generator[Self, None, None]: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... - if sys.version_info >= (3, 7): - def is_mount(self) -> bool: ... def is_symlink(self) -> bool: ... def is_socket(self) -> bool: ... def is_fifo(self) -> bool: ... def is_block_device(self) -> bool: ... def is_char_device(self) -> bool: ... - def iterdir(self: _P) -> Generator[_P, None, None]: ... + def iterdir(self: Self) -> Generator[Self, None, None]: ... def lchmod(self, mode: int) -> None: ... def lstat(self) -> stat_result: ... def mkdir(self, mode: int = ..., parents: bool = ..., exist_ok: bool = ...) -> None: ... # Adapted from builtins.open # Text mode: always returns a TextIOWrapper + # The Traversable .open in stdlib/importlib/abc.pyi should be kept in sync with this. 
@overload def open( self, @@ -140,33 +146,50 @@ class Path(PurePath): def open( self, mode: str, buffering: int = ..., encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... ) -> IO[Any]: ... - def owner(self) -> str: ... + if sys.platform != "win32": + # These methods do "exist" on Windows, but they always raise NotImplementedError, + # so it's safer to pretend they don't exist + def owner(self) -> str: ... + def group(self) -> str: ... + if sys.version_info >= (3, 7): + def is_mount(self) -> bool: ... + if sys.version_info >= (3, 9): - def readlink(self: _P) -> _P: ... + def readlink(self: Self) -> Self: ... if sys.version_info >= (3, 8): - def rename(self: _P, target: str | PurePath) -> _P: ... - def replace(self: _P, target: str | PurePath) -> _P: ... + def rename(self: Self, target: str | PurePath) -> Self: ... + def replace(self: Self, target: str | PurePath) -> Self: ... else: def rename(self, target: str | PurePath) -> None: ... def replace(self, target: str | PurePath) -> None: ... - def resolve(self: _P, strict: bool = ...) -> _P: ... - def rglob(self: _P, pattern: str) -> Generator[_P, None, None]: ... + + def resolve(self: Self, strict: bool = ...) -> Self: ... + def rglob(self: Self, pattern: str) -> Generator[Self, None, None]: ... def rmdir(self) -> None: ... def symlink_to(self, target: str | Path, target_is_directory: bool = ...) -> None: ... + if sys.version_info >= (3, 10): + def hardlink_to(self, target: str | Path) -> None: ... + def touch(self, mode: int = ..., exist_ok: bool = ...) -> None: ... if sys.version_info >= (3, 8): def unlink(self, missing_ok: bool = ...) -> None: ... else: def unlink(self) -> None: ... + @classmethod - def home(cls: Type[_P]) -> _P: ... - def absolute(self: _P) -> _P: ... - def expanduser(self: _P) -> _P: ... + def home(cls: type[Self]) -> Self: ... + def absolute(self: Self) -> Self: ... + def expanduser(self: Self) -> Self: ... def read_bytes(self) -> bytes: ... 
def read_text(self, encoding: str | None = ..., errors: str | None = ...) -> str: ... def samefile(self, other_path: str | bytes | int | Path) -> bool: ... def write_bytes(self, data: bytes) -> int: ... - def write_text(self, data: str, encoding: str | None = ..., errors: str | None = ...) -> int: ... + if sys.version_info >= (3, 10): + def write_text( + self, data: str, encoding: str | None = ..., errors: str | None = ..., newline: str | None = ... + ) -> int: ... + else: + def write_text(self, data: str, encoding: str | None = ..., errors: str | None = ...) -> int: ... if sys.version_info >= (3, 8): def link_to(self, target: StrPath | bytes) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi index 0a25786d409f..c1cba5e8d23b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pdb.pyi @@ -1,12 +1,17 @@ import signal import sys +from _typeshed import Self from bdb import Bdb from cmd import Cmd from inspect import _SourceObjectType from types import CodeType, FrameType, TracebackType -from typing import IO, Any, Callable, ClassVar, Iterable, Mapping, Sequence, Tuple, TypeVar +from typing import IO, Any, Callable, ClassVar, Iterable, Mapping, Sequence, TypeVar +from typing_extensions import ParamSpec + +__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace", "post_mortem", "help"] _T = TypeVar("_T") +_P = ParamSpec("_P") line_prefix: str # undocumented @@ -15,7 +20,7 @@ class Restart(Exception): ... def run(statement: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> None: ... def runeval(expression: str, globals: dict[str, Any] | None = ..., locals: Mapping[str, Any] | None = ...) -> Any: ... def runctx(statement: str, globals: dict[str, Any], locals: Mapping[str, Any]) -> None: ... 
-def runcall(func: Callable[..., _T], *args: Any, **kwds: Any) -> _T | None: ... +def runcall(func: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> _T | None: ... if sys.version_info >= (3, 7): def set_trace(*, header: str | None = ...) -> None: ... @@ -41,7 +46,7 @@ class Pdb(Bdb, Cmd): commands_defining: bool commands_bnum: int | None lineno: int | None - stack: list[Tuple[FrameType, int]] + stack: list[tuple[FrameType, int]] curindex: int curframe: FrameType | None curframe_locals: Mapping[str, Any] @@ -62,11 +67,11 @@ class Pdb(Bdb, Cmd): def displayhook(self, obj: object) -> None: ... def handle_command_def(self, line: str) -> bool: ... def defaultFile(self) -> str: ... - def lineinfo(self, identifier: str) -> Tuple[None, None, None] | Tuple[str, str, int]: ... + def lineinfo(self, identifier: str) -> tuple[None, None, None] | tuple[str, str, int]: ... def checkline(self, filename: str, lineno: int) -> int: ... def _getval(self, arg: str) -> object: ... def print_stack_trace(self) -> None: ... - def print_stack_entry(self, frame_lineno: Tuple[FrameType, int], prompt_prefix: str = ...) -> None: ... + def print_stack_entry(self, frame_lineno: tuple[FrameType, int], prompt_prefix: str = ...) -> None: ... def lookupmodule(self, filename: str) -> str | None: ... def _runscript(self, filename: str) -> None: ... def do_commands(self, arg: str) -> bool | None: ... @@ -164,11 +169,11 @@ class Pdb(Bdb, Cmd): # undocumented -def find_function(funcname: str, filename: str) -> Tuple[str, str, int] | None: ... +def find_function(funcname: str, filename: str) -> tuple[str, str, int] | None: ... def main() -> None: ... def help() -> None: ... -def getsourcelines(obj: _SourceObjectType) -> Tuple[list[str], int]: ... +def getsourcelines(obj: _SourceObjectType) -> tuple[list[str], int]: ... def lasti2lineno(code: CodeType, lasti: int) -> int: ... class _rstr(str): - def __repr__(self) -> _rstr: ... + def __repr__(self: Self) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pickle.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pickle.pyi index 19564f31178e..fffd1e1ebd53 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pickle.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pickle.pyi @@ -1,13 +1,22 @@ import sys -from typing import IO, Any, Callable, ClassVar, Iterable, Iterator, Mapping, Optional, Tuple, Type, Union +from typing import Any, Callable, ClassVar, Iterable, Iterator, Mapping, Optional, Protocol, Union +from typing_extensions import final HIGHEST_PROTOCOL: int DEFAULT_PROTOCOL: int -bytes_types: Tuple[Type[Any], ...] # undocumented +bytes_types: tuple[type[Any], ...] # undocumented + +class _ReadableFileobj(Protocol): + def read(self, __n: int) -> bytes: ... + def readline(self) -> bytes: ... + +class _WritableFileobj(Protocol): + def write(self, __b: bytes) -> Any: ... if sys.version_info >= (3, 8): # TODO: holistic design for buffer interface (typing.Buffer?) + @final class PickleBuffer: # buffer must be a buffer-providing object def __init__(self, buffer: Any) -> None: ... @@ -15,22 +24,32 @@ if sys.version_info >= (3, 8): def release(self) -> None: ... _BufferCallback = Optional[Callable[[PickleBuffer], Any]] def dump( - obj: Any, file: IO[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., buffer_callback: _BufferCallback = ... + obj: Any, + file: _WritableFileobj, + protocol: int | None = ..., + *, + fix_imports: bool = ..., + buffer_callback: _BufferCallback = ..., ) -> None: ... def dumps( obj: Any, protocol: int | None = ..., *, fix_imports: bool = ..., buffer_callback: _BufferCallback = ... ) -> bytes: ... def load( - file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ..., buffers: Iterable[Any] | None = ... + file: _ReadableFileobj, + *, + fix_imports: bool = ..., + encoding: str = ..., + errors: str = ..., + buffers: Iterable[Any] | None = ..., ) -> Any: ... 
def loads( __data: bytes, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ..., buffers: Iterable[Any] | None = ... ) -> Any: ... else: - def dump(obj: Any, file: IO[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def dump(obj: Any, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... def dumps(obj: Any, protocol: int | None = ..., *, fix_imports: bool = ...) -> bytes: ... - def load(file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... + def load(file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... def loads(data: bytes, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> Any: ... class PickleError(Exception): ... @@ -39,10 +58,10 @@ class UnpicklingError(PickleError): ... _reducedtype = Union[ str, - Tuple[Callable[..., Any], Tuple[Any, ...]], - Tuple[Callable[..., Any], Tuple[Any, ...], Any], - Tuple[Callable[..., Any], Tuple[Any, ...], Any, Optional[Iterator[Any]]], - Tuple[Callable[..., Any], Tuple[Any, ...], Any, Optional[Iterator[Any]], Optional[Iterator[Any]]], + tuple[Callable[..., Any], tuple[Any, ...]], + tuple[Callable[..., Any], tuple[Any, ...], Any], + tuple[Callable[..., Any], tuple[Any, ...], Any, Optional[Iterator[Any]]], + tuple[Callable[..., Any], tuple[Any, ...], Any, Optional[Iterator[Any]], Optional[Iterator[Any]]], ] class Pickler: @@ -53,11 +72,17 @@ class Pickler: if sys.version_info >= (3, 8): def __init__( - self, file: IO[bytes], protocol: int | None = ..., *, fix_imports: bool = ..., buffer_callback: _BufferCallback = ... + self, + file: _WritableFileobj, + protocol: int | None = ..., + *, + fix_imports: bool = ..., + buffer_callback: _BufferCallback = ..., ) -> None: ... def reducer_override(self, obj: Any) -> Any: ... else: - def __init__(self, file: IO[bytes], protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... 
+ def __init__(self, file: _WritableFileobj, protocol: int | None = ..., *, fix_imports: bool = ...) -> None: ... + def dump(self, __obj: Any) -> None: ... def clear_memo(self) -> None: ... def persistent_id(self, obj: Any) -> Any: ... @@ -68,7 +93,7 @@ class Unpickler: if sys.version_info >= (3, 8): def __init__( self, - file: IO[bytes], + file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., @@ -76,7 +101,10 @@ class Unpickler: buffers: Iterable[Any] | None = ..., ) -> None: ... else: - def __init__(self, file: IO[bytes], *, fix_imports: bool = ..., encoding: str = ..., errors: str = ...) -> None: ... + def __init__( + self, file: _ReadableFileobj, *, fix_imports: bool = ..., encoding: str = ..., errors: str = ... + ) -> None: ... + def load(self) -> Any: ... def find_class(self, __module_name: str, __global_name: str) -> Any: ... def persistent_load(self, pid: Any) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pickletools.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pickletools.pyi index 0f0fb47da5f9..7b79ddcff347 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pickletools.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pickletools.pyi @@ -1,7 +1,9 @@ -from typing import IO, Any, Callable, Iterator, MutableMapping, Tuple, Type +from typing import IO, Any, Callable, Iterator, MutableMapping + +__all__ = ["dis", "genops", "optimize"] _Reader = Callable[[IO[bytes]], Any] -bytes_types: Tuple[Type[Any], ...] +bytes_types: tuple[type[Any], ...] UP_TO_NEWLINE: int TAKEN_FROM_ARGUMENT1: int @@ -9,7 +11,7 @@ TAKEN_FROM_ARGUMENT4: int TAKEN_FROM_ARGUMENT4U: int TAKEN_FROM_ARGUMENT8U: int -class ArgumentDescriptor(object): +class ArgumentDescriptor: name: str n: int reader: _Reader @@ -106,11 +108,11 @@ def read_long4(f: IO[bytes]) -> int: ... long4: ArgumentDescriptor -class StackObject(object): +class StackObject: name: str - obtype: Type[Any] | Tuple[Type[Any], ...] 
+ obtype: type[Any] | tuple[type[Any], ...] doc: str - def __init__(self, name: str, obtype: Type[Any] | Tuple[Type[Any], ...], doc: str) -> None: ... + def __init__(self, name: str, obtype: type[Any] | tuple[type[Any], ...], doc: str) -> None: ... pyint: StackObject pylong: StackObject @@ -131,7 +133,7 @@ anyobject: StackObject markobject: StackObject stackslice: StackObject -class OpcodeInfo(object): +class OpcodeInfo: name: str code: str arg: ArgumentDescriptor | None @@ -152,7 +154,7 @@ class OpcodeInfo(object): opcodes: list[OpcodeInfo] -def genops(pickle: bytes | IO[bytes]) -> Iterator[Tuple[OpcodeInfo, Any | None, int | None]]: ... +def genops(pickle: bytes | IO[bytes]) -> Iterator[tuple[OpcodeInfo, Any | None, int | None]]: ... def optimize(p: bytes | IO[bytes]) -> bytes: ... def dis( pickle: bytes | IO[bytes], diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pipes.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pipes.pyi index fb9d7e4e1d16..d6bbd7eafac3 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pipes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pipes.pyi @@ -1,5 +1,7 @@ import os +__all__ = ["Template"] + class Template: def __init__(self) -> None: ... def reset(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pkgutil.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pkgutil.pyi index 54e0f22e4915..607c6b371507 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pkgutil.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pkgutil.pyi @@ -1,7 +1,22 @@ import sys from _typeshed import SupportsRead from importlib.abc import Loader, MetaPathFinder, PathEntryFinder -from typing import IO, Any, Callable, Iterable, Iterator, NamedTuple, Tuple +from typing import IO, Any, Callable, Iterable, Iterator, NamedTuple + +__all__ = [ + "get_importer", + "iter_importers", + "get_loader", + "find_loader", + "walk_packages", + "iter_modules", + "get_data", + "ImpImporter", + "ImpLoader", + "read_code", + "extend_path", + "ModuleInfo", +] class ModuleInfo(NamedTuple): module_finder: MetaPathFinder | PathEntryFinder @@ -14,11 +29,11 @@ class ImpImporter: def __init__(self, path: str | None = ...) -> None: ... class ImpLoader: - def __init__(self, fullname: str, file: IO[str], filename: str, etc: Tuple[str, str, int]) -> None: ... + def __init__(self, fullname: str, file: IO[str], filename: str, etc: tuple[str, str, int]) -> None: ... def find_loader(fullname: str) -> Loader | None: ... def get_importer(path_item: str) -> PathEntryFinder | None: ... -def get_loader(module_or_name: str) -> Loader: ... +def get_loader(module_or_name: str) -> Loader | None: ... def iter_importers(fullname: str = ...) -> Iterator[MetaPathFinder | PathEntryFinder]: ... def iter_modules(path: Iterable[str] | None = ..., prefix: str = ...) -> Iterator[ModuleInfo]: ... def read_code(stream: SupportsRead[bytes]) -> Any: ... 
# undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/platform.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/platform.pyi index d0f08a20fcd6..765a7a5ea5f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/platform.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/platform.pyi @@ -4,40 +4,40 @@ if sys.version_info < (3, 8): import os DEV_NULL = os.devnull -from typing import NamedTuple, Tuple +from typing import NamedTuple if sys.version_info >= (3, 8): - def libc_ver(executable: str | None = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> Tuple[str, str]: ... + def libc_ver(executable: str | None = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> tuple[str, str]: ... else: - def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> Tuple[str, str]: ... + def libc_ver(executable: str = ..., lib: str = ..., version: str = ..., chunksize: int = ...) -> tuple[str, str]: ... if sys.version_info < (3, 8): def linux_distribution( distname: str = ..., version: str = ..., id: str = ..., - supported_dists: Tuple[str, ...] = ..., + supported_dists: tuple[str, ...] = ..., full_distribution_name: bool = ..., - ) -> Tuple[str, str, str]: ... + ) -> tuple[str, str, str]: ... def dist( - distname: str = ..., version: str = ..., id: str = ..., supported_dists: Tuple[str, ...] = ... - ) -> Tuple[str, str, str]: ... + distname: str = ..., version: str = ..., id: str = ..., supported_dists: tuple[str, ...] = ... + ) -> tuple[str, str, str]: ... -def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> Tuple[str, str, str, str]: ... +def win32_ver(release: str = ..., version: str = ..., csd: str = ..., ptype: str = ...) -> tuple[str, str, str, str]: ... if sys.version_info >= (3, 8): def win32_edition() -> str: ... def win32_is_iot() -> bool: ... 
def mac_ver( - release: str = ..., versioninfo: Tuple[str, str, str] = ..., machine: str = ... -) -> Tuple[str, Tuple[str, str, str], str]: ... + release: str = ..., versioninfo: tuple[str, str, str] = ..., machine: str = ... +) -> tuple[str, tuple[str, str, str], str]: ... def java_ver( - release: str = ..., vendor: str = ..., vminfo: Tuple[str, str, str] = ..., osinfo: Tuple[str, str, str] = ... -) -> Tuple[str, str, Tuple[str, str, str], Tuple[str, str, str]]: ... -def system_alias(system: str, release: str, version: str) -> Tuple[str, str, str]: ... -def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) -> Tuple[str, str]: ... + release: str = ..., vendor: str = ..., vminfo: tuple[str, str, str] = ..., osinfo: tuple[str, str, str] = ... +) -> tuple[str, str, tuple[str, str, str], tuple[str, str, str]]: ... +def system_alias(system: str, release: str, version: str) -> tuple[str, str, str]: ... +def architecture(executable: str = ..., bits: str = ..., linkage: str = ...) -> tuple[str, str]: ... class uname_result(NamedTuple): system: str @@ -56,9 +56,12 @@ def machine() -> str: ... def processor() -> str: ... def python_implementation() -> str: ... def python_version() -> str: ... -def python_version_tuple() -> Tuple[str, str, str]: ... +def python_version_tuple() -> tuple[str, str, str]: ... def python_branch() -> str: ... def python_revision() -> str: ... -def python_build() -> Tuple[str, str]: ... +def python_build() -> tuple[str, str]: ... def python_compiler() -> str: ... def platform(aliased: bool = ..., terse: bool = ...) -> str: ... + +if sys.version_info >= (3, 10): + def freedesktop_os_release() -> dict[str, str]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/plistlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/plistlib.pyi index 3b5c2b78c81a..67d1611de828 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/plistlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/plistlib.pyi @@ -1,6 +1,59 @@ import sys +from _typeshed import Self +from datetime import datetime from enum import Enum -from typing import IO, Any, Dict as _Dict, Mapping, MutableMapping, Type +from typing import IO, Any, Mapping, MutableMapping + +if sys.version_info >= (3, 9): + __all__ = ["InvalidFileException", "FMT_XML", "FMT_BINARY", "load", "dump", "loads", "dumps", "UID"] +elif sys.version_info >= (3, 8): + __all__ = [ + "readPlist", + "writePlist", + "readPlistFromBytes", + "writePlistToBytes", + "Data", + "InvalidFileException", + "FMT_XML", + "FMT_BINARY", + "load", + "dump", + "loads", + "dumps", + "UID", + ] +elif sys.version_info >= (3, 7): + __all__ = [ + "readPlist", + "writePlist", + "readPlistFromBytes", + "writePlistToBytes", + "Data", + "InvalidFileException", + "FMT_XML", + "FMT_BINARY", + "load", + "dump", + "loads", + "dumps", + ] +else: + __all__ = [ + "readPlist", + "writePlist", + "readPlistFromBytes", + "writePlistToBytes", + "Plist", + "Data", + "Dict", + "InvalidFileException", + "FMT_XML", + "FMT_BINARY", + "load", + "dump", + "loads", + "dumps", + ] class PlistFormat(Enum): FMT_XML: int @@ -10,8 +63,8 @@ FMT_XML = PlistFormat.FMT_XML FMT_BINARY = PlistFormat.FMT_BINARY if sys.version_info >= (3, 9): - def load(fp: IO[bytes], *, fmt: PlistFormat | None = ..., dict_type: Type[MutableMapping[str, Any]] = ...) -> Any: ... - def loads(value: bytes, *, fmt: PlistFormat | None = ..., dict_type: Type[MutableMapping[str, Any]] = ...) -> Any: ... + def load(fp: IO[bytes], *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... 
+ def loads(value: bytes, *, fmt: PlistFormat | None = ..., dict_type: type[MutableMapping[str, Any]] = ...) -> Any: ... else: def load( @@ -19,20 +72,31 @@ else: *, fmt: PlistFormat | None = ..., use_builtin_types: bool = ..., - dict_type: Type[MutableMapping[str, Any]] = ..., + dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def loads( value: bytes, *, fmt: PlistFormat | None = ..., use_builtin_types: bool = ..., - dict_type: Type[MutableMapping[str, Any]] = ..., + dict_type: type[MutableMapping[str, Any]] = ..., ) -> Any: ... def dump( - value: Mapping[str, Any], fp: IO[bytes], *, fmt: PlistFormat = ..., sort_keys: bool = ..., skipkeys: bool = ... + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + fp: IO[bytes], + *, + fmt: PlistFormat = ..., + sort_keys: bool = ..., + skipkeys: bool = ..., ) -> None: ... -def dumps(value: Mapping[str, Any], *, fmt: PlistFormat = ..., skipkeys: bool = ..., sort_keys: bool = ...) -> bytes: ... +def dumps( + value: Mapping[str, Any] | list[Any] | tuple[Any, ...] | str | bool | float | bytes | datetime, + *, + fmt: PlistFormat = ..., + skipkeys: bool = ..., + sort_keys: bool = ..., +) -> bytes: ... if sys.version_info < (3, 9): def readPlist(pathOrFile: str | IO[bytes]) -> Any: ... @@ -41,11 +105,20 @@ if sys.version_info < (3, 9): def writePlistToBytes(value: Mapping[str, Any]) -> bytes: ... if sys.version_info < (3, 7): - class Dict(_Dict[str, Any]): + class _InternalDict(dict[str, Any]): def __getattr__(self, attr: str) -> Any: ... def __setattr__(self, attr: str, value: Any) -> None: ... def __delattr__(self, attr: str) -> None: ... + class Dict(_InternalDict): # deprecated + def __init__(self, **kwargs: Any) -> None: ... + + class Plist(_InternalDict): # deprecated + def __init__(self, **kwargs: Any) -> None: ... + @classmethod + def fromFile(cls: type[Self], pathOrFile: str | IO[bytes]) -> Self: ... + def write(self, pathOrFile: str | IO[bytes]) -> None: ... 
+ if sys.version_info < (3, 9): class Data: data: bytes @@ -56,8 +129,9 @@ if sys.version_info >= (3, 8): data: int def __init__(self, data: int) -> None: ... def __index__(self) -> int: ... - def __reduce__(self) -> Any: ... + def __reduce__(self: Self) -> tuple[type[Self], tuple[int]]: ... def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... class InvalidFileException(ValueError): def __init__(self, message: str = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/poplib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/poplib.pyi index 8d19067f0445..111e5c4ca76a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/poplib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/poplib.pyi @@ -1,16 +1,26 @@ import socket import ssl -from typing import Any, BinaryIO, List, Pattern, Tuple, overload +import sys +from typing import Any, BinaryIO, NoReturn, Pattern, overload +from typing_extensions import Literal -_LongResp = Tuple[bytes, List[bytes], int] +if sys.version_info >= (3, 10): + __all__ = ["POP3", "error_proto", "POP3_SSL"] +else: + __all__ = ["POP3", "error_proto"] + +_LongResp = tuple[bytes, list[bytes], int] class error_proto(Exception): ... -POP3_PORT: int -POP3_SSL_PORT: int -CR: bytes -LF: bytes -CRLF: bytes +POP3_PORT: Literal[110] +POP3_SSL_PORT: Literal[995] +CR: Literal[b"\r"] +LF: Literal[b"\n"] +CRLF: Literal[b"\r\n"] +HAVE_SSL: bool + +_list = list # conflicts with a method named "list" class POP3: encoding: str @@ -24,7 +34,7 @@ class POP3: def set_debuglevel(self, level: int) -> None: ... def user(self, user: str) -> bytes: ... def pass_(self, pswd: str) -> bytes: ... - def stat(self) -> Tuple[int, int]: ... + def stat(self) -> tuple[int, int]: ... def list(self, which: Any | None = ...) -> _LongResp: ... def retr(self, which: Any) -> _LongResp: ... def dele(self, which: Any) -> bytes: ... 
@@ -41,7 +51,7 @@ class POP3: @overload def uidl(self, which: Any) -> bytes: ... def utf8(self) -> bytes: ... - def capa(self) -> dict[str, List[str]]: ... + def capa(self) -> dict[str, _list[str]]: ... def stls(self, context: ssl.SSLContext | None = ...) -> bytes: ... class POP3_SSL(POP3): @@ -55,4 +65,4 @@ class POP3_SSL(POP3): context: ssl.SSLContext | None = ..., ) -> None: ... # "context" is actually the last argument, but that breaks LSP and it doesn't really matter because all the arguments are ignored - def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> bytes: ... + def stls(self, context: Any = ..., keyfile: Any = ..., certfile: Any = ...) -> NoReturn: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/posix.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/posix.pyi index aef3b54413a4..9f658039bcf2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/posix.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/posix.pyi @@ -1,201 +1,322 @@ import sys -from _typeshed import StrOrBytesPath -from os import PathLike, _ExecEnv, _ExecVArgs, stat_result as stat_result -from typing import Any, Iterable, NamedTuple, Sequence, Tuple, overload -class uname_result(NamedTuple): - sysname: str - nodename: str - release: str - version: str - machine: str +if sys.platform != "win32": + # Actually defined here, but defining in os allows sharing code with windows + from os import ( + CLD_CONTINUED as CLD_CONTINUED, + CLD_DUMPED as CLD_DUMPED, + CLD_EXITED as CLD_EXITED, + CLD_TRAPPED as CLD_TRAPPED, + EX_CANTCREAT as EX_CANTCREAT, + EX_CONFIG as EX_CONFIG, + EX_DATAERR as EX_DATAERR, + EX_IOERR as EX_IOERR, + EX_NOHOST as EX_NOHOST, + EX_NOINPUT as EX_NOINPUT, + EX_NOPERM as EX_NOPERM, + EX_NOTFOUND as EX_NOTFOUND, + EX_NOUSER as EX_NOUSER, + EX_OK as EX_OK, + EX_OSERR as EX_OSERR, + EX_OSFILE as EX_OSFILE, + EX_PROTOCOL as EX_PROTOCOL, + EX_SOFTWARE as EX_SOFTWARE, + EX_TEMPFAIL as EX_TEMPFAIL, + 
EX_UNAVAILABLE as EX_UNAVAILABLE, + EX_USAGE as EX_USAGE, + F_LOCK as F_LOCK, + F_OK as F_OK, + F_TEST as F_TEST, + F_TLOCK as F_TLOCK, + F_ULOCK as F_ULOCK, + O_APPEND as O_APPEND, + O_ASYNC as O_ASYNC, + O_CREAT as O_CREAT, + O_DIRECT as O_DIRECT, + O_DIRECTORY as O_DIRECTORY, + O_DSYNC as O_DSYNC, + O_EXCL as O_EXCL, + O_LARGEFILE as O_LARGEFILE, + O_NDELAY as O_NDELAY, + O_NOATIME as O_NOATIME, + O_NOCTTY as O_NOCTTY, + O_NOFOLLOW as O_NOFOLLOW, + O_NONBLOCK as O_NONBLOCK, + O_RDONLY as O_RDONLY, + O_RDWR as O_RDWR, + O_RSYNC as O_RSYNC, + O_SYNC as O_SYNC, + O_TRUNC as O_TRUNC, + O_WRONLY as O_WRONLY, + P_ALL as P_ALL, + P_PGID as P_PGID, + P_PID as P_PID, + PRIO_PGRP as PRIO_PGRP, + PRIO_PROCESS as PRIO_PROCESS, + PRIO_USER as PRIO_USER, + R_OK as R_OK, + RTLD_GLOBAL as RTLD_GLOBAL, + RTLD_LAZY as RTLD_LAZY, + RTLD_LOCAL as RTLD_LOCAL, + RTLD_NODELETE as RTLD_NODELETE, + RTLD_NOLOAD as RTLD_NOLOAD, + RTLD_NOW as RTLD_NOW, + SCHED_BATCH as SCHED_BATCH, + SCHED_FIFO as SCHED_FIFO, + SCHED_IDLE as SCHED_IDLE, + SCHED_OTHER as SCHED_OTHER, + SCHED_RESET_ON_FORK as SCHED_RESET_ON_FORK, + SCHED_RR as SCHED_RR, + SCHED_SPORADIC as SCHED_SPORADIC, + SEEK_DATA as SEEK_DATA, + SEEK_HOLE as SEEK_HOLE, + ST_NOSUID as ST_NOSUID, + ST_RDONLY as ST_RDONLY, + TMP_MAX as TMP_MAX, + W_OK as W_OK, + WCONTINUED as WCONTINUED, + WCOREDUMP as WCOREDUMP, + WEXITED as WEXITED, + WEXITSTATUS as WEXITSTATUS, + WIFCONTINUED as WIFCONTINUED, + WIFEXITED as WIFEXITED, + WIFSIGNALED as WIFSIGNALED, + WIFSTOPPED as WIFSTOPPED, + WNOHANG as WNOHANG, + WNOWAIT as WNOWAIT, + WSTOPPED as WSTOPPED, + WSTOPSIG as WSTOPSIG, + WTERMSIG as WTERMSIG, + WUNTRACED as WUNTRACED, + X_OK as X_OK, + DirEntry as DirEntry, + _exit as _exit, + abort as abort, + access as access, + chdir as chdir, + chmod as chmod, + chown as chown, + chroot as chroot, + close as close, + closerange as closerange, + confstr as confstr, + confstr_names as confstr_names, + cpu_count as cpu_count, + ctermid as ctermid, + 
device_encoding as device_encoding, + dup as dup, + dup2 as dup2, + error as error, + execv as execv, + execve as execve, + fchdir as fchdir, + fchmod as fchmod, + fchown as fchown, + fork as fork, + forkpty as forkpty, + fpathconf as fpathconf, + fspath as fspath, + fstat as fstat, + fstatvfs as fstatvfs, + fsync as fsync, + ftruncate as ftruncate, + get_blocking as get_blocking, + get_inheritable as get_inheritable, + get_terminal_size as get_terminal_size, + getcwd as getcwd, + getcwdb as getcwdb, + getegid as getegid, + geteuid as geteuid, + getgid as getgid, + getgrouplist as getgrouplist, + getgroups as getgroups, + getloadavg as getloadavg, + getlogin as getlogin, + getpgid as getpgid, + getpgrp as getpgrp, + getpid as getpid, + getppid as getppid, + getpriority as getpriority, + getsid as getsid, + getuid as getuid, + initgroups as initgroups, + isatty as isatty, + kill as kill, + killpg as killpg, + lchown as lchown, + link as link, + listdir as listdir, + lockf as lockf, + lseek as lseek, + lstat as lstat, + major as major, + makedev as makedev, + minor as minor, + mkdir as mkdir, + mkfifo as mkfifo, + mknod as mknod, + nice as nice, + open as open, + openpty as openpty, + pathconf as pathconf, + pathconf_names as pathconf_names, + pipe as pipe, + pread as pread, + putenv as putenv, + pwrite as pwrite, + read as read, + readlink as readlink, + readv as readv, + remove as remove, + rename as rename, + replace as replace, + rmdir as rmdir, + scandir as scandir, + sched_get_priority_max as sched_get_priority_max, + sched_get_priority_min as sched_get_priority_min, + sched_param as sched_param, + sched_yield as sched_yield, + sendfile as sendfile, + set_blocking as set_blocking, + set_inheritable as set_inheritable, + setegid as setegid, + seteuid as seteuid, + setgid as setgid, + setgroups as setgroups, + setpgid as setpgid, + setpgrp as setpgrp, + setpriority as setpriority, + setregid as setregid, + setreuid as setreuid, + setsid as setsid, + setuid as 
setuid, + stat as stat, + stat_result as stat_result, + statvfs as statvfs, + statvfs_result as statvfs_result, + strerror as strerror, + symlink as symlink, + sync as sync, + sysconf as sysconf, + sysconf_names as sysconf_names, + system as system, + tcgetpgrp as tcgetpgrp, + tcsetpgrp as tcsetpgrp, + terminal_size as terminal_size, + times as times, + times_result as times_result, + truncate as truncate, + ttyname as ttyname, + umask as umask, + uname as uname, + uname_result as uname_result, + unlink as unlink, + unsetenv as unsetenv, + urandom as urandom, + utime as utime, + wait as wait, + wait3 as wait3, + wait4 as wait4, + waitpid as waitpid, + write as write, + writev as writev, + ) -class times_result(NamedTuple): - user: float - system: float - children_user: float - children_system: float - elapsed: float + if sys.platform == "linux": + from os import ( + GRND_NONBLOCK as GRND_NONBLOCK, + GRND_RANDOM as GRND_RANDOM, + RTLD_DEEPBIND as RTLD_DEEPBIND, + XATTR_CREATE as XATTR_CREATE, + XATTR_REPLACE as XATTR_REPLACE, + XATTR_SIZE_MAX as XATTR_SIZE_MAX, + getrandom as getrandom, + getxattr as getxattr, + listxattr as listxattr, + removexattr as removexattr, + setxattr as setxattr, + ) + else: + from os import chflags as chflags, lchflags as lchflags, lchmod as lchmod -if sys.platform != "darwin": - class waitid_result(NamedTuple): - si_pid: int - si_uid: int - si_signo: int - si_status: int - si_code: int + if sys.platform != "darwin": + from os import ( + POSIX_FADV_DONTNEED as POSIX_FADV_DONTNEED, + POSIX_FADV_NOREUSE as POSIX_FADV_NOREUSE, + POSIX_FADV_NORMAL as POSIX_FADV_NORMAL, + POSIX_FADV_RANDOM as POSIX_FADV_RANDOM, + POSIX_FADV_SEQUENTIAL as POSIX_FADV_SEQUENTIAL, + POSIX_FADV_WILLNEED as POSIX_FADV_WILLNEED, + fdatasync as fdatasync, + getresgid as getresgid, + getresuid as getresuid, + pipe2 as pipe2, + posix_fadvise as posix_fadvise, + posix_fallocate as posix_fallocate, + sched_getaffinity as sched_getaffinity, + sched_getparam as 
sched_getparam, + sched_getscheduler as sched_getscheduler, + sched_rr_get_interval as sched_rr_get_interval, + sched_setaffinity as sched_setaffinity, + sched_setparam as sched_setparam, + sched_setscheduler as sched_setscheduler, + setresgid as setresgid, + setresuid as setresuid, + waitid as waitid, + waitid_result as waitid_result, + ) -class sched_param(NamedTuple): - sched_priority: int + if sys.version_info >= (3, 10): + from os import RWF_APPEND as RWF_APPEND -CLD_CONTINUED: int -CLD_DUMPED: int -CLD_EXITED: int -CLD_TRAPPED: int + if sys.version_info >= (3, 9): + from os import CLD_KILLED as CLD_KILLED, CLD_STOPPED as CLD_STOPPED, waitstatus_to_exitcode as waitstatus_to_exitcode -EX_CANTCREAT: int -EX_CONFIG: int -EX_DATAERR: int -EX_IOERR: int -EX_NOHOST: int -EX_NOINPUT: int -EX_NOPERM: int -EX_NOTFOUND: int -EX_NOUSER: int -EX_OK: int -EX_OSERR: int -EX_OSFILE: int -EX_PROTOCOL: int -EX_SOFTWARE: int -EX_TEMPFAIL: int -EX_UNAVAILABLE: int -EX_USAGE: int + if sys.platform == "linux": + from os import P_PIDFD as P_PIDFD + if sys.version_info >= (3, 8): + from os import ( + POSIX_SPAWN_CLOSE as POSIX_SPAWN_CLOSE, + POSIX_SPAWN_DUP2 as POSIX_SPAWN_DUP2, + POSIX_SPAWN_OPEN as POSIX_SPAWN_OPEN, + posix_spawn as posix_spawn, + posix_spawnp as posix_spawnp, + ) -F_OK: int -R_OK: int -W_OK: int -X_OK: int + if sys.platform == "linux": + from os import ( + MFD_ALLOW_SEALING as MFD_ALLOW_SEALING, + MFD_CLOEXEC as MFD_CLOEXEC, + MFD_HUGE_1GB as MFD_HUGE_1GB, + MFD_HUGE_1MB as MFD_HUGE_1MB, + MFD_HUGE_2GB as MFD_HUGE_2GB, + MFD_HUGE_2MB as MFD_HUGE_2MB, + MFD_HUGE_8MB as MFD_HUGE_8MB, + MFD_HUGE_16GB as MFD_HUGE_16GB, + MFD_HUGE_16MB as MFD_HUGE_16MB, + MFD_HUGE_32MB as MFD_HUGE_32MB, + MFD_HUGE_64KB as MFD_HUGE_64KB, + MFD_HUGE_256MB as MFD_HUGE_256MB, + MFD_HUGE_512KB as MFD_HUGE_512KB, + MFD_HUGE_512MB as MFD_HUGE_512MB, + MFD_HUGE_MASK as MFD_HUGE_MASK, + MFD_HUGE_SHIFT as MFD_HUGE_SHIFT, + MFD_HUGETLB as MFD_HUGETLB, + memfd_create as memfd_create, + ) + if 
sys.version_info >= (3, 7): + from os import register_at_fork as register_at_fork -F_LOCK: int -F_TEST: int -F_TLOCK: int -F_ULOCK: int + if sys.platform != "darwin": + from os import ( + RWF_DSYNC as RWF_DSYNC, + RWF_HIPRI as RWF_HIPRI, + RWF_NOWAIT as RWF_NOWAIT, + RWF_SYNC as RWF_SYNC, + preadv as preadv, + pwritev as pwritev, + ) -if sys.platform == "linux": - GRND_NONBLOCK: int - GRND_RANDOM: int -NGROUPS_MAX: int - -O_APPEND: int -O_ACCMODE: int -O_ASYNC: int -O_CREAT: int -O_DIRECT: int -O_DIRECTORY: int -O_DSYNC: int -O_EXCL: int -O_LARGEFILE: int -O_NDELAY: int -O_NOATIME: int -O_NOCTTY: int -O_NOFOLLOW: int -O_NONBLOCK: int -O_RDONLY: int -O_RDWR: int -O_RSYNC: int -O_SYNC: int -O_TRUNC: int -O_WRONLY: int - -if sys.platform != "darwin": - POSIX_FADV_DONTNEED: int - POSIX_FADV_NOREUSE: int - POSIX_FADV_NORMAL: int - POSIX_FADV_RANDOM: int - POSIX_FADV_SEQUENTIAL: int - POSIX_FADV_WILLNEED: int - -PRIO_PGRP: int -PRIO_PROCESS: int -PRIO_USER: int - -P_ALL: int -P_PGID: int -P_PID: int - -if sys.platform == "linux": - RTLD_DEEPBIND: int -RTLD_GLOBAL: int -RTLD_LAZY: int -RTLD_LOCAL: int -RTLD_NODELETE: int -RTLD_NOLOAD: int -RTLD_NOW: int - -SCHED_FIFO: int -SCHED_OTHER: int -SCHED_RR: int - -if sys.platform == "linux": - SCHED_BATCH: int - SCHED_IDLE: int -if sys.platform != "darwin": - SCHED_RESET_ON_FORK: int - -SEEK_DATA: int -SEEK_HOLE: int - -ST_APPEND: int -ST_MANDLOCK: int -ST_NOATIME: int -ST_NODEV: int -ST_NODIRATIME: int -ST_NOEXEC: int -ST_NOSUID: int -ST_RDONLY: int -ST_RELATIME: int -ST_SYNCHRONOUS: int -ST_WRITE: int - -TMP_MAX: int -WCONTINUED: int - -def WCOREDUMP(__status: int) -> bool: ... -def WEXITSTATUS(status: int) -> int: ... -def WIFCONTINUED(status: int) -> bool: ... -def WIFEXITED(status: int) -> bool: ... -def WIFSIGNALED(status: int) -> bool: ... -def WIFSTOPPED(status: int) -> bool: ... - -WNOHANG: int - -def WSTOPSIG(status: int) -> int: ... -def WTERMSIG(status: int) -> int: ... 
- -WUNTRACED: int - -XATTR_CREATE: int -XATTR_REPLACE: int -XATTR_SIZE_MAX: int - -@overload -def listdir(path: str | None = ...) -> list[str]: ... -@overload -def listdir(path: bytes) -> list[bytes]: ... -@overload -def listdir(path: int) -> list[str]: ... -@overload -def listdir(path: PathLike[str]) -> list[str]: ... - -if sys.platform != "win32" and sys.version_info >= (3, 8): - def posix_spawn( - path: StrOrBytesPath, - argv: _ExecVArgs, - env: _ExecEnv, - *, - file_actions: Sequence[Tuple[Any, ...]] | None = ..., - setpgroup: int | None = ..., - resetids: bool = ..., - setsid: bool = ..., - setsigmask: Iterable[int] = ..., - setsigdef: Iterable[int] = ..., - scheduler: Tuple[Any, sched_param] | None = ..., - ) -> int: ... - def posix_spawnp( - path: StrOrBytesPath, - argv: _ExecVArgs, - env: _ExecEnv, - *, - file_actions: Sequence[Tuple[Any, ...]] | None = ..., - setpgroup: int | None = ..., - resetids: bool = ..., - setsid: bool = ..., - setsigmask: Iterable[int] = ..., - setsigdef: Iterable[int] = ..., - scheduler: Tuple[Any, sched_param] | None = ..., - ) -> int: ... 
- -if sys.platform == "win32": - environ: dict[str, str] -else: + # Not same as os.environ or os.environb + # Because of this variable, we can't do "from posix import *" in os/__init__.pyi environ: dict[bytes, bytes] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/posixpath.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/posixpath.pyi index cc5e5cb8ed50..c72ba8a99bdd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/posixpath.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/posixpath.pyi @@ -14,7 +14,48 @@ from genericpath import ( samestat as samestat, ) from os import PathLike -from typing import AnyStr, Sequence, Tuple, overload +from typing import AnyStr, Sequence, overload + +__all__ = [ + "normcase", + "isabs", + "join", + "splitdrive", + "split", + "splitext", + "basename", + "dirname", + "commonprefix", + "getsize", + "getmtime", + "getatime", + "getctime", + "islink", + "exists", + "lexists", + "isdir", + "isfile", + "ismount", + "expanduser", + "expandvars", + "normpath", + "abspath", + "samefile", + "sameopenfile", + "samestat", + "curdir", + "pardir", + "sep", + "pathsep", + "defpath", + "altsep", + "extsep", + "devnull", + "realpath", + "supports_unicode_filenames", + "relpath", + "commonpath", +] supports_unicode_filenames: bool # aliases (also in os) @@ -60,10 +101,14 @@ def normpath(path: AnyStr) -> AnyStr: ... def commonpath(paths: Sequence[StrPath]) -> str: ... @overload def commonpath(paths: Sequence[BytesPath]) -> bytes: ... + +# First parameter is not actually pos-only, +# but must be defined as pos-only in the stub or cross-platform code doesn't type-check, +# as the parameter name is different in ntpath.join() @overload -def join(a: StrPath, *paths: StrPath) -> str: ... +def join(__a: StrPath, *paths: StrPath) -> str: ... @overload -def join(a: BytesPath, *paths: BytesPath) -> bytes: ... +def join(__a: BytesPath, *paths: BytesPath) -> bytes: ... 
if sys.version_info >= (3, 10): @overload @@ -82,18 +127,18 @@ def relpath(path: BytesPath, start: BytesPath | None = ...) -> bytes: ... @overload def relpath(path: StrPath, start: StrPath | None = ...) -> str: ... @overload -def split(p: PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... +def split(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def split(p: AnyStr) -> Tuple[AnyStr, AnyStr]: ... +def split(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... @overload -def splitdrive(p: PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... +def splitdrive(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def splitdrive(p: AnyStr) -> Tuple[AnyStr, AnyStr]: ... +def splitdrive(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... @overload -def splitext(p: PathLike[AnyStr]) -> Tuple[AnyStr, AnyStr]: ... +def splitext(p: PathLike[AnyStr]) -> tuple[AnyStr, AnyStr]: ... @overload -def splitext(p: AnyStr) -> Tuple[AnyStr, AnyStr]: ... +def splitext(p: AnyStr) -> tuple[AnyStr, AnyStr]: ... def isabs(s: StrOrBytesPath) -> bool: ... -def islink(path: StrOrBytesPath) -> bool: ... -def ismount(path: StrOrBytesPath) -> bool: ... -def lexists(path: StrOrBytesPath) -> bool: ... +def islink(path: StrOrBytesPath | int) -> bool: ... +def ismount(path: StrOrBytesPath | int) -> bool: ... +def lexists(path: StrOrBytesPath | int) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pprint.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pprint.pyi index dcf3fd6b81da..0addc8f538b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pprint.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pprint.pyi @@ -1,5 +1,10 @@ import sys -from typing import IO, Any, Tuple +from typing import IO + +if sys.version_info >= (3, 8): + __all__ = ["pprint", "pformat", "isreadable", "isrecursive", "saferepr", "PrettyPrinter", "pp"] +else: + __all__ = ["pprint", "pformat", "isreadable", "isrecursive", "saferepr", "PrettyPrinter"] if sys.version_info >= (3, 10): def pformat( @@ -126,8 +131,9 @@ class PrettyPrinter: *, compact: bool = ..., ) -> None: ... + def pformat(self, object: object) -> str: ... def pprint(self, object: object) -> None: ... def isreadable(self, object: object) -> bool: ... def isrecursive(self, object: object) -> bool: ... - def format(self, object: object, context: dict[int, Any], maxlevels: int, level: int) -> Tuple[str, bool, bool]: ... + def format(self, object: object, context: dict[int, int], maxlevels: int, level: int) -> tuple[str, bool, bool]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/profile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/profile.pyi index cb0cbf7c9388..982bcabad401 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/profile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/profile.pyi @@ -1,14 +1,17 @@ -from _typeshed import StrOrBytesPath -from typing import Any, Callable, Tuple, TypeVar +from _typeshed import Self, StrOrBytesPath +from typing import Any, Callable, TypeVar +from typing_extensions import ParamSpec + +__all__ = ["run", "runctx", "Profile"] def run(statement: str, filename: str | None = ..., sort: str | int = ...) -> None: ... def runctx( statement: str, globals: dict[str, Any], locals: dict[str, Any], filename: str | None = ..., sort: str | int = ... 
) -> None: ... -_SelfT = TypeVar("_SelfT", bound=Profile) _T = TypeVar("_T") -_Label = Tuple[str, int, str] +_P = ParamSpec("_P") +_Label = tuple[str, int, str] class Profile: bias: int @@ -21,7 +24,7 @@ class Profile: def dump_stats(self, file: StrOrBytesPath) -> None: ... def create_stats(self) -> None: ... def snapshot_stats(self) -> None: ... - def run(self: _SelfT, cmd: str) -> _SelfT: ... - def runctx(self: _SelfT, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> _SelfT: ... - def runcall(self, __func: Callable[..., _T], *args: Any, **kw: Any) -> _T: ... + def run(self: Self, cmd: str) -> Self: ... + def runctx(self: Self, cmd: str, globals: dict[str, Any], locals: dict[str, Any]) -> Self: ... + def runcall(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... def calibrate(self, m: int, verbose: int = ...) -> float: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pstats.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pstats.pyi index 6811ec94b349..c4fe28477fb5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pstats.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pstats.pyi @@ -1,14 +1,22 @@ import sys -from _typeshed import StrOrBytesPath +from _typeshed import Self, StrOrBytesPath from cProfile import Profile as _cProfile from profile import Profile -from typing import IO, Any, Iterable, Tuple, TypeVar, Union, overload +from typing import IO, Any, Iterable, Union, overload +from typing_extensions import Literal + +if sys.version_info >= (3, 9): + __all__ = ["Stats", "SortKey", "FunctionProfile", "StatsProfile"] +elif sys.version_info >= (3, 7): + __all__ = ["Stats", "SortKey"] +else: + __all__ = ["Stats"] _Selector = Union[str, float, int] -_T = TypeVar("_T", bound=Stats) if sys.version_info >= (3, 7): from enum import Enum + class SortKey(str, Enum): CALLS: str CUMULATIVE: str @@ -20,32 +28,54 @@ if sys.version_info >= (3, 7): STDNAME: str TIME: str +if 
sys.version_info >= (3, 9): + from dataclasses import dataclass + + @dataclass(unsafe_hash=True) + class FunctionProfile: + ncalls: int + tottime: float + percall_tottime: float + cumtime: float + percall_cumtime: float + file_name: str + line_number: int + @dataclass(unsafe_hash=True) + class StatsProfile: + total_tt: float + func_profiles: dict[str, FunctionProfile] + +_SortArgDict = dict[str, tuple[tuple[tuple[int, int], ...], str]] + class Stats: - sort_arg_dict_default: dict[str, Tuple[Any, str]] + sort_arg_dict_default: _SortArgDict def __init__( - self: _T, + self: Self, __arg: None | str | Profile | _cProfile = ..., - *args: None | str | Profile | _cProfile | _T, + *args: None | str | Profile | _cProfile | Self, stream: IO[Any] | None = ..., ) -> None: ... def init(self, arg: None | str | Profile | _cProfile) -> None: ... def load_stats(self, arg: None | str | Profile | _cProfile) -> None: ... def get_top_level_stats(self) -> None: ... - def add(self: _T, *arg_list: None | str | Profile | _cProfile | _T) -> _T: ... + def add(self: Self, *arg_list: None | str | Profile | _cProfile | Self) -> Self: ... def dump_stats(self, filename: StrOrBytesPath) -> None: ... - def get_sort_arg_defs(self) -> dict[str, Tuple[Tuple[Tuple[int, int], ...], str]]: ... + def get_sort_arg_defs(self) -> _SortArgDict: ... @overload - def sort_stats(self: _T, field: int) -> _T: ... + def sort_stats(self: Self, field: Literal[-1, 0, 1, 2]) -> Self: ... @overload - def sort_stats(self: _T, *field: str) -> _T: ... - def reverse_order(self: _T) -> _T: ... - def strip_dirs(self: _T) -> _T: ... + def sort_stats(self: Self, *field: str) -> Self: ... + def reverse_order(self: Self) -> Self: ... + def strip_dirs(self: Self) -> Self: ... def calc_callees(self) -> None: ... - def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> Tuple[list[str], str]: ... - def get_print_list(self, sel_list: Iterable[_Selector]) -> Tuple[int, list[str]]: ... 
- def print_stats(self: _T, *amount: _Selector) -> _T: ... - def print_callees(self: _T, *amount: _Selector) -> _T: ... - def print_callers(self: _T, *amount: _Selector) -> _T: ... + def eval_print_amount(self, sel: _Selector, list: list[str], msg: str) -> tuple[list[str], str]: ... + if sys.version_info >= (3, 9): + def get_stats_profile(self) -> StatsProfile: ... + + def get_print_list(self, sel_list: Iterable[_Selector]) -> tuple[int, list[str]]: ... + def print_stats(self: Self, *amount: _Selector) -> Self: ... + def print_callees(self: Self, *amount: _Selector) -> Self: ... + def print_callers(self: Self, *amount: _Selector) -> Self: ... def print_call_heading(self, name_size: int, column_title: str) -> None: ... def print_call_line(self, name_size: int, source: str, call_dict: dict[str, Any], arrow: str = ...) -> None: ... def print_title(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pty.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pty.pyi index ba6b9b40edc0..73c6ddfbd0c4 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pty.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pty.pyi @@ -1,15 +1,18 @@ -from typing import Callable, Iterable, Tuple +import sys +from typing import Callable, Iterable +from typing_extensions import Literal -_Reader = Callable[[int], bytes] +if sys.platform != "win32": + __all__ = ["openpty", "fork", "spawn"] + _Reader = Callable[[int], bytes] -STDIN_FILENO: int -STDOUT_FILENO: int -STDERR_FILENO: int + STDIN_FILENO: Literal[0] + STDOUT_FILENO: Literal[1] + STDERR_FILENO: Literal[2] -CHILD: int - -def openpty() -> Tuple[int, int]: ... -def master_open() -> Tuple[int, str]: ... -def slave_open(tty_name: str) -> int: ... -def fork() -> Tuple[int, int]: ... -def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... + CHILD: Literal[0] + def openpty() -> tuple[int, int]: ... + def master_open() -> tuple[int, str]: ... 
# deprecated, use openpty() + def slave_open(tty_name: str) -> int: ... # deprecated, use openpty() + def fork() -> tuple[int, int]: ... + def spawn(argv: str | Iterable[str], master_read: _Reader = ..., stdin_read: _Reader = ...) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pwd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pwd.pyi index 2b931248edda..84f6dc5ccf91 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pwd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pwd.pyi @@ -1,18 +1,26 @@ -from typing import ClassVar, Tuple +import sys +from _typeshed import structseq +from typing import Any +from typing_extensions import final -class struct_passwd(Tuple[str, str, int, int, str, str, str]): - pw_name: str - pw_passwd: str - pw_uid: int - pw_gid: int - pw_gecos: str - pw_dir: str - pw_shell: str +if sys.platform != "win32": + @final + class struct_passwd(structseq[Any], tuple[str, str, int, int, str, str, str]): + @property + def pw_name(self) -> str: ... + @property + def pw_passwd(self) -> str: ... + @property + def pw_uid(self) -> int: ... + @property + def pw_gid(self) -> int: ... + @property + def pw_gecos(self) -> str: ... + @property + def pw_dir(self) -> str: ... + @property + def pw_shell(self) -> str: ... - n_fields: ClassVar[int] - n_sequence_fields: ClassVar[int] - n_unnamed_fields: ClassVar[int] - -def getpwall() -> list[struct_passwd]: ... -def getpwuid(__uid: int) -> struct_passwd: ... -def getpwnam(__name: str) -> struct_passwd: ... + def getpwall() -> list[struct_passwd]: ... + def getpwuid(__uid: int) -> struct_passwd: ... + def getpwnam(__name: str) -> struct_passwd: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/py_compile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/py_compile.pyi index 1df818509d0e..c544a7941981 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/py_compile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/py_compile.pyi @@ -1,15 +1,21 @@ import sys -from typing import AnyStr, Type +from typing import AnyStr + +if sys.version_info >= (3, 7): + __all__ = ["compile", "main", "PyCompileError", "PycInvalidationMode"] +else: + __all__ = ["compile", "main", "PyCompileError"] class PyCompileError(Exception): exc_type_name: str exc_value: BaseException file: str msg: str - def __init__(self, exc_type: Type[BaseException], exc_value: BaseException, file: str, msg: str = ...) -> None: ... + def __init__(self, exc_type: type[BaseException], exc_value: BaseException, file: str, msg: str = ...) -> None: ... if sys.version_info >= (3, 7): import enum + class PycInvalidationMode(enum.Enum): TIMESTAMP: int CHECKED_HASH: int @@ -42,4 +48,8 @@ else: file: AnyStr, cfile: AnyStr | None = ..., dfile: AnyStr | None = ..., doraise: bool = ..., optimize: int = ... ) -> AnyStr | None: ... -def main(args: list[str] | None = ...) -> int: ... +if sys.version_info >= (3, 10): + def main() -> None: ... + +else: + def main(args: list[str] | None = ...) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pyclbr.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pyclbr.pyi index 10d106b4f511..3033833a8162 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pyclbr.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pyclbr.pyi @@ -1,5 +1,7 @@ import sys -from typing import Sequence +from collections.abc import Sequence + +__all__ = ["readmodule", "readmodule_ex", "Class", "Function"] class Class: module: str @@ -9,7 +11,26 @@ class Class: file: int lineno: int + if sys.version_info >= (3, 10): + end_lineno: int | None + if sys.version_info >= (3, 7): + parent: Class | None + children: dict[str, Class | Function] + + if sys.version_info >= (3, 10): + def __init__( + self, + module: str, + name: str, + super_: list[Class | str] | None, + file: str, + lineno: int, + parent: Class | None = ..., + *, + end_lineno: int | None = ..., + ) -> None: ... + elif sys.version_info >= (3, 7): def __init__( self, module: str, name: str, super: list[Class | str] | None, file: str, lineno: int, parent: Class | None = ... ) -> None: ... @@ -22,8 +43,28 @@ class Function: file: int lineno: int + if sys.version_info >= (3, 10): + end_lineno: int | None + is_async: bool + if sys.version_info >= (3, 7): - def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | None = ...) -> None: ... + parent: Function | Class | None + children: dict[str, Class | Function] + + if sys.version_info >= (3, 10): + def __init__( + self, + module: str, + name: str, + file: str, + lineno: int, + parent: Function | Class | None = ..., + is_async: bool = ..., + *, + end_lineno: int | None = ..., + ) -> None: ... + elif sys.version_info >= (3, 7): + def __init__(self, module: str, name: str, file: str, lineno: int, parent: Function | Class | None = ...) -> None: ... else: def __init__(self, module: str, name: str, file: str, lineno: int) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pydoc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pydoc.pyi index 778a2e535706..487656a3c160 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pydoc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pydoc.pyi @@ -1,10 +1,15 @@ from _typeshed import SupportsWrite +from abc import abstractmethod from reprlib import Repr from types import MethodType, ModuleType, TracebackType -from typing import IO, Any, AnyStr, Callable, Container, Mapping, MutableMapping, NoReturn, Optional, Tuple, Type +from typing import IO, Any, AnyStr, Callable, Container, Mapping, MutableMapping, NoReturn, TypeVar + +__all__ = ["help"] # the return type of sys.exc_info(), used by ErrorDuringImport.__init__ -_Exc_Info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] +_Exc_Info = tuple[type[BaseException] | None, BaseException | None, TracebackType | None] + +_T = TypeVar("_T") __author__: str __date__: str @@ -13,7 +18,7 @@ __credits__: str def pathdirs() -> list[str]: ... def getdoc(object: object) -> str: ... -def splitdoc(doc: AnyStr) -> Tuple[AnyStr, AnyStr]: ... +def splitdoc(doc: AnyStr) -> tuple[AnyStr, AnyStr]: ... def classname(object: object, modname: str) -> str: ... def isdata(object: object) -> bool: ... def replace(text: AnyStr, *pairs: AnyStr) -> AnyStr: ... @@ -21,14 +26,14 @@ def cram(text: str, maxlen: int) -> str: ... def stripid(text: str) -> str: ... def allmethods(cl: type) -> MutableMapping[str, MethodType]: ... def visiblename(name: str, all: Container[str] | None = ..., obj: object | None = ...) -> bool: ... -def classify_class_attrs(object: object) -> list[Tuple[str, str, type, str]]: ... +def classify_class_attrs(object: object) -> list[tuple[str, str, type, str]]: ... def ispackage(path: str) -> bool: ... def source_synopsis(file: IO[AnyStr]) -> AnyStr | None: ... 
-def synopsis(filename: str, cache: MutableMapping[str, Tuple[int, str]] = ...) -> str | None: ... +def synopsis(filename: str, cache: MutableMapping[str, tuple[int, str]] = ...) -> str | None: ... class ErrorDuringImport(Exception): filename: str - exc: Type[BaseException] | None + exc: type[BaseException] | None value: BaseException | None tb: TracebackType | None def __init__(self, filename: str, exc_info: _Exc_Info) -> None: ... @@ -40,11 +45,17 @@ class Doc: PYTHONDOCS: str def document(self, object: object, name: str | None = ..., *args: Any) -> str: ... def fail(self, object: object, name: str | None = ..., *args: Any) -> NoReturn: ... + @abstractmethod def docmodule(self, object: object, name: str | None = ..., *args: Any) -> str: ... + @abstractmethod def docclass(self, object: object, name: str | None = ..., *args: Any) -> str: ... + @abstractmethod def docroutine(self, object: object, name: str | None = ..., *args: Any) -> str: ... + @abstractmethod def docother(self, object: object, name: str | None = ..., *args: Any) -> str: ... + @abstractmethod def docproperty(self, object: object, name: str | None = ..., *args: Any) -> str: ... + @abstractmethod def docdata(self, object: object, name: str | None = ..., *args: Any) -> str: ... def getdocloc(self, object: object, basedir: str = ...) -> str | None: ... @@ -81,12 +92,12 @@ class HTMLDoc(Doc): ) -> str: ... def bigsection(self, title: str, *args: Any) -> str: ... def preformat(self, text: str) -> str: ... - def multicolumn(self, list: list[Any], format: Callable[[Any], str], cols: int = ...) -> str: ... + def multicolumn(self, list: list[_T], format: Callable[[_T], str], cols: int = ...) -> str: ... def grey(self, text: str) -> str: ... def namelink(self, name: str, *dicts: MutableMapping[str, str]) -> str: ... def classlink(self, object: object, modname: str) -> str: ... def modulelink(self, object: object) -> str: ... - def modpkglink(self, modpkginfo: Tuple[str, str, bool, bool]) -> str: ... 
+ def modpkglink(self, modpkginfo: tuple[str, str, bool, bool]) -> str: ... def markup( self, text: str, @@ -96,7 +107,7 @@ class HTMLDoc(Doc): methods: Mapping[str, str] = ..., ) -> str: ... def formattree( - self, tree: list[Tuple[type, Tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ... + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ... ) -> str: ... def docmodule(self, object: object, name: str | None = ..., mod: str | None = ..., *ignored: Any) -> str: ... def docclass( @@ -109,7 +120,7 @@ class HTMLDoc(Doc): *ignored: Any, ) -> str: ... def formatvalue(self, object: object) -> str: ... - def docroutine( + def docroutine( # type: ignore[override] self, object: object, name: str | None = ..., @@ -118,15 +129,10 @@ class HTMLDoc(Doc): classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., cl: type | None = ..., - *ignored: Any, - ) -> str: ... - def docproperty( - self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ..., *ignored: Any ) -> str: ... + def docproperty(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] def docother(self, object: object, name: str | None = ..., mod: Any | None = ..., *ignored: Any) -> str: ... - def docdata( - self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ..., *ignored: Any - ) -> str: ... + def docdata(self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] def index(self, dir: str, shadowed: MutableMapping[str, bool] | None = ...) -> str: ... def filelink(self, url: str, path: str) -> str: ... @@ -148,21 +154,15 @@ class TextDoc(Doc): def indent(self, text: str, prefix: str = ...) -> str: ... def section(self, title: str, contents: str) -> str: ... 
def formattree( - self, tree: list[Tuple[type, Tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ..., prefix: str = ... + self, tree: list[tuple[type, tuple[type, ...]] | list[Any]], modname: str, parent: type | None = ..., prefix: str = ... ) -> str: ... - def docmodule(self, object: object, name: str | None = ..., mod: Any | None = ..., *ignored: Any) -> str: ... + def docmodule(self, object: object, name: str | None = ..., mod: Any | None = ...) -> str: ... # type: ignore[override] def docclass(self, object: object, name: str | None = ..., mod: str | None = ..., *ignored: Any) -> str: ... def formatvalue(self, object: object) -> str: ... - def docroutine( - self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ..., *ignored: Any - ) -> str: ... - def docproperty( - self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ..., *ignored: Any - ) -> str: ... - def docdata( - self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ..., *ignored: Any - ) -> str: ... - def docother( + def docroutine(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] + def docproperty(self, object: object, name: str | None = ..., mod: Any | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] + def docdata(self, object: object, name: str | None = ..., mod: str | None = ..., cl: Any | None = ...) -> str: ... # type: ignore[override] + def docother( # type: ignore[override] self, object: object, name: str | None = ..., @@ -170,7 +170,6 @@ class TextDoc(Doc): parent: str | None = ..., maxlen: int | None = ..., doc: Any | None = ..., - *ignored: Any, ) -> str: ... def pager(text: str) -> None: ... @@ -188,7 +187,7 @@ html: HTMLDoc class _OldStyleClass: ... -def resolve(thing: str | object, forceload: bool = ...) -> Tuple[object, str] | None: ... 
+def resolve(thing: str | object, forceload: bool = ...) -> tuple[object, str] | None: ... def render_doc(thing: str | object, title: str = ..., forceload: bool = ..., renderer: Doc | None = ...) -> str: ... def doc(thing: str | object, title: str = ..., forceload: bool = ..., output: SupportsWrite[str] | None = ...) -> None: ... def writedoc(thing: str | object, forceload: bool = ...) -> None: ... @@ -197,9 +196,9 @@ def writedocs(dir: str, pkgpath: str = ..., done: Any | None = ...) -> None: ... _list = list # "list" conflicts with method name class Helper: - keywords: dict[str, str | Tuple[str, str]] + keywords: dict[str, str | tuple[str, str]] symbols: dict[str, str] - topics: dict[str, str | Tuple[str, ...]] + topics: dict[str, str | tuple[str, ...]] def __init__(self, input: IO[str] | None = ..., output: IO[str] | None = ...) -> None: ... input: IO[str] output: IO[str] @@ -218,21 +217,12 @@ class Helper: help: Helper -# See Python issue #11182: "remove the unused and undocumented pydoc.Scanner class" -# class Scanner: -# roots = ... # type: Any -# state = ... # type: Any -# children = ... # type: Any -# descendp = ... # type: Any -# def __init__(self, roots, children, descendp) -> None: ... -# def next(self): ... - class ModuleScanner: quit: bool def run( self, callback: Callable[[str | None, str, str], None], - key: Any | None = ..., + key: str | None = ..., completer: Callable[[], None] | None = ..., onerror: Callable[[str], None] | None = ..., ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi index 95c1f98d5652..5aca55c2b813 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/pyexpat/__init__.pyi @@ -1,12 +1,13 @@ import pyexpat.errors as errors import pyexpat.model as model from _typeshed import SupportsRead -from typing import Any, Callable, Optional, Tuple +from typing import Any, Callable, Optional +from typing_extensions import final EXPAT_VERSION: str # undocumented -version_info: Tuple[int, int, int] # undocumented +version_info: tuple[int, int, int] # undocumented native_encoding: str # undocumented -features: list[Tuple[str, int]] # undocumented +features: list[tuple[str, int]] # undocumented class ExpatError(Exception): code: int @@ -19,9 +20,10 @@ XML_PARAM_ENTITY_PARSING_NEVER: int XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE: int XML_PARAM_ENTITY_PARSING_ALWAYS: int -_Model = Tuple[int, int, Optional[str], Tuple[Any, ...]] +_Model = tuple[int, int, Optional[str], tuple[Any, ...]] -class XMLParserType(object): +@final +class XMLParserType: def Parse(self, __data: str | bytes, __isfinal: bool = ...) -> int: ... def ParseFile(self, __file: SupportsRead[bytes]) -> int: ... def SetBase(self, __base: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/queue.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/queue.pyi index 9671155b7eb6..532d5d98344d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/queue.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/queue.pyi @@ -5,6 +5,11 @@ from typing import Any, Generic, TypeVar if sys.version_info >= (3, 9): from types import GenericAlias +if sys.version_info >= (3, 7): + __all__ = ["Empty", "Full", "Queue", "PriorityQueue", "LifoQueue", "SimpleQueue"] +else: + __all__ = ["Empty", "Full", "Queue", "PriorityQueue", "LifoQueue"] + _T = TypeVar("_T") class Empty(Exception): ... @@ -18,6 +23,8 @@ class Queue(Generic[_T]): not_full: Condition # undocumented all_tasks_done: Condition # undocumented unfinished_tasks: int # undocumented + # Despite the fact that `queue` has `deque` type, + # we treat it as `Any` to allow different implementations in subtypes. queue: Any # undocumented def __init__(self, maxsize: int = ...) -> None: ... def _init(self, maxsize: int) -> None: ... @@ -36,8 +43,11 @@ class Queue(Generic[_T]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... -class PriorityQueue(Queue[_T]): ... -class LifoQueue(Queue[_T]): ... +class PriorityQueue(Queue[_T]): + queue: list[_T] + +class LifoQueue(Queue[_T]): + queue: list[_T] if sys.version_info >= (3, 7): class SimpleQueue(Generic[_T]): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/quopri.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/quopri.pyi index c2ffabe7d531..b8dc0787fd1a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/quopri.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/quopri.pyi @@ -1,5 +1,7 @@ from typing import BinaryIO +__all__ = ["encode", "decode", "encodestring", "decodestring"] + def encode(input: BinaryIO, output: BinaryIO, quotetabs: int, header: int = ...) -> None: ... 
def encodestring(s: bytes, quotetabs: int = ..., header: int = ...) -> bytes: ... def decode(input: BinaryIO, output: BinaryIO, header: int = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/random.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/random.pyi index f284fe831b42..74201216ba58 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/random.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/random.pyi @@ -1,25 +1,89 @@ import _random import sys -from collections.abc import Callable, Iterable, MutableSequence, Sequence, Set +from _typeshed import SupportsLenAndGetItem +from collections.abc import Callable, Iterable, MutableSequence, Sequence, Set as AbstractSet from fractions import Fraction -from typing import Any, NoReturn, Tuple, TypeVar +from typing import Any, ClassVar, NoReturn, TypeVar + +if sys.version_info >= (3, 9): + __all__ = [ + "Random", + "SystemRandom", + "betavariate", + "choice", + "choices", + "expovariate", + "gammavariate", + "gauss", + "getrandbits", + "getstate", + "lognormvariate", + "normalvariate", + "paretovariate", + "randbytes", + "randint", + "random", + "randrange", + "sample", + "seed", + "setstate", + "shuffle", + "triangular", + "uniform", + "vonmisesvariate", + "weibullvariate", + ] +else: + __all__ = [ + "Random", + "seed", + "random", + "uniform", + "randint", + "choice", + "sample", + "randrange", + "shuffle", + "normalvariate", + "lognormvariate", + "expovariate", + "vonmisesvariate", + "gammavariate", + "triangular", + "gauss", + "betavariate", + "paretovariate", + "weibullvariate", + "getstate", + "setstate", + "getrandbits", + "choices", + "SystemRandom", + ] _T = TypeVar("_T") class Random(_random.Random): + VERSION: ClassVar[int] def __init__(self, x: Any = ...) -> None: ... - def seed(self, a: Any = ..., version: int = ...) -> None: ... - def getstate(self) -> Tuple[Any, ...]: ... - def setstate(self, state: Tuple[Any, ...]) -> None: ... 
+ # Using other `seed` types is deprecated since 3.9 and removed in 3.11 + if sys.version_info >= (3, 9): + def seed(self, a: int | float | str | bytes | bytearray | None = ..., version: int = ...) -> None: ... # type: ignore[override] + else: + def seed(self, a: Any = ..., version: int = ...) -> None: ... + + def getstate(self) -> tuple[Any, ...]: ... + def setstate(self, state: tuple[Any, ...]) -> None: ... def getrandbits(self, __k: int) -> int: ... def randrange(self, start: int, stop: int | None = ..., step: int = ...) -> int: ... def randint(self, a: int, b: int) -> int: ... if sys.version_info >= (3, 9): def randbytes(self, n: int) -> bytes: ... - def choice(self, seq: Sequence[_T]) -> _T: ... + + def choice(self, seq: SupportsLenAndGetItem[_T]) -> _T: ... def choices( self, - population: Sequence[_T], + population: SupportsLenAndGetItem[_T], weights: Sequence[float | Fraction] | None = ..., *, cum_weights: Sequence[float | Fraction] | None = ..., @@ -27,9 +91,12 @@ class Random(_random.Random): ) -> list[_T]: ... def shuffle(self, x: MutableSequence[Any], random: Callable[[], float] | None = ...) -> None: ... if sys.version_info >= (3, 9): - def sample(self, population: Sequence[_T] | Set[_T], k: int, *, counts: Iterable[_T] | None = ...) -> list[_T]: ... + def sample( + self, population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[_T] | None = ... + ) -> list[_T]: ... else: - def sample(self, population: Sequence[_T] | Set[_T], k: int) -> list[_T]: ... + def sample(self, population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... + def random(self) -> float: ... def uniform(self, a: float, b: float) -> float: ... def triangular(self, low: float = ..., high: float = ..., mode: float | None = ...) -> float: ... @@ -45,11 +112,17 @@ class Random(_random.Random): # SystemRandom is not implemented for all OS's; good on Windows & Linux class SystemRandom(Random): + def getrandbits(self, k: int) -> int: ... 
# k can be passed by keyword def getstate(self, *args: Any, **kwds: Any) -> NoReturn: ... def setstate(self, *args: Any, **kwds: Any) -> NoReturn: ... # ----- random function stubs ----- -def seed(a: Any = ..., version: int = ...) -> None: ... +if sys.version_info >= (3, 9): + def seed(a: int | float | str | bytes | bytearray | None = ..., version: int = ...) -> None: ... + +else: + def seed(a: Any = ..., version: int = ...) -> None: ... + def getstate() -> object: ... def setstate(state: object) -> None: ... def getrandbits(__k: int) -> int: ... @@ -59,17 +132,21 @@ def randint(a: int, b: int) -> int: ... if sys.version_info >= (3, 9): def randbytes(n: int) -> bytes: ... -def choice(seq: Sequence[_T]) -> _T: ... +def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... def choices( - population: Sequence[_T], weights: Sequence[float] | None = ..., *, cum_weights: Sequence[float] | None = ..., k: int = ... + population: SupportsLenAndGetItem[_T], + weights: Sequence[float] | None = ..., + *, + cum_weights: Sequence[float] | None = ..., + k: int = ..., ) -> list[_T]: ... def shuffle(x: MutableSequence[Any], random: Callable[[], float] | None = ...) -> None: ... if sys.version_info >= (3, 9): - def sample(population: Sequence[_T] | Set[_T], k: int, *, counts: Iterable[_T] | None = ...) -> list[_T]: ... + def sample(population: Sequence[_T] | AbstractSet[_T], k: int, *, counts: Iterable[_T] | None = ...) -> list[_T]: ... else: - def sample(population: Sequence[_T] | Set[_T], k: int) -> list[_T]: ... + def sample(population: Sequence[_T] | AbstractSet[_T], k: int) -> list[_T]: ... def random() -> float: ... def uniform(a: float, b: float) -> float: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/re.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/re.pyi index f2869470dfcf..b9c41e1caa16 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/re.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/re.pyi @@ -1,7 +1,8 @@ import enum +import sre_compile import sys from sre_constants import error as error -from typing import Any, AnyStr, Callable, Iterator, Tuple, Union, overload +from typing import Any, AnyStr, Callable, Iterator, Union, overload # ----- re variables and constants ----- if sys.version_info >= (3, 7): @@ -9,24 +10,123 @@ if sys.version_info >= (3, 7): else: from typing import Match, Pattern +if sys.version_info >= (3, 11): + __all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "template", + "escape", + "error", + "Pattern", + "Match", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", + "RegexFlag", + "NOFLAG", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "template", + "escape", + "error", + "Pattern", + "Match", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", + ] +else: + __all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "template", + "escape", + "error", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", + ] + class RegexFlag(enum.IntFlag): - A: int - ASCII: int - DEBUG: int - I: int - IGNORECASE: int - L: int - LOCALE: int - M: int - MULTILINE: int - S: int - DOTALL: int - X: int - VERBOSE: int - U: 
int - UNICODE: int - T: int - TEMPLATE: int + A = sre_compile.SRE_FLAG_ASCII + ASCII = A + DEBUG = sre_compile.SRE_FLAG_DEBUG + I = sre_compile.SRE_FLAG_IGNORECASE + IGNORECASE = I + L = sre_compile.SRE_FLAG_LOCALE + LOCALE = L + M = sre_compile.SRE_FLAG_MULTILINE + MULTILINE = M + S = sre_compile.SRE_FLAG_DOTALL + DOTALL = S + X = sre_compile.SRE_FLAG_VERBOSE + VERBOSE = X + U = sre_compile.SRE_FLAG_UNICODE + UNICODE = U + T = sre_compile.SRE_FLAG_TEMPLATE + TEMPLATE = T + if sys.version_info >= (3, 11): + NOFLAG: int A = RegexFlag.A ASCII = RegexFlag.ASCII @@ -45,12 +145,19 @@ U = RegexFlag.U UNICODE = RegexFlag.UNICODE T = RegexFlag.T TEMPLATE = RegexFlag.TEMPLATE +if sys.version_info >= (3, 11): + NOFLAG = RegexFlag.NOFLAG _FlagsType = Union[int, RegexFlag] if sys.version_info < (3, 7): # undocumented _pattern_type: type +# Type-wise these overloads are unnecessary, they could also be modeled using +# unions in the parameter types. However mypy has a bug regarding TypeVar +# constraints (https://github.com/python/mypy/issues/11880), +# which limits us here because AnyStr is a constrained TypeVar. + @overload def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ... @overload @@ -63,8 +170,6 @@ def search(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> def match(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr] | None: ... @overload def match(pattern: Pattern[AnyStr], string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr] | None: ... - -# New in Python 3.4 @overload def fullmatch(pattern: AnyStr, string: AnyStr, flags: _FlagsType = ...) -> Match[AnyStr] | None: ... @overload @@ -99,19 +204,19 @@ def sub( pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ... ) -> AnyStr: ... @overload -def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> Tuple[AnyStr, int]: ... 
+def subn(pattern: AnyStr, repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ...) -> tuple[AnyStr, int]: ... @overload def subn( pattern: AnyStr, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ... -) -> Tuple[AnyStr, int]: ... +) -> tuple[AnyStr, int]: ... @overload def subn( pattern: Pattern[AnyStr], repl: AnyStr, string: AnyStr, count: int = ..., flags: _FlagsType = ... -) -> Tuple[AnyStr, int]: ... +) -> tuple[AnyStr, int]: ... @overload def subn( pattern: Pattern[AnyStr], repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ..., flags: _FlagsType = ... -) -> Tuple[AnyStr, int]: ... +) -> tuple[AnyStr, int]: ... def escape(pattern: AnyStr) -> AnyStr: ... def purge() -> None: ... def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/readline.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/readline.pyi index 2de749b2c216..df08a3cc25ff 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/readline.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/readline.pyi @@ -1,33 +1,35 @@ +import sys from _typeshed import StrOrBytesPath -from typing import Callable, Optional, Sequence +from typing import Callable, Sequence -_CompleterT = Optional[Callable[[str, int], Optional[str]]] -_CompDispT = Optional[Callable[[str, Sequence[str], int], None]] +if sys.platform != "win32": + _Completer = Callable[[str, int], str | None] + _CompDisp = Callable[[str, Sequence[str], int], None] -def parse_and_bind(__string: str) -> None: ... -def read_init_file(__filename: StrOrBytesPath | None = ...) -> None: ... -def get_line_buffer() -> str: ... -def insert_text(__string: str) -> None: ... -def redisplay() -> None: ... -def read_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... -def write_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... 
-def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = ...) -> None: ... -def get_history_length() -> int: ... -def set_history_length(__length: int) -> None: ... -def clear_history() -> None: ... -def get_current_history_length() -> int: ... -def get_history_item(__index: int) -> str: ... -def remove_history_item(__pos: int) -> None: ... -def replace_history_item(__pos: int, __line: str) -> None: ... -def add_history(__string: str) -> None: ... -def set_auto_history(__enabled: bool) -> None: ... -def set_startup_hook(__function: Callable[[], None] | None = ...) -> None: ... -def set_pre_input_hook(__function: Callable[[], None] | None = ...) -> None: ... -def set_completer(__function: _CompleterT = ...) -> None: ... -def get_completer() -> _CompleterT: ... -def get_completion_type() -> int: ... -def get_begidx() -> int: ... -def get_endidx() -> int: ... -def set_completer_delims(__string: str) -> None: ... -def get_completer_delims() -> str: ... -def set_completion_display_matches_hook(__function: _CompDispT = ...) -> None: ... + def parse_and_bind(__string: str) -> None: ... + def read_init_file(__filename: StrOrBytesPath | None = ...) -> None: ... + def get_line_buffer() -> str: ... + def insert_text(__string: str) -> None: ... + def redisplay() -> None: ... + def read_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... + def write_history_file(__filename: StrOrBytesPath | None = ...) -> None: ... + def append_history_file(__nelements: int, __filename: StrOrBytesPath | None = ...) -> None: ... + def get_history_length() -> int: ... + def set_history_length(__length: int) -> None: ... + def clear_history() -> None: ... + def get_current_history_length() -> int: ... + def get_history_item(__index: int) -> str: ... + def remove_history_item(__pos: int) -> None: ... + def replace_history_item(__pos: int, __line: str) -> None: ... + def add_history(__string: str) -> None: ... + def set_auto_history(__enabled: bool) -> None: ... 
+ def set_startup_hook(__function: Callable[[], object] | None = ...) -> None: ... + def set_pre_input_hook(__function: Callable[[], object] | None = ...) -> None: ... + def set_completer(__function: _Completer | None = ...) -> None: ... + def get_completer() -> _Completer | None: ... + def get_completion_type() -> int: ... + def get_begidx() -> int: ... + def get_endidx() -> int: ... + def set_completer_delims(__string: str) -> None: ... + def get_completer_delims() -> str: ... + def set_completion_display_matches_hook(__function: _CompDisp | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/reprlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/reprlib.pyi index 1529220c9bfc..2d114a7c4f26 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/reprlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/reprlib.pyi @@ -1,5 +1,8 @@ from array import array -from typing import Any, Callable, Deque, FrozenSet, Set, Tuple +from collections import deque +from typing import Any, Callable + +__all__ = ["Repr", "repr", "recursive_repr"] _ReprFunc = Callable[[Any], str] @@ -20,12 +23,12 @@ class Repr: def __init__(self) -> None: ... def repr(self, x: Any) -> str: ... def repr1(self, x: Any, level: int) -> str: ... - def repr_tuple(self, x: Tuple[Any, ...], level: int) -> str: ... + def repr_tuple(self, x: tuple[Any, ...], level: int) -> str: ... def repr_list(self, x: list[Any], level: int) -> str: ... def repr_array(self, x: array[Any], level: int) -> str: ... - def repr_set(self, x: Set[Any], level: int) -> str: ... - def repr_frozenset(self, x: FrozenSet[Any], level: int) -> str: ... - def repr_deque(self, x: Deque[Any], level: int) -> str: ... + def repr_set(self, x: set[Any], level: int) -> str: ... + def repr_frozenset(self, x: frozenset[Any], level: int) -> str: ... + def repr_deque(self, x: deque[Any], level: int) -> str: ... def repr_dict(self, x: dict[Any, Any], level: int) -> str: ... 
def repr_str(self, x: str, level: int) -> str: ... def repr_int(self, x: int, level: int) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/resource.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/resource.pyi index 742e43814bcc..427f4995ecfd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/resource.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/resource.pyi @@ -1,58 +1,74 @@ import sys -from typing import Any, Tuple, overload +from _typeshed import structseq +from typing import overload +from typing_extensions import final -RLIMIT_AS: int -RLIMIT_CORE: int -RLIMIT_CPU: int -RLIMIT_DATA: int -RLIMIT_FSIZE: int -RLIMIT_MEMLOCK: int -RLIMIT_NOFILE: int -RLIMIT_NPROC: int -RLIMIT_RSS: int -RLIMIT_STACK: int -RLIM_INFINITY: int -RUSAGE_CHILDREN: int -RUSAGE_SELF: int -if sys.platform == "linux": - RLIMIT_MSGQUEUE: int - RLIMIT_NICE: int - RLIMIT_OFILE: int - RLIMIT_RTPRIO: int - RLIMIT_RTTIME: int - RLIMIT_SIGPENDING: int - RUSAGE_THREAD: int +if sys.platform != "win32": + RLIMIT_AS: int + RLIMIT_CORE: int + RLIMIT_CPU: int + RLIMIT_DATA: int + RLIMIT_FSIZE: int + RLIMIT_MEMLOCK: int + RLIMIT_NOFILE: int + RLIMIT_NPROC: int + RLIMIT_RSS: int + RLIMIT_STACK: int + RLIM_INFINITY: int + RUSAGE_CHILDREN: int + RUSAGE_SELF: int + if sys.platform == "linux": + RLIMIT_MSGQUEUE: int + RLIMIT_NICE: int + RLIMIT_OFILE: int + RLIMIT_RTPRIO: int + RLIMIT_RTTIME: int + RLIMIT_SIGPENDING: int + RUSAGE_THREAD: int + @final + class struct_rusage( + structseq[float], tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] + ): + @property + def ru_utime(self) -> float: ... + @property + def ru_stime(self) -> float: ... + @property + def ru_maxrss(self) -> int: ... + @property + def ru_ixrss(self) -> int: ... + @property + def ru_idrss(self) -> int: ... + @property + def ru_isrss(self) -> int: ... + @property + def ru_minflt(self) -> int: ... + @property + def ru_majflt(self) -> int: ... 
+ @property + def ru_nswap(self) -> int: ... + @property + def ru_inblock(self) -> int: ... + @property + def ru_oublock(self) -> int: ... + @property + def ru_msgsnd(self) -> int: ... + @property + def ru_msgrcv(self) -> int: ... + @property + def ru_nsignals(self) -> int: ... + @property + def ru_nvcsw(self) -> int: ... + @property + def ru_nivcsw(self) -> int: ... -_Tuple16 = Tuple[float, float, int, int, int, int, int, int, int, int, int, int, int, int, int, int] - -class struct_rusage(_Tuple16): - def __new__(cls, sequence: _Tuple16, dict: dict[str, Any] = ...) -> struct_rusage: ... - ru_utime: float - ru_stime: float - ru_maxrss: int - ru_ixrss: int - ru_idrss: int - ru_isrss: int - ru_minflt: int - ru_majflt: int - ru_nswap: int - ru_inblock: int - ru_oublock: int - ru_msgsnd: int - ru_msgrcv: int - ru_nsignals: int - ru_nvcsw: int - ru_nivcsw: int - -def getpagesize() -> int: ... -def getrlimit(__resource: int) -> Tuple[int, int]: ... -def getrusage(__who: int) -> struct_rusage: ... -def setrlimit(__resource: int, __limits: Tuple[int, int]) -> None: ... - -if sys.platform == "linux": - @overload - def prlimit(pid: int, resource: int, limits: Tuple[int, int]) -> Tuple[int, int]: ... - @overload - def prlimit(pid: int, resource: int) -> Tuple[int, int]: ... - -error = OSError + def getpagesize() -> int: ... + def getrlimit(__resource: int) -> tuple[int, int]: ... + def getrusage(__who: int) -> struct_rusage: ... + def setrlimit(__resource: int, __limits: tuple[int, int]) -> None: ... + if sys.platform == "linux": + @overload + def prlimit(pid: int, resource: int, limits: tuple[int, int]) -> tuple[int, int]: ... + @overload + def prlimit(pid: int, resource: int) -> tuple[int, int]: ... 
+ error = OSError diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/rlcompleter.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/rlcompleter.pyi index f971c424213d..a73ba8db1c18 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/rlcompleter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/rlcompleter.pyi @@ -1,5 +1,7 @@ from typing import Any +__all__ = ["Completer"] + class Completer: def __init__(self, namespace: dict[str, Any] | None = ...) -> None: ... def complete(self, text: str, state: int) -> str | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/runpy.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/runpy.pyi index 371fa9d07cf8..05e0ec65dc49 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/runpy.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/runpy.pyi @@ -2,6 +2,8 @@ from _typeshed import Self from types import ModuleType from typing import Any +__all__ = ["run_module", "run_path"] + class _TempModule: mod_name: str module: ModuleType diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sched.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sched.pyi index cb96dc2bbf4a..dff781b0c176 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sched.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sched.pyi @@ -1,20 +1,36 @@ -from typing import Any, Callable, NamedTuple, Tuple +import sys +from typing import Any, Callable, NamedTuple -class Event(NamedTuple): - time: float - priority: Any - action: Callable[..., Any] - argument: Tuple[Any, ...] - kwargs: dict[str, Any] +__all__ = ["scheduler"] + +if sys.version_info >= (3, 10): + class Event(NamedTuple): + time: float + priority: Any + sequence: int + action: Callable[..., Any] + argument: tuple[Any, ...] + kwargs: dict[str, Any] + +else: + class Event(NamedTuple): + time: float + priority: Any + action: Callable[..., Any] + argument: tuple[Any, ...] 
+ kwargs: dict[str, Any] class scheduler: - def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], None] = ...) -> None: ... + timefunc: Callable[[], float] + delayfunc: Callable[[float], object] + + def __init__(self, timefunc: Callable[[], float] = ..., delayfunc: Callable[[float], object] = ...) -> None: ... def enterabs( self, time: float, priority: Any, action: Callable[..., Any], - argument: Tuple[Any, ...] = ..., + argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ..., ) -> Event: ... def enter( @@ -22,7 +38,7 @@ class scheduler: delay: float, priority: Any, action: Callable[..., Any], - argument: Tuple[Any, ...] = ..., + argument: tuple[Any, ...] = ..., kwargs: dict[str, Any] = ..., ) -> Event: ... def run(self, blocking: bool = ...) -> float | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/secrets.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/secrets.pyi index 6752a30f431e..99b7c14ebafc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/secrets.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/secrets.pyi @@ -1,12 +1,15 @@ +from _typeshed import SupportsLenAndGetItem from hmac import compare_digest as compare_digest from random import SystemRandom as SystemRandom -from typing import Sequence, TypeVar +from typing import TypeVar + +__all__ = ["choice", "randbelow", "randbits", "SystemRandom", "token_bytes", "token_hex", "token_urlsafe", "compare_digest"] _T = TypeVar("_T") def randbelow(exclusive_upper_bound: int) -> int: ... def randbits(k: int) -> int: ... -def choice(seq: Sequence[_T]) -> _T: ... +def choice(seq: SupportsLenAndGetItem[_T]) -> _T: ... def token_bytes(nbytes: int | None = ...) -> bytes: ... def token_hex(nbytes: int | None = ...) -> str: ... def token_urlsafe(nbytes: int | None = ...) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi index fd503dc0033a..d4a3656e110e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/select.pyi @@ -1,7 +1,8 @@ import sys from _typeshed import FileDescriptorLike, Self from types import TracebackType -from typing import Any, Iterable, Tuple, Type +from typing import Any, Iterable +from typing_extensions import final if sys.platform != "win32": PIPE_BUF: int @@ -13,6 +14,7 @@ if sys.platform != "win32": POLLOUT: int POLLPRI: int POLLRDBAND: int + POLLRDHUP: int POLLRDNORM: int POLLWRBAND: int POLLWRNORM: int @@ -22,17 +24,18 @@ class poll: def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... def unregister(self, fd: FileDescriptorLike) -> None: ... - def poll(self, timeout: float | None = ...) -> list[Tuple[int, int]]: ... + def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... def select( __rlist: Iterable[Any], __wlist: Iterable[Any], __xlist: Iterable[Any], __timeout: float | None = ... -) -> Tuple[list[Any], list[Any], list[Any]]: ... +) -> tuple[list[Any], list[Any], list[Any]]: ... error = OSError if sys.platform != "linux" and sys.platform != "win32": # BSD only - class kevent(object): + @final + class kevent: data: Any fflags: int filter: int @@ -49,7 +52,8 @@ if sys.platform != "linux" and sys.platform != "win32": udata: Any = ..., ) -> None: ... # BSD only - class kqueue(object): + @final + class kqueue: closed: bool def __init__(self) -> None: ... def close(self) -> None: ... @@ -99,14 +103,15 @@ if sys.platform != "linux" and sys.platform != "win32": KQ_NOTE_WRITE: int if sys.platform == "linux": - class epoll(object): + @final + class epoll: def __init__(self, sizehint: int = ..., flags: int = ...) -> None: ... 
def __enter__(self: Self) -> Self: ... def __exit__( self, - exc_type: Type[BaseException] | None = ..., - exc_val: BaseException | None = ..., - exc_tb: TracebackType | None = ..., + __exc_type: type[BaseException] | None = ..., + __exc_val: BaseException | None = ..., + __exc_tb: TracebackType | None = ..., ) -> None: ... def close(self) -> None: ... closed: bool @@ -114,10 +119,11 @@ if sys.platform == "linux": def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int) -> None: ... def unregister(self, fd: FileDescriptorLike) -> None: ... - def poll(self, timeout: float | None = ..., maxevents: int = ...) -> list[Tuple[int, int]]: ... + def poll(self, timeout: float | None = ..., maxevents: int = ...) -> list[tuple[int, int]]: ... @classmethod def fromfd(cls, __fd: FileDescriptorLike) -> epoll: ... EPOLLERR: int + EPOLLEXCLUSIVE: int EPOLLET: int EPOLLHUP: int EPOLLIN: int @@ -126,10 +132,12 @@ if sys.platform == "linux": EPOLLOUT: int EPOLLPRI: int EPOLLRDBAND: int + EPOLLRDHUP: int EPOLLRDNORM: int EPOLLWRBAND: int EPOLLWRNORM: int EPOLL_RDHUP: int + EPOLL_CLOEXEC: int if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win32": # Solaris only @@ -140,4 +148,4 @@ if sys.platform != "linux" and sys.platform != "darwin" and sys.platform != "win def register(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def modify(self, fd: FileDescriptorLike, eventmask: int = ...) -> None: ... def unregister(self, fd: FileDescriptorLike) -> None: ... - def poll(self, timeout: float | None = ...) -> list[Tuple[int, int]]: ... + def poll(self, timeout: float | None = ...) -> list[tuple[int, int]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/selectors.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/selectors.pyi index a28dc1d53899..c3fe7ec47ace 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/selectors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/selectors.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import FileDescriptor, FileDescriptorLike, Self from abc import ABCMeta, abstractmethod -from typing import Any, Mapping, NamedTuple, Tuple +from typing import Any, Mapping, NamedTuple _EventMask = int @@ -21,7 +21,7 @@ class BaseSelector(metaclass=ABCMeta): def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... @abstractmethod - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def close(self) -> None: ... def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... @abstractmethod @@ -32,14 +32,14 @@ class BaseSelector(metaclass=ABCMeta): class SelectSelector(BaseSelector): def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... if sys.platform != "win32": class PollSelector(BaseSelector): def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... 
+ def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... if sys.platform == "linux": @@ -47,25 +47,25 @@ if sys.platform == "linux": def fileno(self) -> int: ... def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class DevpollSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class KqueueSelector(BaseSelector): def fileno(self) -> int: ... def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... class DefaultSelector(BaseSelector): def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... - def select(self, timeout: float | None = ...) -> list[Tuple[SelectorKey, _EventMask]]: ... 
+ def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/shelve.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/shelve.pyi index 90b2aafa4f03..2a211ab34208 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/shelve.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/shelve.pyi @@ -1,7 +1,10 @@ from _typeshed import Self from collections.abc import Iterator, MutableMapping +from dbm import _TFlags from types import TracebackType -from typing import Type, TypeVar, overload +from typing import TypeVar, overload + +__all__ = ["Shelf", "BsdDbShelf", "DbfilenameShelf", "open"] _T = TypeVar("_T") _VT = TypeVar("_VT") @@ -19,9 +22,10 @@ class Shelf(MutableMapping[str, _VT]): def __getitem__(self, key: str) -> _VT: ... def __setitem__(self, key: str, value: _VT) -> None: ... def __delitem__(self, key: str) -> None: ... + def __contains__(self, key: str) -> bool: ... # type: ignore[override] def __enter__(self: Self) -> Self: ... def __exit__( - self, type: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def close(self) -> None: ... def sync(self) -> None: ... @@ -34,6 +38,6 @@ class BsdDbShelf(Shelf[_VT]): def last(self) -> tuple[str, _VT]: ... class DbfilenameShelf(Shelf[_VT]): - def __init__(self, filename: str, flag: str = ..., protocol: int | None = ..., writeback: bool = ...) -> None: ... + def __init__(self, filename: str, flag: _TFlags = ..., protocol: int | None = ..., writeback: bool = ...) -> None: ... -def open(filename: str, flag: str = ..., protocol: int | None = ..., writeback: bool = ...) -> Shelf[object]: ... +def open(filename: str, flag: _TFlags = ..., protocol: int | None = ..., writeback: bool = ...) 
-> Shelf[object]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/shlex.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/shlex.pyi index b517d03daf93..abe5dea58d7a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/shlex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/shlex.pyi @@ -1,5 +1,11 @@ import sys -from typing import Any, Iterable, TextIO, Tuple, TypeVar +from _typeshed import Self +from typing import Iterable, TextIO + +if sys.version_info >= (3, 8): + __all__ = ["shlex", "split", "quote", "join"] +else: + __all__ = ["shlex", "split", "quote"] def split(s: str, comments: bool = ..., posix: bool = ...) -> list[str]: ... @@ -8,8 +14,6 @@ if sys.version_info >= (3, 8): def quote(s: str) -> str: ... -_SLT = TypeVar("_SLT", bound=shlex) - class shlex(Iterable[str]): commenters: str wordchars: str @@ -18,7 +22,7 @@ class shlex(Iterable[str]): quotes: str escapedquotes: str whitespace_split: bool - infile: str + infile: str | None instream: TextIO source: str debug: int @@ -36,10 +40,9 @@ class shlex(Iterable[str]): def get_token(self) -> str: ... def push_token(self, tok: str) -> None: ... def read_token(self) -> str: ... - def sourcehook(self, newfile: str) -> Tuple[str, TextIO]: ... - # TODO argument types - def push_source(self, newstream: Any, newfile: Any = ...) -> None: ... + def sourcehook(self, newfile: str) -> tuple[str, TextIO]: ... + def push_source(self, newstream: str | TextIO, newfile: str | None = ...) -> None: ... def pop_source(self) -> None: ... def error_leader(self, infile: str | None = ..., lineno: int | None = ...) -> None: ... - def __iter__(self: _SLT) -> _SLT: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/shutil.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/shutil.pyi index cd2f28c59825..752cf2cb5a8d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/shutil.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/shutil.pyi @@ -1,9 +1,39 @@ import os import sys -from _typeshed import StrPath, SupportsRead, SupportsWrite -from typing import Any, AnyStr, Callable, Iterable, NamedTuple, Sequence, Set, Tuple, TypeVar, Union, overload - -_PathT = TypeVar("_PathT", str, os.PathLike[str]) +from _typeshed import BytesPath, StrOrBytesPath, StrPath, SupportsRead, SupportsWrite +from typing import Any, AnyStr, Callable, Iterable, NamedTuple, Sequence, TypeVar, Union, overload + +__all__ = [ + "copyfileobj", + "copyfile", + "copymode", + "copystat", + "copy", + "copy2", + "copytree", + "move", + "rmtree", + "Error", + "SpecialFileError", + "ExecError", + "make_archive", + "get_archive_formats", + "register_archive_format", + "unregister_archive_format", + "get_unpack_formats", + "register_unpack_format", + "unregister_unpack_format", + "unpack_archive", + "ignore_patterns", + "chown", + "which", + "get_terminal_size", + "SameFileError", + "disk_usage", +] + +_StrOrBytesPathT = TypeVar("_StrOrBytesPathT", bound=StrOrBytesPath) +_StrPathT = TypeVar("_StrPathT", bound=StrPath) # Return value of some functions that may either return a path-like object that was passed in or # a string _PathReturn = Any @@ -16,12 +46,18 @@ class ReadError(OSError): ... class RegistryError(Exception): ... def copyfileobj(fsrc: SupportsRead[AnyStr], fdst: SupportsWrite[AnyStr], length: int = ...) -> None: ... -def copyfile(src: StrPath, dst: _PathT, *, follow_symlinks: bool = ...) -> _PathT: ... -def copymode(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> None: ... -def copystat(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> None: ... 
+def copyfile(src: StrOrBytesPath, dst: _StrOrBytesPathT, *, follow_symlinks: bool = ...) -> _StrOrBytesPathT: ... +def copymode(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... +def copystat(src: StrOrBytesPath, dst: StrOrBytesPath, *, follow_symlinks: bool = ...) -> None: ... +@overload def copy(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +@overload +def copy(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +@overload def copy2(src: StrPath, dst: StrPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... -def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], Set[str]]: ... +@overload +def copy2(src: BytesPath, dst: BytesPath, *, follow_symlinks: bool = ...) -> _PathReturn: ... +def ignore_patterns(*patterns: StrPath) -> Callable[[Any, list[str]], set[str]]: ... if sys.version_info >= (3, 8): def copytree( @@ -44,10 +80,13 @@ else: ignore_dangling_symlinks: bool = ..., ) -> _PathReturn: ... -def rmtree(path: bytes | StrPath, ignore_errors: bool = ..., onerror: Callable[[Any, Any, Any], Any] | None = ...) -> None: ... +def rmtree(path: StrOrBytesPath, ignore_errors: bool = ..., onerror: Callable[[Any, Any, Any], Any] | None = ...) -> None: ... _CopyFn = Union[Callable[[str, str], None], Callable[[StrPath, StrPath], None]] +# N.B. shutil.move appears to take bytes arguments, however, +# this does not work when dst is (or is within) an existing directory. +# (#6832) if sys.version_info >= (3, 9): def move(src: StrPath, dst: StrPath, copy_function: _CopyFn = ...) -> _PathReturn: ... @@ -60,17 +99,24 @@ class _ntuple_diskusage(NamedTuple): used: int free: int -def disk_usage(path: StrPath) -> _ntuple_diskusage: ... -def chown(path: StrPath, user: str | int | None = ..., group: str | int | None = ...) -> None: ... +def disk_usage(path: int | StrOrBytesPath) -> _ntuple_diskusage: ... 
+@overload +def chown(path: StrOrBytesPath, user: str | int, group: None = ...) -> None: ... +@overload +def chown(path: StrOrBytesPath, user: None = ..., *, group: str | int) -> None: ... +@overload +def chown(path: StrOrBytesPath, user: None, group: str | int) -> None: ... +@overload +def chown(path: StrOrBytesPath, user: str | int, group: str | int) -> None: ... if sys.version_info >= (3, 8): @overload - def which(cmd: StrPath, mode: int = ..., path: StrPath | None = ...) -> str | None: ... + def which(cmd: _StrPathT, mode: int = ..., path: StrPath | None = ...) -> str | _StrPathT | None: ... @overload def which(cmd: bytes, mode: int = ..., path: StrPath | None = ...) -> bytes | None: ... else: - def which(cmd: StrPath, mode: int = ..., path: StrPath | None = ...) -> str | None: ... + def which(cmd: _StrPathT, mode: int = ..., path: StrPath | None = ...) -> str | _StrPathT | None: ... def make_archive( base_name: str, @@ -83,12 +129,14 @@ def make_archive( group: str | None = ..., logger: Any | None = ..., ) -> str: ... -def get_archive_formats() -> list[Tuple[str, str]]: ... +def get_archive_formats() -> list[tuple[str, str]]: ... +@overload def register_archive_format( - name: str, - function: Callable[..., Any], - extra_args: Sequence[Tuple[str, Any] | list[Any]] | None = ..., - description: str = ..., + name: str, function: Callable[..., object], extra_args: Sequence[tuple[str, Any] | list[Any]], description: str = ... +) -> None: ... +@overload +def register_archive_format( + name: str, function: Callable[[str, str], object], extra_args: None = ..., description: str = ... ) -> None: ... def unregister_archive_format(name: str) -> None: ... @@ -99,9 +147,18 @@ else: # See http://bugs.python.org/issue30218 def unpack_archive(filename: str, extract_dir: StrPath | None = ..., format: str | None = ...) -> None: ... 
+@overload +def register_unpack_format( + name: str, + extensions: list[str], + function: Callable[..., object], + extra_args: Sequence[tuple[str, Any]], + description: str = ..., +) -> None: ... +@overload def register_unpack_format( - name: str, extensions: list[str], function: Any, extra_args: Sequence[Tuple[str, Any]] | None = ..., description: str = ... + name: str, extensions: list[str], function: Callable[[str, str], object], extra_args: None = ..., description: str = ... ) -> None: ... def unregister_unpack_format(name: str) -> None: ... -def get_unpack_formats() -> list[Tuple[str, list[str], str]]: ... -def get_terminal_size(fallback: Tuple[int, int] = ...) -> os.terminal_size: ... +def get_unpack_formats() -> list[tuple[str, list[str], str]]: ... +def get_terminal_size(fallback: tuple[int, int] = ...) -> os.terminal_size: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/signal.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/signal.pyi index 21ad232c7189..a6bc2daad4d0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/signal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/signal.pyi @@ -1,56 +1,40 @@ import sys +from _typeshed import structseq from enum import IntEnum from types import FrameType -from typing import Any, Callable, Iterable, Optional, Set, Tuple, Union - -if sys.platform != "win32": - class ItimerError(IOError): ... 
- ITIMER_PROF: int - ITIMER_REAL: int - ITIMER_VIRTUAL: int +from typing import Any, Callable, Iterable, Optional, Union +from typing_extensions import final NSIG: int class Signals(IntEnum): SIGABRT: int - if sys.platform != "win32": - SIGALRM: int + SIGEMT: int + SIGFPE: int + SIGILL: int + SIGINFO: int + SIGINT: int + SIGSEGV: int + SIGTERM: int + if sys.platform == "win32": SIGBREAK: int - if sys.platform != "win32": + CTRL_C_EVENT: int + CTRL_BREAK_EVENT: int + else: + SIGALRM: int SIGBUS: int SIGCHLD: int - if sys.platform != "darwin" and sys.platform != "win32": - SIGCLD: int - if sys.platform != "win32": SIGCONT: int - SIGEMT: int - SIGFPE: int - if sys.platform != "win32": SIGHUP: int - SIGILL: int - SIGINFO: int - SIGINT: int - if sys.platform != "win32": SIGIO: int SIGIOT: int SIGKILL: int SIGPIPE: int - if sys.platform != "darwin" and sys.platform != "win32": - SIGPOLL: int - SIGPWR: int - if sys.platform != "win32": SIGPROF: int SIGQUIT: int - if sys.platform != "darwin" and sys.platform != "win32": - SIGRTMAX: int - SIGRTMIN: int - SIGSEGV: int - if sys.platform != "win32": SIGSTOP: int SIGSYS: int - SIGTERM: int - if sys.platform != "win32": SIGTRAP: int SIGTSTP: int SIGTTIN: int @@ -62,65 +46,60 @@ class Signals(IntEnum): SIGWINCH: int SIGXCPU: int SIGXFSZ: int + if sys.platform != "darwin": + SIGCLD: int + SIGPOLL: int + SIGPWR: int + SIGRTMAX: int + SIGRTMIN: int class Handlers(IntEnum): SIG_DFL: int SIG_IGN: int -SIG_DFL = Handlers.SIG_DFL -SIG_IGN = Handlers.SIG_IGN - -if sys.platform != "win32": - class Sigmasks(IntEnum): - SIG_BLOCK: int - SIG_UNBLOCK: int - SIG_SETMASK: int - SIG_BLOCK = Sigmasks.SIG_BLOCK - SIG_UNBLOCK = Sigmasks.SIG_UNBLOCK - SIG_SETMASK = Sigmasks.SIG_SETMASK +SIG_DFL: Handlers +SIG_IGN: Handlers _SIGNUM = Union[int, Signals] _HANDLER = Union[Callable[[int, Optional[FrameType]], Any], int, Handlers, None] +def default_int_handler(__signalnum: int, __frame: FrameType | None) -> None: ... 
+ +if sys.version_info >= (3, 10): # arguments changed in 3.10.2 + def getsignal(signalnum: _SIGNUM) -> _HANDLER: ... + def signal(signalnum: _SIGNUM, handler: _HANDLER) -> _HANDLER: ... + +else: + def getsignal(__signalnum: _SIGNUM) -> _HANDLER: ... + def signal(__signalnum: _SIGNUM, __handler: _HANDLER) -> _HANDLER: ... + SIGABRT: Signals -if sys.platform != "win32": - SIGALRM: Signals +SIGEMT: Signals +SIGFPE: Signals +SIGILL: Signals +SIGINFO: Signals +SIGINT: Signals +SIGSEGV: Signals +SIGTERM: Signals + if sys.platform == "win32": SIGBREAK: Signals -if sys.platform != "win32": + CTRL_C_EVENT: Signals + CTRL_BREAK_EVENT: Signals +else: + SIGALRM: Signals SIGBUS: Signals SIGCHLD: Signals -if sys.platform != "darwin" and sys.platform != "win32": - SIGCLD: Signals -if sys.platform != "win32": SIGCONT: Signals -SIGEMT: Signals -SIGFPE: Signals -if sys.platform != "win32": SIGHUP: Signals -SIGILL: Signals -SIGINFO: Signals -SIGINT: Signals -if sys.platform != "win32": SIGIO: Signals SIGIOT: Signals SIGKILL: Signals SIGPIPE: Signals -if sys.platform != "darwin" and sys.platform != "win32": - SIGPOLL: Signals - SIGPWR: Signals -if sys.platform != "win32": SIGPROF: Signals SIGQUIT: Signals -if sys.platform != "darwin" and sys.platform != "win32": - SIGRTMAX: Signals - SIGRTMIN: Signals -SIGSEGV: Signals -if sys.platform != "win32": SIGSTOP: Signals SIGSYS: Signals -SIGTERM: Signals -if sys.platform != "win32": SIGTRAP: Signals SIGTSTP: Signals SIGTTIN: Signals @@ -133,63 +112,67 @@ if sys.platform != "win32": SIGXCPU: Signals SIGXFSZ: Signals -if sys.platform == "win32": - CTRL_C_EVENT: int - CTRL_BREAK_EVENT: int - -if sys.platform != "win32" and sys.platform != "darwin": - class struct_siginfo(Tuple[int, int, int, int, int, int, int]): - def __init__(self, sequence: Iterable[int]) -> None: ... - @property - def si_signo(self) -> int: ... - @property - def si_code(self) -> int: ... - @property - def si_errno(self) -> int: ... 
- @property - def si_pid(self) -> int: ... - @property - def si_uid(self) -> int: ... - @property - def si_status(self) -> int: ... - @property - def si_band(self) -> int: ... - -if sys.platform != "win32": - def alarm(__seconds: int) -> int: ... + class ItimerError(IOError): ... + ITIMER_PROF: int + ITIMER_REAL: int + ITIMER_VIRTUAL: int -def default_int_handler(signum: int, frame: FrameType) -> None: ... + class Sigmasks(IntEnum): + SIG_BLOCK: int + SIG_UNBLOCK: int + SIG_SETMASK: int + SIG_BLOCK = Sigmasks.SIG_BLOCK + SIG_UNBLOCK = Sigmasks.SIG_UNBLOCK + SIG_SETMASK = Sigmasks.SIG_SETMASK + def alarm(__seconds: int) -> int: ... + def getitimer(__which: int) -> tuple[float, float]: ... + def pause() -> None: ... + def pthread_kill(__thread_id: int, __signalnum: int) -> None: ... + if sys.version_info >= (3, 10): # arguments changed in 3.10.2 + def pthread_sigmask(how: int, mask: Iterable[int]) -> set[_SIGNUM]: ... + else: + def pthread_sigmask(__how: int, __mask: Iterable[int]) -> set[_SIGNUM]: ... -if sys.platform != "win32": - def getitimer(__which: int) -> Tuple[float, float]: ... + def setitimer(__which: int, __seconds: float, __interval: float = ...) -> tuple[float, float]: ... + def siginterrupt(__signalnum: int, __flag: bool) -> None: ... + def sigpending() -> Any: ... + if sys.version_info >= (3, 10): # argument changed in 3.10.2 + def sigwait(sigset: Iterable[int]) -> _SIGNUM: ... + else: + def sigwait(__sigset: Iterable[int]) -> _SIGNUM: ... + if sys.platform != "darwin": + SIGCLD: Signals + SIGPOLL: Signals + SIGPWR: Signals + SIGRTMAX: Signals + SIGRTMIN: Signals + @final + class struct_siginfo(structseq[int], tuple[int, int, int, int, int, int, int]): + @property + def si_signo(self) -> int: ... + @property + def si_code(self) -> int: ... + @property + def si_errno(self) -> int: ... + @property + def si_pid(self) -> int: ... + @property + def si_uid(self) -> int: ... + @property + def si_status(self) -> int: ... 
+ @property + def si_band(self) -> int: ... -def getsignal(__signalnum: _SIGNUM) -> _HANDLER: ... + def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ... + def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ... if sys.version_info >= (3, 8): def strsignal(__signalnum: _SIGNUM) -> str | None: ... - def valid_signals() -> Set[Signals]: ... + def valid_signals() -> set[Signals]: ... def raise_signal(__signalnum: _SIGNUM) -> None: ... -if sys.platform != "win32": - def pause() -> None: ... - def pthread_kill(__thread_id: int, __signalnum: int) -> None: ... - def pthread_sigmask(__how: int, __mask: Iterable[int]) -> Set[_SIGNUM]: ... - if sys.version_info >= (3, 7): def set_wakeup_fd(fd: int, *, warn_on_full_buffer: bool = ...) -> int: ... else: def set_wakeup_fd(fd: int) -> int: ... - -if sys.platform != "win32": - def setitimer(__which: int, __seconds: float, __interval: float = ...) -> Tuple[float, float]: ... - def siginterrupt(__signalnum: int, __flag: bool) -> None: ... - -def signal(__signalnum: _SIGNUM, __handler: _HANDLER) -> _HANDLER: ... - -if sys.platform != "win32": - def sigpending() -> Any: ... - def sigwait(__sigset: Iterable[int]) -> _SIGNUM: ... - if sys.platform != "darwin": - def sigtimedwait(sigset: Iterable[int], timeout: float) -> struct_siginfo | None: ... - def sigwaitinfo(sigset: Iterable[int]) -> struct_siginfo: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/site.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/site.pyi index fc331c113163..a73d188a7e5c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/site.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/site.pyi @@ -1,3 +1,4 @@ +from _typeshed import StrPath from typing import Iterable PREFIXES: list[str] @@ -6,7 +7,21 @@ USER_SITE: str | None USER_BASE: str | None def main() -> None: ... -def addsitedir(sitedir: str, known_paths: Iterable[str] | None = ...) -> None: ... +def abs_paths() -> None: ... 
# undocumented +def addpackage(sitedir: StrPath, name: StrPath, known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def addsitedir(sitedir: str, known_paths: set[str] | None = ...) -> None: ... +def addsitepackages(known_paths: set[str] | None, prefixes: Iterable[str] | None = ...) -> set[str] | None: ... # undocumented +def addusersitepackages(known_paths: set[str] | None) -> set[str] | None: ... # undocumented +def check_enableusersite() -> bool | None: ... # undocumented +def enablerlcompleter() -> None: ... # undocumented +def execsitecustomize() -> None: ... # undocumented +def execusercustomize() -> None: ... # undocumented def getsitepackages(prefixes: Iterable[str] | None = ...) -> list[str]: ... def getuserbase() -> str: ... def getusersitepackages() -> str: ... +def makepath(*paths: StrPath) -> tuple[str, str]: ... # undocumented +def removeduppaths() -> set[str]: ... # undocumented +def setcopyright() -> None: ... # undocumented +def sethelper() -> None: ... # undocumented +def setquit() -> None: ... # undocumented +def venv(known_paths: set[str] | None) -> set[str] | None: ... 
# undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/smtpd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/smtpd.pyi index 5d9307300c7e..037f62a8d6e1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/smtpd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/smtpd.pyi @@ -1,15 +1,22 @@ import asynchat import asyncore import socket -from typing import Any, DefaultDict, Tuple, Type +import sys +from collections import defaultdict +from typing import Any -_Address = Tuple[str, int] # (host, port) +if sys.version_info >= (3, 11): + __all__ = ["SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy"] +else: + __all__ = ["SMTPChannel", "SMTPServer", "DebuggingServer", "PureProxy", "MailmanProxy"] + +_Address = tuple[str, int] # (host, port) class SMTPChannel(asynchat.async_chat): COMMAND: int DATA: int - command_size_limits: DefaultDict[str, int] + command_size_limits: defaultdict[str, int] smtp_server: SMTPServer conn: socket.socket addr: Any @@ -39,7 +46,7 @@ class SMTPChannel(asynchat.async_chat): decode_data: bool = ..., ) -> None: ... # base asynchat.async_chat.push() accepts bytes - def push(self, msg: str) -> None: ... # type: ignore + def push(self, msg: str) -> None: ... # type: ignore[override] def collect_incoming_data(self, data: bytes) -> None: ... def found_terminator(self) -> None: ... def smtp_HELO(self, arg: str) -> None: ... @@ -55,7 +62,7 @@ class SMTPChannel(asynchat.async_chat): def smtp_EXPN(self, arg: str) -> None: ... class SMTPServer(asyncore.dispatcher): - channel_class: Type[SMTPChannel] + channel_class: type[SMTPChannel] data_size_limit: int enable_SMTPUTF8: bool @@ -76,11 +83,8 @@ class SMTPServer(asyncore.dispatcher): class DebuggingServer(SMTPServer): ... class PureProxy(SMTPServer): - def process_message( # type: ignore - self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str - ) -> str | None: ... 
+ def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str) -> str | None: ... # type: ignore[override] -class MailmanProxy(PureProxy): - def process_message( # type: ignore - self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str - ) -> str | None: ... +if sys.version_info < (3, 11): + class MailmanProxy(PureProxy): + def process_message(self, peer: _Address, mailfrom: str, rcpttos: list[str], data: bytes | str) -> str | None: ... # type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/smtplib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/smtplib.pyi index 6b3b9bfad704..5bf95d2eec8c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/smtplib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/smtplib.pyi @@ -1,14 +1,49 @@ +import sys from _typeshed import Self from email.message import Message as _Message from socket import socket from ssl import SSLContext from types import TracebackType -from typing import Any, Dict, Pattern, Protocol, Sequence, Tuple, Type, Union, overload +from typing import Any, Pattern, Protocol, Sequence, Union, overload -_Reply = Tuple[int, bytes] -_SendErrs = Dict[str, _Reply] +if sys.version_info >= (3, 7): + __all__ = [ + "SMTPException", + "SMTPNotSupportedError", + "SMTPServerDisconnected", + "SMTPResponseException", + "SMTPSenderRefused", + "SMTPRecipientsRefused", + "SMTPDataError", + "SMTPConnectError", + "SMTPHeloError", + "SMTPAuthenticationError", + "quoteaddr", + "quotedata", + "SMTP", + "SMTP_SSL", + ] +else: + __all__ = [ + "SMTPException", + "SMTPServerDisconnected", + "SMTPResponseException", + "SMTPSenderRefused", + "SMTPRecipientsRefused", + "SMTPDataError", + "SMTPConnectError", + "SMTPHeloError", + "SMTPAuthenticationError", + "quoteaddr", + "quotedata", + "SMTP", + "SMTP_SSL", + ] + +_Reply = tuple[int, bytes] +_SendErrs = dict[str, _Reply] # Should match source_address for socket.create_connection 
-_SourceAddress = Tuple[Union[bytearray, bytes, str], int] +_SourceAddress = tuple[Union[bytearray, bytes, str], int] SMTP_PORT: int SMTP_SSL_PORT: int @@ -24,19 +59,19 @@ class SMTPServerDisconnected(SMTPException): ... class SMTPResponseException(SMTPException): smtp_code: int smtp_error: bytes | str - args: Tuple[int, bytes | str] | Tuple[int, bytes, str] + args: tuple[int, bytes | str] | tuple[int, bytes, str] def __init__(self, code: int, msg: bytes | str) -> None: ... class SMTPSenderRefused(SMTPResponseException): smtp_code: int smtp_error: bytes sender: str - args: Tuple[int, bytes, str] + args: tuple[int, bytes, str] def __init__(self, code: int, msg: bytes, sender: str) -> None: ... class SMTPRecipientsRefused(SMTPException): recipients: _SendErrs - args: Tuple[_SendErrs] + args: tuple[_SendErrs] def __init__(self, recipients: _SendErrs) -> None: ... class SMTPDataError(SMTPResponseException): ... @@ -78,7 +113,7 @@ class SMTP: ) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, tb: TracebackType | None ) -> None: ... def set_debuglevel(self, debuglevel: int) -> None: ... def connect(self, host: str = ..., port: int = ..., source_address: _SourceAddress | None = ...) -> _Reply: ... @@ -149,6 +184,16 @@ class SMTP_SSL(SMTP): LMTP_PORT: int class LMTP(SMTP): - def __init__( - self, host: str = ..., port: int = ..., local_hostname: str | None = ..., source_address: _SourceAddress | None = ... - ) -> None: ... + if sys.version_info >= (3, 9): + def __init__( + self, + host: str = ..., + port: int = ..., + local_hostname: str | None = ..., + source_address: _SourceAddress | None = ..., + timeout: float = ..., + ) -> None: ... 
+ else: + def __init__( + self, host: str = ..., port: int = ..., local_hostname: str | None = ..., source_address: _SourceAddress | None = ... + ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sndhdr.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sndhdr.pyi index 84d6441a83b1..f4d487607fbb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sndhdr.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sndhdr.pyi @@ -1,6 +1,8 @@ from _typeshed import StrOrBytesPath from typing import NamedTuple +__all__ = ["what", "whathdr"] + class SndHeaders(NamedTuple): filetype: str framerate: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi index f72115301ace..e97a66f5e8c5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/socket.pyi @@ -3,7 +3,7 @@ from _typeshed import ReadableBuffer, Self, WriteableBuffer from collections.abc import Iterable from enum import IntEnum, IntFlag from io import RawIOBase -from typing import Any, BinaryIO, TextIO, TypeVar, overload +from typing import Any, BinaryIO, TextIO, overload from typing_extensions import Literal # Ideally, we'd just do "from _socket import *". 
Unfortunately, socket @@ -341,6 +341,7 @@ if sys.platform == "linux" and sys.version_info >= (3, 8): if sys.platform == "linux" and sys.version_info >= (3, 9): from _socket import ( CAN_J1939 as CAN_J1939, + CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, J1939_FILTER_MAX as J1939_FILTER_MAX, @@ -365,6 +366,8 @@ if sys.platform == "linux" and sys.version_info >= (3, 9): SO_J1939_PROMISC as SO_J1939_PROMISC, SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, ) +if sys.platform == "linux" and sys.version_info >= (3, 10): + from _socket import IPPROTO_MPTCP as IPPROTO_MPTCP if sys.platform == "win32": from _socket import ( RCVALL_IPLEVEL as RCVALL_IPLEVEL, @@ -377,8 +380,6 @@ if sys.platform == "win32": SIO_RCVALL as SIO_RCVALL, ) -_T = TypeVar("_T") - # Re-exported from errno EBADF: int EAGAIN: int @@ -543,7 +544,7 @@ class socket(_socket.socket): ) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__(self, *args: object) -> None: ... - def dup(self: _T) -> _T: ... # noqa: F811 + def dup(self: Self) -> Self: ... # noqa: F811 def accept(self) -> tuple[socket, _RetAddress]: ... # Note that the makefile's documented windows-specific behavior is not represented # mode strings with duplicates are intentionally excluded @@ -569,9 +570,9 @@ class socket(_socket.socket): ) -> BinaryIO: ... def sendfile(self, file: BinaryIO, offset: int = ..., count: int | None = ...) -> int: ... @property - def family(self) -> AddressFamily: ... # type: ignore + def family(self) -> AddressFamily: ... # type: ignore[override] @property - def type(self) -> SocketKind: ... # type: ignore + def type(self) -> SocketKind: ... # type: ignore[override] def get_inheritable(self) -> bool: ... def set_inheritable(self, inheritable: bool) -> None: ... @@ -592,7 +593,7 @@ if sys.platform == "win32": def socketpair(family: int = ..., type: int = ..., proto: int = ...) -> tuple[socket, socket]: ... 
else: - def socketpair( # type: ignore + def socketpair( family: int | AddressFamily | None = ..., type: SocketType | int = ..., proto: int = ... ) -> tuple[socket, socket]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/socketserver.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/socketserver.pyi index 5966b8d10e32..9bdd8ccfe31f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/socketserver.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/socketserver.pyi @@ -2,22 +2,60 @@ import sys import types from _typeshed import Self from socket import socket as _socket -from typing import Any, BinaryIO, Callable, ClassVar, Set, Tuple, Type, TypeVar, Union +from typing import Any, BinaryIO, Callable, ClassVar, Union -_T = TypeVar("_T") -_RequestType = Union[_socket, Tuple[bytes, _socket]] -_AddressType = Union[Tuple[str, int], str] +if sys.platform == "win32": + __all__ = [ + "BaseServer", + "TCPServer", + "UDPServer", + "ThreadingUDPServer", + "ThreadingTCPServer", + "BaseRequestHandler", + "StreamRequestHandler", + "DatagramRequestHandler", + "ThreadingMixIn", + ] +else: + __all__ = [ + "BaseServer", + "TCPServer", + "UDPServer", + "ThreadingUDPServer", + "ThreadingTCPServer", + "BaseRequestHandler", + "StreamRequestHandler", + "DatagramRequestHandler", + "ThreadingMixIn", + "ForkingUDPServer", + "ForkingTCPServer", + "ForkingMixIn", + "UnixStreamServer", + "UnixDatagramServer", + "ThreadingUnixStreamServer", + "ThreadingUnixDatagramServer", + ] +_RequestType = Union[_socket, tuple[bytes, _socket]] +_AddressType = Union[tuple[str, int], str] + +# This can possibly be generic at some point: class BaseServer: address_family: int - RequestHandlerClass: Callable[..., BaseRequestHandler] server_address: tuple[str, int] socket: _socket allow_reuse_address: bool request_queue_size: int socket_type: int timeout: float | None - def __init__(self, server_address: Any, RequestHandlerClass: Callable[..., BaseRequestHandler]) -> 
None: ... + def __init__( + self: Self, server_address: Any, RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler] + ) -> None: ... + # It is not actually a `@property`, but we need a `Self` type: + @property + def RequestHandlerClass(self: Self) -> Callable[[Any, Any, Self], BaseRequestHandler]: ... + @RequestHandlerClass.setter + def RequestHandlerClass(self: Self, val: Callable[[Any, Any, Self], BaseRequestHandler]) -> None: ... def fileno(self) -> int: ... def handle_request(self) -> None: ... def serve_forever(self, poll_interval: float = ...) -> None: ... @@ -33,62 +71,48 @@ class BaseServer: def verify_request(self, request: _RequestType, client_address: _AddressType) -> bool: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None ) -> None: ... def service_actions(self) -> None: ... def shutdown_request(self, request: _RequestType) -> None: ... # undocumented def close_request(self, request: _RequestType) -> None: ... # undocumented class TCPServer(BaseServer): + allow_reuse_port: bool + request_queue_size: int def __init__( - self, + self: Self, server_address: tuple[str, int], - RequestHandlerClass: Callable[..., BaseRequestHandler], + RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... def get_request(self) -> tuple[_socket, Any]: ... - def finish_request(self, request: _RequestType, client_address: _AddressType) -> None: ... - def handle_error(self, request: _RequestType, client_address: _AddressType) -> None: ... - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... - def verify_request(self, request: _RequestType, client_address: _AddressType) -> bool: ... 
- def shutdown_request(self, request: _RequestType) -> None: ... # undocumented - def close_request(self, request: _RequestType) -> None: ... # undocumented class UDPServer(BaseServer): - def __init__( - self, - server_address: tuple[str, int], - RequestHandlerClass: Callable[..., BaseRequestHandler], - bind_and_activate: bool = ..., - ) -> None: ... + max_packet_size: ClassVar[int] def get_request(self) -> tuple[tuple[bytes, _socket], Any]: ... - def finish_request(self, request: _RequestType, client_address: _AddressType) -> None: ... - def handle_error(self, request: _RequestType, client_address: _AddressType) -> None: ... - def process_request(self, request: _RequestType, client_address: _AddressType) -> None: ... - def verify_request(self, request: _RequestType, client_address: _AddressType) -> bool: ... - def shutdown_request(self, request: _RequestType) -> None: ... # undocumented - def close_request(self, request: _RequestType) -> None: ... # undocumented if sys.platform != "win32": class UnixStreamServer(BaseServer): def __init__( - self, + self: Self, server_address: str | bytes, - RequestHandlerClass: Callable[..., BaseRequestHandler], + RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... + class UnixDatagramServer(BaseServer): def __init__( - self, + self: Self, server_address: str | bytes, - RequestHandlerClass: Callable[..., BaseRequestHandler], + RequestHandlerClass: Callable[[Any, Any, Self], BaseRequestHandler], bind_and_activate: bool = ..., ) -> None: ... 
if sys.platform != "win32": class ForkingMixIn: timeout: float | None # undocumented - active_children: Set[int] | None # undocumented + active_children: set[int] | None # undocumented max_children: int # undocumented if sys.version_info >= (3, 7): block_on_close: bool diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/spwd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/spwd.pyi index 0f8d36fee945..82d9b8dbfffc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/spwd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/spwd.pyi @@ -1,15 +1,29 @@ -from typing import NamedTuple +import sys +from _typeshed import structseq +from typing import Any +from typing_extensions import final -class struct_spwd(NamedTuple): - sp_namp: str - sp_pwdp: str - sp_lstchg: int - sp_min: int - sp_max: int - sp_warn: int - sp_inact: int - sp_expire: int - sp_flag: int +if sys.platform != "win32": + @final + class struct_spwd(structseq[Any], tuple[str, str, int, int, int, int, int, int, int]): + @property + def sp_namp(self) -> str: ... + @property + def sp_pwdp(self) -> str: ... + @property + def sp_lstchg(self) -> int: ... + @property + def sp_min(self) -> int: ... + @property + def sp_max(self) -> int: ... + @property + def sp_warn(self) -> int: ... + @property + def sp_inact(self) -> int: ... + @property + def sp_expire(self) -> int: ... + @property + def sp_flag(self) -> int: ... -def getspall() -> list[struct_spwd]: ... -def getspnam(__arg: str) -> struct_spwd: ... + def getspall() -> list[struct_spwd]: ... + def getspnam(__arg: str) -> struct_spwd: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi index d5d20d67b58e..d747be90fd0a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/__init__.pyi @@ -1 +1 @@ -from sqlite3.dbapi2 import * # noqa: F403 +from sqlite3.dbapi2 import * diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi index abce96305c7b..456c94362a8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sqlite3/dbapi2.pyi @@ -1,7 +1,8 @@ import sys from _typeshed import Self, StrOrBytesPath from datetime import date, datetime, time -from typing import Any, Callable, Generator, Iterable, Iterator, Protocol, Tuple, Type, TypeVar +from typing import Any, Callable, Generator, Iterable, Iterator, Protocol, TypeVar +from typing_extensions import Literal, final _T = TypeVar("_T") @@ -16,8 +17,8 @@ def DateFromTicks(ticks: float) -> Date: ... def TimeFromTicks(ticks: float) -> Time: ... def TimestampFromTicks(ticks: float) -> Timestamp: ... -version_info: Tuple[int, int, int] -sqlite_version_info: Tuple[int, int, int] +version_info: tuple[int, int, int] +sqlite_version_info: tuple[int, int, int] Binary = memoryview # The remaining definitions are imported from _sqlite3. 
@@ -35,9 +36,13 @@ SQLITE_CREATE_TEMP_TRIGGER: int SQLITE_CREATE_TEMP_VIEW: int SQLITE_CREATE_TRIGGER: int SQLITE_CREATE_VIEW: int +if sys.version_info >= (3, 7): + SQLITE_CREATE_VTABLE: int SQLITE_DELETE: int SQLITE_DENY: int SQLITE_DETACH: int +if sys.version_info >= (3, 7): + SQLITE_DONE: int SQLITE_DROP_INDEX: int SQLITE_DROP_TABLE: int SQLITE_DROP_TEMP_INDEX: int @@ -46,12 +51,31 @@ SQLITE_DROP_TEMP_TRIGGER: int SQLITE_DROP_TEMP_VIEW: int SQLITE_DROP_TRIGGER: int SQLITE_DROP_VIEW: int +if sys.version_info >= (3, 7): + SQLITE_DROP_VTABLE: int + SQLITE_FUNCTION: int SQLITE_IGNORE: int SQLITE_INSERT: int SQLITE_OK: int +if sys.version_info >= (3, 11): + SQLITE_LIMIT_LENGTH: int + SQLITE_LIMIT_SQL_LENGTH: int + SQLITE_LIMIT_COLUMN: int + SQLITE_LIMIT_EXPR_DEPTH: int + SQLITE_LIMIT_COMPOUND_SELECT: int + SQLITE_LIMIT_VDBE_OP: int + SQLITE_LIMIT_FUNCTION_ARG: int + SQLITE_LIMIT_ATTACHED: int + SQLITE_LIMIT_LIKE_PATTERN_LENGTH: int + SQLITE_LIMIT_VARIABLE_NUMBER: int + SQLITE_LIMIT_TRIGGER_DEPTH: int + SQLITE_LIMIT_WORKER_THREADS: int SQLITE_PRAGMA: int SQLITE_READ: int SQLITE_REINDEX: int +if sys.version_info >= (3, 7): + SQLITE_RECURSIVE: int + SQLITE_SAVEPOINT: int SQLITE_SELECT: int SQLITE_TRANSACTION: int SQLITE_UPDATE: int @@ -62,7 +86,7 @@ version: str # TODO: adapt needs to get probed def adapt(obj, protocol, alternate): ... -def complete_statement(sql: str) -> bool: ... +def complete_statement(statement: str) -> bool: ... if sys.version_info >= (3, 7): def connect( @@ -71,7 +95,7 @@ if sys.version_info >= (3, 7): detect_types: int = ..., isolation_level: str | None = ..., check_same_thread: bool = ..., - factory: Type[Connection] | None = ..., + factory: type[Connection] | None = ..., cached_statements: int = ..., uri: bool = ..., ) -> Connection: ... 
@@ -83,18 +107,18 @@ else: detect_types: int = ..., isolation_level: str | None = ..., check_same_thread: bool = ..., - factory: Type[Connection] | None = ..., + factory: type[Connection] | None = ..., cached_statements: int = ..., uri: bool = ..., ) -> Connection: ... def enable_callback_tracebacks(__enable: bool) -> None: ... def enable_shared_cache(enable: int) -> None: ... -def register_adapter(__type: Type[_T], __caster: Callable[[_T], int | float | str | bytes]) -> None: ... +def register_adapter(__type: type[_T], __caster: Callable[[_T], int | float | str | bytes]) -> None: ... def register_converter(__name: str, __converter: Callable[[bytes], Any]) -> None: ... if sys.version_info < (3, 8): - class Cache(object): + class Cache: def __init__(self, *args, **kwargs) -> None: ... def display(self, *args, **kwargs) -> None: ... def get(self, *args, **kwargs) -> None: ... @@ -103,7 +127,7 @@ class _AggregateProtocol(Protocol): def step(self, value: int) -> None: ... def finalize(self) -> int: ... -class Connection(object): +class Connection: DataError: Any DatabaseError: Any Error: Any @@ -128,21 +152,20 @@ class Connection(object): def create_function(self, name: str, narg: int, func: Any, *, deterministic: bool = ...) -> None: ... else: def create_function(self, name: str, num_params: int, func: Any) -> None: ... + def cursor(self, cursorClass: type | None = ...) -> Cursor: ... def execute(self, sql: str, parameters: Iterable[Any] = ...) -> Cursor: ... # TODO: please check in executemany() if seq_of_parameters type is possible like this def executemany(self, __sql: str, __parameters: Iterable[Iterable[Any]]) -> Cursor: ... def executescript(self, __sql_script: bytes | str) -> Cursor: ... - def interrupt(self, *args: Any, **kwargs: Any) -> None: ... - def iterdump(self, *args: Any, **kwargs: Any) -> Generator[str, None, None]: ... - def rollback(self, *args: Any, **kwargs: Any) -> None: ... 
- # TODO: set_authorizer(authorzer_callback) - # see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_authorizer - # returns [SQLITE_OK, SQLITE_DENY, SQLITE_IGNORE] so perhaps int - def set_authorizer(self, *args: Any, **kwargs: Any) -> None: ... - # set_progress_handler(handler, n) -> see https://docs.python.org/2/library/sqlite3.html#sqlite3.Connection.set_progress_handler - def set_progress_handler(self, *args: Any, **kwargs: Any) -> None: ... - def set_trace_callback(self, *args: Any, **kwargs: Any) -> None: ... + def interrupt(self) -> None: ... + def iterdump(self) -> Generator[str, None, None]: ... + def rollback(self) -> None: ... + def set_authorizer( + self, authorizer_callback: Callable[[int, str | None, str | None, str | None, str | None], int] | None + ) -> None: ... + def set_progress_handler(self, progress_handler: Callable[[], bool | None] | None, n: int) -> None: ... + def set_trace_callback(self, trace_callback: Callable[[str], object] | None) -> None: ... # enable_load_extension and load_extension is not available on python distributions compiled # without sqlite3 loadable extension support. see footnotes https://docs.python.org/3/library/sqlite3.html#f1 def enable_load_extension(self, enabled: bool) -> None: ... @@ -157,9 +180,10 @@ class Connection(object): name: str = ..., sleep: float = ..., ) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, t: type | None, exc: BaseException | None, tb: Any | None) -> None: ... + def __exit__(self, __type: type | None, __value: BaseException | None, __traceback: Any | None) -> Literal[False]: ... 
class Cursor(Iterator[Any]): arraysize: Any @@ -167,26 +191,31 @@ class Cursor(Iterator[Any]): description: Any lastrowid: Any row_factory: Any - rowcount: Any + rowcount: int # TODO: Cursor class accepts exactly 1 argument # required type is sqlite3.Connection (which is imported as _Connection) # however, the name of the __init__ variable is unknown def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def close(self, *args: Any, **kwargs: Any) -> None: ... + def close(self) -> None: ... def execute(self, __sql: str, __parameters: Iterable[Any] = ...) -> Cursor: ... def executemany(self, __sql: str, __seq_of_parameters: Iterable[Iterable[Any]]) -> Cursor: ... def executescript(self, __sql_script: bytes | str) -> Cursor: ... def fetchall(self) -> list[Any]: ... def fetchmany(self, size: int | None = ...) -> list[Any]: ... def fetchone(self) -> Any: ... - def setinputsizes(self, *args: Any, **kwargs: Any) -> None: ... - def setoutputsize(self, *args: Any, **kwargs: Any) -> None: ... - def __iter__(self) -> Cursor: ... + def setinputsizes(self, __sizes: object) -> None: ... # does nothing + def setoutputsize(self, __size: object, __column: object = ...) -> None: ... # does nothing + def __iter__(self: Self) -> Self: ... def __next__(self) -> Any: ... class DataError(DatabaseError): ... class DatabaseError(Error): ... -class Error(Exception): ... + +class Error(Exception): + if sys.version_info >= (3, 11): + sqlite_errorcode: int + sqlite_errorname: str + class IntegrityError(DatabaseError): ... class InterfaceError(Error): ... class InternalError(DatabaseError): ... @@ -195,27 +224,29 @@ class OperationalError(DatabaseError): ... OptimizedUnicode = str -class PrepareProtocol(object): +@final +class PrepareProtocol: def __init__(self, *args: Any, **kwargs: Any) -> None: ... class ProgrammingError(DatabaseError): ... -class Row(object): +class Row: def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def keys(self, *args: Any, **kwargs: Any): ... 
- def __eq__(self, other): ... - def __ge__(self, other): ... - def __getitem__(self, index): ... - def __gt__(self, other): ... + def keys(self): ... + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __getitem__(self, __index): ... + def __gt__(self, __other): ... def __hash__(self): ... def __iter__(self): ... - def __le__(self, other): ... + def __le__(self, __other): ... def __len__(self): ... - def __lt__(self, other): ... - def __ne__(self, other): ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... if sys.version_info < (3, 8): - class Statement(object): + @final + class Statement: def __init__(self, *args, **kwargs): ... class Warning(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sre_compile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sre_compile.pyi index aac8c0242764..98a9f4dad008 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sre_compile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sre_compile.pyi @@ -1,18 +1,5 @@ -from sre_constants import ( - SRE_FLAG_ASCII as SRE_FLAG_ASCII, - SRE_FLAG_DEBUG as SRE_FLAG_DEBUG, - SRE_FLAG_DOTALL as SRE_FLAG_DOTALL, - SRE_FLAG_IGNORECASE as SRE_FLAG_IGNORECASE, - SRE_FLAG_LOCALE as SRE_FLAG_LOCALE, - SRE_FLAG_MULTILINE as SRE_FLAG_MULTILINE, - SRE_FLAG_TEMPLATE as SRE_FLAG_TEMPLATE, - SRE_FLAG_UNICODE as SRE_FLAG_UNICODE, - SRE_FLAG_VERBOSE as SRE_FLAG_VERBOSE, - SRE_INFO_CHARSET as SRE_INFO_CHARSET, - SRE_INFO_LITERAL as SRE_INFO_LITERAL, - SRE_INFO_PREFIX as SRE_INFO_PREFIX, - _NamedIntConstant, -) +from sre_constants import * +from sre_constants import _NamedIntConstant from sre_parse import SubPattern from typing import Any, Pattern diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sre_constants.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sre_constants.pyi index 4658d0e4b175..00644994fe3e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sre_constants.pyi 
+++ b/packages/pyright-internal/typeshed-fallback/stdlib/sre_constants.pyi @@ -1,6 +1,9 @@ import sys +from _typeshed import Self from typing import Any +MAXGROUPS: int + MAGIC: int class error(Exception): @@ -13,13 +16,16 @@ class error(Exception): class _NamedIntConstant(int): name: Any - def __new__(cls, value: int, name: str) -> _NamedIntConstant: ... + def __new__(cls: type[Self], value: int, name: str) -> Self: ... MAXREPEAT: _NamedIntConstant OPCODES: list[_NamedIntConstant] ATCODES: list[_NamedIntConstant] CHCODES: list[_NamedIntConstant] OP_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +if sys.version_info >= (3, 7): + OP_LOCALE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] + OP_UNICODE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] AT_MULTILINE: dict[_NamedIntConstant, _NamedIntConstant] AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] @@ -75,6 +81,14 @@ SUBPATTERN: _NamedIntConstant MIN_REPEAT_ONE: _NamedIntConstant if sys.version_info >= (3, 7): RANGE_UNI_IGNORE: _NamedIntConstant + GROUPREF_LOC_IGNORE: _NamedIntConstant + GROUPREF_UNI_IGNORE: _NamedIntConstant + IN_LOC_IGNORE: _NamedIntConstant + IN_UNI_IGNORE: _NamedIntConstant + LITERAL_LOC_IGNORE: _NamedIntConstant + LITERAL_UNI_IGNORE: _NamedIntConstant + NOT_LITERAL_LOC_IGNORE: _NamedIntConstant + NOT_LITERAL_UNI_IGNORE: _NamedIntConstant else: RANGE_IGNORE: _NamedIntConstant MIN_REPEAT: _NamedIntConstant diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sre_parse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sre_parse.pyi index a242ca34afd2..56d551180f5f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sre_parse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sre_parse.pyi @@ -1,17 +1,20 @@ import sys +from sre_constants import * from sre_constants import _NamedIntConstant as _NIC, error as _Error -from typing import Any, FrozenSet, Iterable, List, Match, Optional, 
Pattern as _Pattern, Tuple, Union, overload +from typing import Any, Iterable, Match, Optional, Pattern as _Pattern, Union, overload SPECIAL_CHARS: str REPEAT_CHARS: str -DIGITS: FrozenSet[str] -OCTDIGITS: FrozenSet[str] -HEXDIGITS: FrozenSet[str] -ASCIILETTERS: FrozenSet[str] -WHITESPACE: FrozenSet[str] -ESCAPES: dict[str, Tuple[_NIC, int]] -CATEGORIES: dict[str, Tuple[_NIC, _NIC] | Tuple[_NIC, list[Tuple[_NIC, _NIC]]]] +DIGITS: frozenset[str] +OCTDIGITS: frozenset[str] +HEXDIGITS: frozenset[str] +ASCIILETTERS: frozenset[str] +WHITESPACE: frozenset[str] +ESCAPES: dict[str, tuple[_NIC, int]] +CATEGORIES: dict[str, tuple[_NIC, _NIC] | tuple[_NIC, list[tuple[_NIC, _NIC]]]] FLAGS: dict[str, int] +if sys.version_info >= (3, 7): + TYPE_FLAGS: int GLOBAL_FLAGS: int class Verbose(Exception): ... @@ -24,7 +27,7 @@ class _State: def __init__(self) -> None: ... @property def groups(self) -> int: ... - def opengroup(self, name: str = ...) -> int: ... + def opengroup(self, name: str | None = ...) -> int: ... def closegroup(self, gid: int, p: SubPattern) -> None: ... def checkgroup(self, gid: int) -> bool: ... def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ... 
@@ -34,12 +37,12 @@ if sys.version_info >= (3, 8): else: Pattern = _State -_OpSubpatternType = Tuple[Optional[int], int, int, SubPattern] -_OpGroupRefExistsType = Tuple[int, SubPattern, SubPattern] -_OpInType = List[Tuple[_NIC, int]] -_OpBranchType = Tuple[None, List[SubPattern]] +_OpSubpatternType = tuple[Optional[int], int, int, SubPattern] +_OpGroupRefExistsType = tuple[int, SubPattern, SubPattern] +_OpInType = list[tuple[_NIC, int]] +_OpBranchType = tuple[None, list[SubPattern]] _AvType = Union[_OpInType, _OpBranchType, Iterable[SubPattern], _OpGroupRefExistsType, _OpSubpatternType] -_CodeType = Tuple[_NIC, _AvType] +_CodeType = tuple[_NIC, _AvType] class SubPattern: data: list[_CodeType] @@ -51,6 +54,7 @@ class SubPattern: else: pattern: Pattern def __init__(self, pattern: Pattern, data: list[_CodeType] | None = ...) -> None: ... + def dump(self, level: int = ...) -> None: ... def __len__(self) -> int: ... def __delitem__(self, index: int | slice) -> None: ... @@ -74,6 +78,7 @@ class Tokenizer: def getuntil(self, terminator: str, name: str) -> str: ... else: def getuntil(self, terminator: str) -> str: ... + @property def pos(self) -> int: ... def tell(self) -> int: ... @@ -82,8 +87,8 @@ class Tokenizer: def fix_flags(src: str | bytes, flags: int) -> int: ... -_TemplateType = Tuple[List[Tuple[int, int]], List[Optional[str]]] -_TemplateByteType = Tuple[List[Tuple[int, int]], List[Optional[bytes]]] +_TemplateType = tuple[list[tuple[int, int]], list[Optional[str]]] +_TemplateByteType = tuple[list[tuple[int, int]], list[Optional[bytes]]] if sys.version_info >= (3, 8): def parse(str: str, flags: int = ..., state: State | None = ...) -> SubPattern: ... 
@overload diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi index 759b992f6c1c..fb9acb011e4c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/ssl.pyi @@ -2,14 +2,14 @@ import enum import socket import sys from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer -from typing import Any, Callable, ClassVar, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Type, Union, overload -from typing_extensions import Literal, TypedDict +from typing import Any, Callable, ClassVar, Iterable, NamedTuple, Optional, Union, overload +from typing_extensions import Literal, TypedDict, final -_PCTRTT = Tuple[Tuple[str, str], ...] -_PCTRTTT = Tuple[_PCTRTT, ...] -_PeerCertRetDictType = Dict[str, Union[str, _PCTRTTT, _PCTRTT]] +_PCTRTT = tuple[tuple[str, str], ...] +_PCTRTTT = tuple[_PCTRTT, ...] +_PeerCertRetDictType = dict[str, Union[str, _PCTRTTT, _PCTRTT]] _PeerCertRetType = Union[_PeerCertRetDictType, bytes, None] -_EnumRetType = List[Tuple[bytes, str, Union[Set[str], bool]]] +_EnumRetType = list[tuple[bytes, str, Union[set[str], bool]]] _PasswordType = Union[Callable[[], Union[str, bytes]], str, bytes] _SrvnmeCbType = Callable[[Union[SSLSocket, SSLObject], Optional[str], SSLSocket], Optional[int]] @@ -96,13 +96,21 @@ else: _create_default_https_context: Callable[..., SSLContext] def RAND_bytes(__num: int) -> bytes: ... -def RAND_pseudo_bytes(__num: int) -> Tuple[bytes, bool]: ... +def RAND_pseudo_bytes(__num: int) -> tuple[bytes, bool]: ... def RAND_status() -> bool: ... def RAND_egd(path: str) -> None: ... def RAND_add(__s: bytes, __entropy: float) -> None: ... def match_hostname(cert: _PeerCertRetType, hostname: str) -> None: ... def cert_time_to_seconds(cert_time: str) -> int: ... -def get_server_certificate(addr: Tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ...) -> str: ... 
+ +if sys.version_info >= (3, 10): + def get_server_certificate( + addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ..., timeout: float = ... + ) -> str: ... + +else: + def get_server_certificate(addr: tuple[str, int], ssl_version: int = ..., ca_certs: str | None = ...) -> str: ... + def DER_cert_to_PEM_cert(der_cert_bytes: bytes) -> str: ... def PEM_cert_to_DER_cert(pem_cert_string: str) -> bytes: ... @@ -135,6 +143,9 @@ class VerifyFlags(enum.IntFlag): VERIFY_CRL_CHECK_CHAIN: int VERIFY_X509_STRICT: int VERIFY_X509_TRUSTED_FIRST: int + if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: int + VERIFY_X509_PARTIAL_CHAIN: int VERIFY_DEFAULT: VerifyFlags VERIFY_CRL_CHECK_LEAF: VerifyFlags @@ -142,6 +153,10 @@ VERIFY_CRL_CHECK_CHAIN: VerifyFlags VERIFY_X509_STRICT: VerifyFlags VERIFY_X509_TRUSTED_FIRST: VerifyFlags +if sys.version_info >= (3, 10): + VERIFY_ALLOW_PROXY_CERTS: VerifyFlags + VERIFY_X509_PARTIAL_CHAIN: VerifyFlags + class _SSLMethod(enum.IntEnum): PROTOCOL_SSLv23: int PROTOCOL_SSLv2: int @@ -205,7 +220,7 @@ if sys.version_info >= (3, 7): HAS_TLSv1: bool HAS_TLSv1_1: bool HAS_TLSv1_2: bool - HAS_TLSv1_3: bool +HAS_TLSv1_3: bool HAS_ALPN: bool HAS_ECDH: bool HAS_SNI: bool @@ -213,7 +228,7 @@ HAS_NPN: bool CHANNEL_BINDING_TYPES: list[str] OPENSSL_VERSION: str -OPENSSL_VERSION_INFO: Tuple[int, int, int, int, int] +OPENSSL_VERSION_INFO: tuple[int, int, int, int, int] OPENSSL_VERSION_NUMBER: int class AlertDescription(enum.IntEnum): @@ -279,9 +294,9 @@ class _ASN1Object(NamedTuple): longname: str oid: str @classmethod - def fromnid(cls: Type[Self], nid: int) -> Self: ... + def fromnid(cls: type[Self], nid: int) -> Self: ... @classmethod - def fromname(cls: Type[Self], name: str) -> Self: ... + def fromname(cls: type[Self], name: str) -> Self: ... 
class Purpose(_ASN1Object, enum.Enum): SERVER_AUTH: _ASN1Object @@ -293,7 +308,9 @@ class SSLSocket(socket.socket): server_hostname: str | None session: SSLSession | None session_reused: bool | None - if sys.version_info < (3, 7): + if sys.version_info >= (3, 7): + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + else: def __init__( self, sock: socket.socket | None = ..., @@ -315,8 +332,7 @@ class SSLSocket(socket.socket): _context: SSLContext | None = ..., _session: Any | None = ..., ) -> None: ... - else: - def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def connect(self, addr: socket._Address | bytes) -> None: ... def connect_ex(self, addr: socket._Address | bytes) -> int: ... def recv(self, buflen: int = ..., flags: int = ...) -> bytes: ... @@ -341,13 +357,13 @@ class SSLSocket(socket.socket): def getpeercert(self, binary_form: Literal[True]) -> bytes | None: ... @overload def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... - def cipher(self) -> Tuple[str, str, int] | None: ... - def shared_ciphers(self) -> list[Tuple[str, str, int]] | None: ... + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... def compression(self) -> str | None: ... def get_channel_binding(self, cb_type: str = ...) -> bytes | None: ... def selected_alpn_protocol(self) -> str | None: ... def selected_npn_protocol(self) -> str | None: ... - def accept(self) -> Tuple[SSLSocket, socket._RetAddress]: ... + def accept(self) -> tuple[SSLSocket, socket._RetAddress]: ... def unwrap(self) -> socket.socket: ... def version(self) -> str | None: ... def pending(self) -> int: ... 
@@ -376,12 +392,12 @@ class SSLContext: maximum_version: TLSVersion minimum_version: TLSVersion sni_callback: Callable[[SSLObject, str, SSLContext], None | int] | None - sslobject_class: ClassVar[Type[SSLObject]] - sslsocket_class: ClassVar[Type[SSLSocket]] + sslobject_class: ClassVar[type[SSLObject]] + sslsocket_class: ClassVar[type[SSLSocket]] if sys.version_info >= (3, 8): keylog_filename: str post_handshake_auth: bool - def __new__(cls, protocol: int = ..., *args: Any, **kwargs: Any) -> SSLContext: ... + def __new__(cls: type[Self], protocol: int = ..., *args: Any, **kwargs: Any) -> Self: ... def __init__(self, protocol: int = ...) -> None: ... def cert_store_stats(self) -> dict[str, int]: ... def load_cert_chain( @@ -401,6 +417,7 @@ class SSLContext: def set_servername_callback(self, server_name_callback: _SrvnmeCbType | None) -> None: ... else: def set_servername_callback(self, __method: _SrvnmeCbType | None) -> None: ... + def load_dh_params(self, __path: str) -> None: ... def set_ecdh_curve(self, __name: str) -> None: ... def wrap_socket( @@ -432,6 +449,7 @@ class SSLObject: def __init__(self, *args: Any, **kwargs: Any) -> None: ... else: def __init__(self, sslobj: Any, owner: SSLSocket | SSLObject | None = ..., session: Any | None = ...) -> None: ... + def read(self, len: int = ..., buffer: bytearray | None = ...) -> bytes: ... def write(self, data: bytes) -> int: ... @overload @@ -442,8 +460,8 @@ class SSLObject: def getpeercert(self, binary_form: bool) -> _PeerCertRetType: ... def selected_alpn_protocol(self) -> str | None: ... def selected_npn_protocol(self) -> str | None: ... - def cipher(self) -> Tuple[str, str, int] | None: ... - def shared_ciphers(self) -> list[Tuple[str, str, int]] | None: ... + def cipher(self) -> tuple[str, str, int] | None: ... + def shared_ciphers(self) -> list[tuple[str, str, int]] | None: ... def compression(self) -> str | None: ... def pending(self) -> int: ... def do_handshake(self) -> None: ... 
@@ -453,6 +471,7 @@ class SSLObject: if sys.version_info >= (3, 8): def verify_client_post_handshake(self) -> None: ... +@final class MemoryBIO: pending: int eof: bool @@ -460,6 +479,7 @@ class MemoryBIO: def write(self, __buf: bytes) -> int: ... def write_eof(self) -> None: ... +@final class SSLSession: id: bytes time: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi index d6450fe77817..4518acb5a162 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/stat.pyi @@ -1,97 +1 @@ -import sys - -def S_ISDIR(mode: int) -> bool: ... -def S_ISCHR(mode: int) -> bool: ... -def S_ISBLK(mode: int) -> bool: ... -def S_ISREG(mode: int) -> bool: ... -def S_ISFIFO(mode: int) -> bool: ... -def S_ISLNK(mode: int) -> bool: ... -def S_ISSOCK(mode: int) -> bool: ... -def S_IMODE(mode: int) -> int: ... -def S_IFMT(mode: int) -> int: ... -def S_ISDOOR(mode: int) -> int: ... -def S_ISPORT(mode: int) -> int: ... -def S_ISWHT(mode: int) -> int: ... -def filemode(mode: int) -> str: ... 
- -ST_MODE: int -ST_INO: int -ST_DEV: int -ST_NLINK: int -ST_UID: int -ST_GID: int -ST_SIZE: int -ST_ATIME: int -ST_MTIME: int -ST_CTIME: int - -S_IFSOCK: int -S_IFLNK: int -S_IFREG: int -S_IFBLK: int -S_IFDIR: int -S_IFCHR: int -S_IFIFO: int -S_IFDOOR: int -S_IFPORT: int -S_IFWHT: int -S_ISUID: int -S_ISGID: int -S_ISVTX: int - -S_IRWXU: int -S_IRUSR: int -S_IWUSR: int -S_IXUSR: int - -S_IRWXG: int -S_IRGRP: int -S_IWGRP: int -S_IXGRP: int - -S_IRWXO: int -S_IROTH: int -S_IWOTH: int -S_IXOTH: int - -S_ENFMT: int -S_IREAD: int -S_IWRITE: int -S_IEXEC: int - -UF_NODUMP: int -UF_IMMUTABLE: int -UF_APPEND: int -UF_OPAQUE: int -UF_NOUNLINK: int -if sys.platform == "darwin": - UF_COMPRESSED: int # OS X 10.6+ only - UF_HIDDEN: int # OX X 10.5+ only -SF_ARCHIVED: int -SF_IMMUTABLE: int -SF_APPEND: int -SF_NOUNLINK: int -SF_SNAPSHOT: int - -FILE_ATTRIBUTE_ARCHIVE: int -FILE_ATTRIBUTE_COMPRESSED: int -FILE_ATTRIBUTE_DEVICE: int -FILE_ATTRIBUTE_DIRECTORY: int -FILE_ATTRIBUTE_ENCRYPTED: int -FILE_ATTRIBUTE_HIDDEN: int -FILE_ATTRIBUTE_INTEGRITY_STREAM: int -FILE_ATTRIBUTE_NORMAL: int -FILE_ATTRIBUTE_NOT_CONTENT_INDEXED: int -FILE_ATTRIBUTE_NO_SCRUB_DATA: int -FILE_ATTRIBUTE_OFFLINE: int -FILE_ATTRIBUTE_READONLY: int -FILE_ATTRIBUTE_REPARSE_POINT: int -FILE_ATTRIBUTE_SPARSE_FILE: int -FILE_ATTRIBUTE_SYSTEM: int -FILE_ATTRIBUTE_TEMPORARY: int -FILE_ATTRIBUTE_VIRTUAL: int - -if sys.platform == "win32" and sys.version_info >= (3, 8): - IO_REPARSE_TAG_SYMLINK: int - IO_REPARSE_TAG_MOUNT_POINT: int - IO_REPARSE_TAG_APPEXECLINK: int +from _stat import * diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/statistics.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/statistics.pyi index ec3574ab12b1..1a194dee7199 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/statistics.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/statistics.pyi @@ -1,10 +1,68 @@ import sys -from _typeshed import SupportsLessThanT +from _typeshed import Self, 
SupportsRichComparisonT from decimal import Decimal from fractions import Fraction -from typing import Any, Hashable, Iterable, NamedTuple, Sequence, SupportsFloat, Type, TypeVar, Union +from typing import Any, Hashable, Iterable, NamedTuple, Sequence, SupportsFloat, TypeVar, Union + +if sys.version_info >= (3, 10): + __all__ = [ + "NormalDist", + "StatisticsError", + "correlation", + "covariance", + "fmean", + "geometric_mean", + "harmonic_mean", + "linear_regression", + "mean", + "median", + "median_grouped", + "median_high", + "median_low", + "mode", + "multimode", + "pstdev", + "pvariance", + "quantiles", + "stdev", + "variance", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "NormalDist", + "StatisticsError", + "fmean", + "geometric_mean", + "harmonic_mean", + "mean", + "median", + "median_grouped", + "median_high", + "median_low", + "mode", + "multimode", + "pstdev", + "pvariance", + "quantiles", + "stdev", + "variance", + ] +else: + __all__ = [ + "StatisticsError", + "pstdev", + "pvariance", + "stdev", + "variance", + "median", + "median_low", + "median_high", + "median_grouped", + "mean", + "mode", + "harmonic_mean", + ] -_T = TypeVar("_T") # Most functions in this module accept homogeneous collections of one of these types _Number = Union[float, Decimal, Fraction] _NumberT = TypeVar("_NumberT", float, Decimal, Fraction) @@ -14,8 +72,13 @@ _HashableT = TypeVar("_HashableT", bound=Hashable) class StatisticsError(ValueError): ... -if sys.version_info >= (3, 8): +if sys.version_info >= (3, 11): + def fmean(data: Iterable[SupportsFloat], weights: Iterable[SupportsFloat] | None = ...) -> float: ... + +elif sys.version_info >= (3, 8): def fmean(data: Iterable[SupportsFloat]) -> float: ... + +if sys.version_info >= (3, 8): def geometric_mean(data: Iterable[SupportsFloat]) -> float: ... def mean(data: Iterable[_NumberT]) -> _NumberT: ... @@ -27,8 +90,8 @@ else: def harmonic_mean(data: Iterable[_NumberT]) -> _NumberT: ... 
def median(data: Iterable[_NumberT]) -> _NumberT: ... -def median_low(data: Iterable[SupportsLessThanT]) -> SupportsLessThanT: ... -def median_high(data: Iterable[SupportsLessThanT]) -> SupportsLessThanT: ... +def median_low(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... +def median_high(data: Iterable[SupportsRichComparisonT]) -> SupportsRichComparisonT: ... def median_grouped(data: Iterable[_NumberT], interval: _NumberT = ...) -> _NumberT: ... def mode(data: Iterable[_HashableT]) -> _HashableT: ... @@ -58,7 +121,7 @@ if sys.version_info >= (3, 8): @property def variance(self) -> float: ... @classmethod - def from_samples(cls: Type[_T], data: Iterable[SupportsFloat]) -> _T: ... + def from_samples(cls: type[Self], data: Iterable[SupportsFloat]) -> Self: ... def samples(self, n: int, *, seed: Any | None = ...) -> list[float]: ... def pdf(self, x: float) -> float: ... def cdf(self, x: float) -> float: ... @@ -67,6 +130,8 @@ if sys.version_info >= (3, 8): def quantiles(self, n: int = ...) -> list[float]: ... if sys.version_info >= (3, 9): def zscore(self, x: float) -> float: ... + + def __eq__(self, x2: object) -> bool: ... def __add__(self, x2: float | NormalDist) -> NormalDist: ... def __sub__(self, x2: float | NormalDist) -> NormalDist: ... def __mul__(self, x2: float) -> NormalDist: ... @@ -81,7 +146,15 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): def correlation(__x: Sequence[_Number], __y: Sequence[_Number]) -> float: ... def covariance(__x: Sequence[_Number], __y: Sequence[_Number]) -> float: ... + class LinearRegression(NamedTuple): slope: float intercept: float + +if sys.version_info >= (3, 11): + def linear_regression( + __regressor: Sequence[_Number], __dependent_variable: Sequence[_Number], *, proportional: bool = ... + ) -> LinearRegression: ... + +elif sys.version_info >= (3, 10): def linear_regression(__regressor: Sequence[_Number], __dependent_variable: Sequence[_Number]) -> LinearRegression: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/string.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/string.pyi index ceed38f00931..535f38545132 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/string.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/string.pyi @@ -1,4 +1,26 @@ -from typing import Any, Iterable, Mapping, Sequence, Tuple +import sys +from re import RegexFlag +from typing import Any, Iterable, Mapping, Sequence + +if sys.version_info >= (3, 8): + from re import Pattern +else: + from typing import Pattern + +__all__ = [ + "ascii_letters", + "ascii_lowercase", + "ascii_uppercase", + "capwords", + "digits", + "hexdigits", + "octdigits", + "printable", + "punctuation", + "whitespace", + "Formatter", + "Template", +] ascii_letters: str ascii_lowercase: str @@ -14,6 +36,11 @@ def capwords(s: str, sep: str | None = ...) -> str: ... class Template: template: str + delimiter: str + idpattern: str + braceidpattern: str | None + flags: RegexFlag + pattern: Pattern[str] def __init__(self, template: str) -> None: ... def substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... def safe_substitute(self, __mapping: Mapping[str, object] = ..., **kwds: object) -> str: ... @@ -22,7 +49,7 @@ class Template: class Formatter: def format(self, __format_string: str, *args: Any, **kwargs: Any) -> str: ... def vformat(self, format_string: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> str: ... - def parse(self, format_string: str) -> Iterable[Tuple[str, str | None, str | None, str | None]]: ... + def parse(self, format_string: str) -> Iterable[tuple[str, str | None, str | None, str | None]]: ... def get_field(self, field_name: str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... def get_value(self, key: int | str, args: Sequence[Any], kwargs: Mapping[str, Any]) -> Any: ... 
def check_unused_args(self, used_args: Sequence[int | str], args: Sequence[Any], kwargs: Mapping[str, Any]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/stringprep.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/stringprep.pyi index cbc562d460f6..fc28c027ca9b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/stringprep.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/stringprep.pyi @@ -1,3 +1,11 @@ +b1_set: set[int] +b3_exceptions: dict[int, str] +c22_specials: set[int] +c6_set: set[int] +c7_set: set[int] +c8_set: set[int] +c9_set: set[int] + def in_table_a1(code: str) -> bool: ... def in_table_b1(code: str) -> bool: ... def map_table_b3(code: str) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/struct.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/struct.pyi index d7c9cbef7dce..1f6c45a23c0a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/struct.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/struct.pyi @@ -1,14 +1,16 @@ import sys from _typeshed import ReadableBuffer, WriteableBuffer -from typing import Any, Iterator, Tuple +from typing import Any, Iterator + +__all__ = ["calcsize", "pack", "pack_into", "unpack", "unpack_from", "iter_unpack", "Struct", "error"] class error(Exception): ... def pack(fmt: str | bytes, *v: Any) -> bytes: ... def pack_into(fmt: str | bytes, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... -def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Tuple[Any, ...]: ... -def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = ...) -> Tuple[Any, ...]: ... -def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[Tuple[Any, ...]]: ... +def unpack(__format: str | bytes, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... +def unpack_from(__format: str | bytes, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... 
+def iter_unpack(__format: str | bytes, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... def calcsize(__format: str | bytes) -> int: ... class Struct: @@ -20,6 +22,6 @@ class Struct: def __init__(self, format: str | bytes) -> None: ... def pack(self, *v: Any) -> bytes: ... def pack_into(self, buffer: WriteableBuffer, offset: int, *v: Any) -> None: ... - def unpack(self, __buffer: ReadableBuffer) -> Tuple[Any, ...]: ... - def unpack_from(self, buffer: ReadableBuffer, offset: int = ...) -> Tuple[Any, ...]: ... - def iter_unpack(self, __buffer: ReadableBuffer) -> Iterator[Tuple[Any, ...]]: ... + def unpack(self, __buffer: ReadableBuffer) -> tuple[Any, ...]: ... + def unpack_from(self, buffer: ReadableBuffer, offset: int = ...) -> tuple[Any, ...]: ... + def iter_unpack(self, __buffer: ReadableBuffer) -> Iterator[tuple[Any, ...]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi index ed8dce29363d..8c03fa592f70 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/subprocess.pyi @@ -1,7 +1,7 @@ import sys from _typeshed import Self, StrOrBytesPath from types import TracebackType -from typing import IO, Any, AnyStr, Callable, Generic, Iterable, Mapping, Sequence, Tuple, Type, TypeVar, Union, overload +from typing import IO, Any, AnyStr, Callable, Generic, Iterable, Mapping, Sequence, TypeVar, Union, overload from typing_extensions import Literal if sys.version_info >= (3, 9): @@ -991,6 +991,7 @@ class Popen(Generic[AnyStr]): encoding: str | None = ..., errors: str | None = ..., ) -> Popen[Any]: ... + def poll(self) -> int | None: ... if sys.version_info >= (3, 7): def wait(self, timeout: float | None = ...) -> int: ... 
@@ -1002,21 +1003,26 @@ class Popen(Generic[AnyStr]): input: AnyStr | None = ..., timeout: float | None = ..., # morally this should be optional - ) -> Tuple[AnyStr, AnyStr]: ... + ) -> tuple[AnyStr, AnyStr]: ... def send_signal(self, sig: int) -> None: ... def terminate(self) -> None: ... def kill(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, type: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... # The result really is always a str. -def getstatusoutput(cmd: _TXT) -> Tuple[int, str]: ... +def getstatusoutput(cmd: _TXT) -> tuple[int, str]: ... def getoutput(cmd: _TXT) -> str: ... -def list2cmdline(seq: Iterable[str]) -> str: ... # undocumented + +if sys.version_info >= (3, 8): + def list2cmdline(seq: Iterable[StrOrBytesPath]) -> str: ... # undocumented + +else: + def list2cmdline(seq: Iterable[str]) -> str: ... 
# undocumented if sys.platform == "win32": class STARTUPINFO: @@ -1038,22 +1044,27 @@ if sys.platform == "win32": wShowWindow: int if sys.version_info >= (3, 7): lpAttributeList: Mapping[str, Any] - STD_INPUT_HANDLE: Any - STD_OUTPUT_HANDLE: Any - STD_ERROR_HANDLE: Any - SW_HIDE: int - STARTF_USESTDHANDLES: int - STARTF_USESHOWWINDOW: int - CREATE_NEW_CONSOLE: int - CREATE_NEW_PROCESS_GROUP: int + from _winapi import ( + CREATE_NEW_CONSOLE as CREATE_NEW_CONSOLE, + CREATE_NEW_PROCESS_GROUP as CREATE_NEW_PROCESS_GROUP, + STARTF_USESHOWWINDOW as STARTF_USESHOWWINDOW, + STARTF_USESTDHANDLES as STARTF_USESTDHANDLES, + STD_ERROR_HANDLE as STD_ERROR_HANDLE, + STD_INPUT_HANDLE as STD_INPUT_HANDLE, + STD_OUTPUT_HANDLE as STD_OUTPUT_HANDLE, + SW_HIDE as SW_HIDE, + ) + if sys.version_info >= (3, 7): - ABOVE_NORMAL_PRIORITY_CLASS: int - BELOW_NORMAL_PRIORITY_CLASS: int - HIGH_PRIORITY_CLASS: int - IDLE_PRIORITY_CLASS: int - NORMAL_PRIORITY_CLASS: int - REALTIME_PRIORITY_CLASS: int - CREATE_NO_WINDOW: int - DETACHED_PROCESS: int - CREATE_DEFAULT_ERROR_MODE: int - CREATE_BREAKAWAY_FROM_JOB: int + from _winapi import ( + ABOVE_NORMAL_PRIORITY_CLASS as ABOVE_NORMAL_PRIORITY_CLASS, + BELOW_NORMAL_PRIORITY_CLASS as BELOW_NORMAL_PRIORITY_CLASS, + CREATE_BREAKAWAY_FROM_JOB as CREATE_BREAKAWAY_FROM_JOB, + CREATE_DEFAULT_ERROR_MODE as CREATE_DEFAULT_ERROR_MODE, + CREATE_NO_WINDOW as CREATE_NO_WINDOW, + DETACHED_PROCESS as DETACHED_PROCESS, + HIGH_PRIORITY_CLASS as HIGH_PRIORITY_CLASS, + IDLE_PRIORITY_CLASS as IDLE_PRIORITY_CLASS, + NORMAL_PRIORITY_CLASS as NORMAL_PRIORITY_CLASS, + REALTIME_PRIORITY_CLASS as REALTIME_PRIORITY_CLASS, + ) diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sunau.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sunau.pyi index 8393136aa795..f96dbaaf7be8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sunau.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sunau.pyi @@ -1,6 +1,7 @@ import sys from _typeshed 
import Self -from typing import IO, Any, NamedTuple, NoReturn, Union +from typing import IO, Any, NamedTuple, NoReturn, Union, overload +from typing_extensions import Literal _File = Union[str, IO[bytes]] @@ -72,7 +73,11 @@ class Au_write: def writeframes(self, data: bytes) -> None: ... def close(self) -> None: ... -# Returns a Au_read if mode is rb and Au_write if mode is wb +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Au_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Au_write: ... +@overload def open(f: _File, mode: str | None = ...) -> Any: ... if sys.version_info < (3, 9): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/symbol.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/symbol.pyi index 2d3bd83087c7..234c814b55b5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/symbol.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/symbol.pyi @@ -1,3 +1,5 @@ +import sys + single_input: int file_input: int eval_input: int @@ -84,5 +86,13 @@ comp_if: int encoding_decl: int yield_expr: int yield_arg: int +if sys.version_info >= (3, 7): + sync_comp_for: int +if sys.version_info >= (3, 8): + func_body_suite: int + func_type: int + func_type_input: int + namedexpr_test: int + typelist: int sym_name: dict[int, str] diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/symtable.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/symtable.pyi index 613ac90ef7a9..91e95270901e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/symtable.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/symtable.pyi @@ -1,9 +1,11 @@ import sys -from typing import Any, Sequence, Tuple +from typing import Any, Sequence + +__all__ = ["symtable", "SymbolTable", "Class", "Function", "Symbol"] def symtable(code: str, filename: str, compile_type: str) -> SymbolTable: ... -class SymbolTable(object): +class SymbolTable: def __init__(self, raw_table: Any, filename: str) -> None: ... 
def get_type(self) -> str: ... def get_id(self) -> int: ... @@ -19,21 +21,25 @@ class SymbolTable(object): def get_children(self) -> list[SymbolTable]: ... class Function(SymbolTable): - def get_parameters(self) -> Tuple[str, ...]: ... - def get_locals(self) -> Tuple[str, ...]: ... - def get_globals(self) -> Tuple[str, ...]: ... - def get_frees(self) -> Tuple[str, ...]: ... + def get_parameters(self) -> tuple[str, ...]: ... + def get_locals(self) -> tuple[str, ...]: ... + def get_globals(self) -> tuple[str, ...]: ... + def get_frees(self) -> tuple[str, ...]: ... + if sys.version_info >= (3, 8): + def get_nonlocals(self) -> tuple[str, ...]: ... class Class(SymbolTable): - def get_methods(self) -> Tuple[str, ...]: ... + def get_methods(self) -> tuple[str, ...]: ... -class Symbol(object): +class Symbol: if sys.version_info >= (3, 8): def __init__( self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = ..., *, module_scope: bool = ... ) -> None: ... + def is_nonlocal(self) -> bool: ... else: def __init__(self, name: str, flags: int, namespaces: Sequence[SymbolTable] | None = ...) -> None: ... + def get_name(self) -> str: ... def is_referenced(self) -> bool: ... def is_parameter(self) -> bool: ... @@ -48,7 +54,7 @@ class Symbol(object): def get_namespaces(self) -> Sequence[SymbolTable]: ... def get_namespace(self) -> SymbolTable: ... -class SymbolTableFactory(object): +class SymbolTableFactory: def __init__(self) -> None: ... def new(self, table: Any, filename: str) -> SymbolTable: ... def __call__(self, table: Any, filename: str) -> SymbolTable: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sys.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sys.pyi index e60a58c02ef2..0c4cf5be3783 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sys.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sys.pyi @@ -1,38 +1,22 @@ import sys +from _typeshed import structseq from builtins import object as _object -from importlib.abc import Loader, PathEntryFinder +from importlib.abc import PathEntryFinder from importlib.machinery import ModuleSpec from io import TextIOWrapper from types import FrameType, ModuleType, TracebackType -from typing import ( - Any, - AsyncGenerator, - Callable, - FrozenSet, - NoReturn, - Optional, - Protocol, - Sequence, - TextIO, - Tuple, - Type, - TypeVar, - Union, - overload, -) -from typing_extensions import Literal +from typing import Any, AsyncGenerator, Callable, NoReturn, Optional, Protocol, Sequence, TextIO, TypeVar, Union, overload +from typing_extensions import Literal, final _T = TypeVar("_T") # The following type alias are stub-only and do not exist during runtime -_ExcInfo = Tuple[Type[BaseException], BaseException, TracebackType] -_OptExcInfo = Union[_ExcInfo, Tuple[None, None, None]] -_PathSequence = Sequence[Union[bytes, str]] +_ExcInfo = tuple[type[BaseException], BaseException, TracebackType] +_OptExcInfo = Union[_ExcInfo, tuple[None, None, None]] -# Unlike importlib.abc.MetaPathFinder, invalidate_caches() might not exist (see python docs) +# Intentionally omits one deprecated and one optional method of `importlib.abc.MetaPathFinder` class _MetaPathFinder(Protocol): - def find_module(self, fullname: str, path: _PathSequence | None) -> Loader | None: ... - def find_spec(self, fullname: str, path: _PathSequence | None, target: ModuleType | None = ...) -> ModuleSpec | None: ... + def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ...) -> ModuleSpec | None: ... 
# ----- sys variables ----- if sys.platform != "win32": @@ -47,12 +31,12 @@ if sys.platform == "win32": dllhandle: int dont_write_bytecode: bool displayhook: Callable[[object], Any] -excepthook: Callable[[Type[BaseException], BaseException, TracebackType], Any] +excepthook: Callable[[type[BaseException], BaseException, TracebackType | None], Any] exec_prefix: str executable: str -float_repr_style: str +float_repr_style: Literal["short", "legacy"] hexversion: int -last_type: Type[BaseException] | None +last_type: type[BaseException] | None last_value: BaseException | None last_traceback: TracebackType | None maxsize: int @@ -62,7 +46,7 @@ modules: dict[str, ModuleType] if sys.version_info >= (3, 10): orig_argv: list[str] path: list[str] -path_hooks: list[Any] # TODO precise type; function, path to finder +path_hooks: list[Callable[[str], PathEntryFinder]] path_importer_cache: dict[str, PathEntryFinder | None] platform: str if sys.version_info >= (3, 9): @@ -70,13 +54,13 @@ if sys.version_info >= (3, 9): prefix: str if sys.version_info >= (3, 8): pycache_prefix: str | None -ps1: str -ps2: str +ps1: object +ps2: object stdin: TextIO stdout: TextIO stderr: TextIO if sys.version_info >= (3, 10): - stdlib_module_names: FrozenSet[str] + stdlib_module_names: frozenset[str] __stdin__: TextIOWrapper __stdout__: TextIOWrapper __stderr__: TextIOWrapper @@ -90,49 +74,105 @@ if sys.platform == "win32": winver: str _xoptions: dict[Any, Any] +# Type alias used as a mixin for structseq classes that cannot be instantiated at runtime +# This can't be represented in the type system, so we just use `structseq[Any]` +_uninstantiable_structseq = structseq[Any] + flags: _flags -class _flags: - debug: int - division_warning: int - inspect: int - interactive: int - optimize: int - dont_write_bytecode: int - no_user_site: int - no_site: int - ignore_environment: int - verbose: int - bytes_warning: int - quiet: int - hash_randomization: int +if sys.version_info >= (3, 10): + _FlagTuple = 
tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int, int] +elif sys.version_info >= (3, 7): + _FlagTuple = tuple[int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int] +else: + _FlagTuple = tuple[int, int, int, int, int, int, int, int, int, int, int, int, int] + +@final +class _flags(_uninstantiable_structseq, _FlagTuple): + @property + def debug(self) -> int: ... + @property + def inspect(self) -> int: ... + @property + def interactive(self) -> int: ... + @property + def optimize(self) -> int: ... + @property + def dont_write_bytecode(self) -> int: ... + @property + def no_user_site(self) -> int: ... + @property + def no_site(self) -> int: ... + @property + def ignore_environment(self) -> int: ... + @property + def verbose(self) -> int: ... + @property + def bytes_warning(self) -> int: ... + @property + def quiet(self) -> int: ... + @property + def hash_randomization(self) -> int: ... + @property + def isolated(self) -> int: ... if sys.version_info >= (3, 7): - dev_mode: int - utf8_mode: int + @property + def dev_mode(self) -> bool: ... + @property + def utf8_mode(self) -> int: ... + if sys.version_info >= (3, 10): + @property + def warn_default_encoding(self) -> int: ... # undocumented float_info: _float_info -class _float_info: - epsilon: float # DBL_EPSILON - dig: int # DBL_DIG - mant_dig: int # DBL_MANT_DIG - max: float # DBL_MAX - max_exp: int # DBL_MAX_EXP - max_10_exp: int # DBL_MAX_10_EXP - min: float # DBL_MIN - min_exp: int # DBL_MIN_EXP - min_10_exp: int # DBL_MIN_10_EXP - radix: int # FLT_RADIX - rounds: int # FLT_ROUNDS +@final +class _float_info(structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int]): + @property + def max(self) -> float: ... # DBL_MAX + @property + def max_exp(self) -> int: ... # DBL_MAX_EXP + @property + def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP + @property + def min(self) -> float: ... # DBL_MIN + @property + def min_exp(self) -> int: ... 
# DBL_MIN_EXP + @property + def min_10_exp(self) -> int: ... # DBL_MIN_10_EXP + @property + def dig(self) -> int: ... # DBL_DIG + @property + def mant_dig(self) -> int: ... # DBL_MANT_DIG + @property + def epsilon(self) -> float: ... # DBL_EPSILON + @property + def radix(self) -> int: ... # FLT_RADIX + @property + def rounds(self) -> int: ... # FLT_ROUNDS hash_info: _hash_info -class _hash_info: - width: int - modulus: int - inf: int - nan: int - imag: int +@final +class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): + @property + def width(self) -> int: ... + @property + def modulus(self) -> int: ... + @property + def inf(self) -> int: ... + @property + def nan(self) -> int: ... + @property + def imag(self) -> int: ... + @property + def algorithm(self) -> str: ... + @property + def hash_bits(self) -> int: ... + @property + def seed_bits(self) -> int: ... + @property + def cutoff(self) -> int: ... # undocumented implementation: _implementation @@ -141,20 +181,32 @@ class _implementation: version: _version_info hexversion: int cache_tag: str - _multiarch: str + # Define __getattr__, as the documentation states: + # > sys.implementation may contain additional attributes specific to the Python implementation. + # > These non-standard attributes must start with an underscore, and are not described here. + def __getattr__(self, name: str) -> Any: ... int_info: _int_info -class _int_info: - bits_per_digit: int - sizeof_digit: int - -class _version_info(Tuple[int, int, int, str, int]): - major: int - minor: int - micro: int - releaselevel: str - serial: int +@final +class _int_info(structseq[int], tuple[int, int]): + @property + def bits_per_digit(self) -> int: ... + @property + def sizeof_digit(self) -> int: ... + +@final +class _version_info(_uninstantiable_structseq, tuple[int, int, int, str, int]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... 
+ @property + def releaselevel(self) -> str: ... + @property + def serial(self) -> int: ... version_info: _version_info @@ -163,8 +215,8 @@ def _clear_type_cache() -> None: ... def _current_frames() -> dict[int, FrameType]: ... def _getframe(__depth: int = ...) -> FrameType: ... def _debugmallocstats() -> None: ... -def __displayhook__(value: object) -> None: ... -def __excepthook__(type_: Type[BaseException], value: BaseException, traceback: TracebackType) -> None: ... +def __displayhook__(__value: object) -> None: ... +def __excepthook__(__exctype: type[BaseException], __value: BaseException, __traceback: TracebackType | None) -> None: ... def exc_info() -> _OptExcInfo: ... # sys.exit() accepts an optional argument of anything printable @@ -195,19 +247,31 @@ _TraceFunc = Callable[[FrameType, str, Any], Optional[Callable[[FrameType, str, def gettrace() -> _TraceFunc | None: ... def settrace(tracefunc: _TraceFunc | None) -> None: ... -class _WinVersion(Tuple[int, int, int, int, str, int, int, int, int, Tuple[int, int, int]]): - major: int - minor: int - build: int - platform: int - service_pack: str - service_pack_minor: int - service_pack_major: int - suite_mast: int - product_type: int - platform_version: Tuple[int, int, int] - if sys.platform == "win32": + # A tuple of length 5, even though it has more than 5 attributes. + @final + class _WinVersion(_uninstantiable_structseq, tuple[int, int, int, int, str]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def build(self) -> int: ... + @property + def platform(self) -> int: ... + @property + def service_pack(self) -> str: ... + @property + def service_pack_minor(self) -> int: ... + @property + def service_pack_major(self) -> int: ... + @property + def suite_mask(self) -> int: ... + @property + def product_type(self) -> int: ... + @property + def platform_version(self) -> tuple[int, int, int]: ... + def getwindowsversion() -> _WinVersion: ... 
def intern(__string: str) -> str: ... @@ -229,22 +293,25 @@ if sys.version_info < (3, 9): def setcheckinterval(__n: int) -> None: ... # deprecated if sys.version_info >= (3, 8): - # not exported by sys + # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. class UnraisableHookArgs: - exc_type: Type[BaseException] + exc_type: type[BaseException] exc_value: BaseException | None exc_traceback: TracebackType | None err_msg: str | None object: _object | None unraisablehook: Callable[[UnraisableHookArgs], Any] - def addaudithook(hook: Callable[[str, Tuple[Any, ...]], Any]) -> None: ... + def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... def audit(__event: str, *args: Any) -> None: ... _AsyncgenHook = Optional[Callable[[AsyncGenerator[Any, Any]], None]] -class _asyncgen_hooks(Tuple[_AsyncgenHook, _AsyncgenHook]): - firstiter: _AsyncgenHook - finalizer: _AsyncgenHook +@final +class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHook]): + @property + def firstiter(self) -> _AsyncgenHook: ... + @property + def finalizer(self) -> _AsyncgenHook: ... def get_asyncgen_hooks() -> _asyncgen_hooks: ... def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi index ff828d519912..13c40b927f4e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/sysconfig.pyi @@ -1,12 +1,26 @@ -from typing import IO, Any, Tuple, overload +from typing import IO, Any, overload + +__all__ = [ + "get_config_h_filename", + "get_config_var", + "get_config_vars", + "get_makefile_filename", + "get_path", + "get_path_names", + "get_paths", + "get_platform", + "get_python_version", + "get_scheme_names", + "parse_config_h", +] def get_config_var(name: str) -> str | None: ... @overload def get_config_vars() -> dict[str, Any]: ... @overload def get_config_vars(arg: str, *args: str) -> list[Any]: ... -def get_scheme_names() -> Tuple[str, ...]: ... -def get_path_names() -> Tuple[str, ...]: ... +def get_scheme_names() -> tuple[str, ...]: ... +def get_path_names() -> tuple[str, ...]: ... def get_path(name: str, scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> str: ... def get_paths(scheme: str = ..., vars: dict[str, Any] | None = ..., expand: bool = ...) -> dict[str, str]: ... def get_python_version() -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/syslog.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/syslog.pyi index 49169f40db5c..cfa8df887c1b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/syslog.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/syslog.pyi @@ -1,43 +1,47 @@ +import sys from typing import overload +from typing_extensions import Literal -LOG_ALERT: int -LOG_AUTH: int -LOG_CONS: int -LOG_CRIT: int -LOG_CRON: int -LOG_DAEMON: int -LOG_DEBUG: int -LOG_EMERG: int -LOG_ERR: int -LOG_INFO: int -LOG_KERN: int -LOG_LOCAL0: int -LOG_LOCAL1: int -LOG_LOCAL2: int -LOG_LOCAL3: int -LOG_LOCAL4: int -LOG_LOCAL5: int -LOG_LOCAL6: int -LOG_LOCAL7: int -LOG_LPR: int -LOG_MAIL: int -LOG_NDELAY: int -LOG_NEWS: int -LOG_NOTICE: int -LOG_NOWAIT: int -LOG_PERROR: int -LOG_PID: int -LOG_SYSLOG: int -LOG_USER: int -LOG_UUCP: int -LOG_WARNING: int - -def LOG_MASK(a: int) -> int: ... -def LOG_UPTO(a: int) -> int: ... -def closelog() -> None: ... -def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... -def setlogmask(x: int) -> int: ... -@overload -def syslog(priority: int, message: str) -> None: ... -@overload -def syslog(message: str) -> None: ... 
+if sys.platform != "win32": + LOG_ALERT: Literal[1] + LOG_AUTH: Literal[32] + LOG_AUTHPRIV: Literal[80] + LOG_CONS: Literal[2] + LOG_CRIT: Literal[2] + LOG_CRON: Literal[72] + LOG_DAEMON: Literal[24] + LOG_DEBUG: Literal[7] + LOG_EMERG: Literal[0] + LOG_ERR: Literal[3] + LOG_INFO: Literal[6] + LOG_KERN: Literal[0] + LOG_LOCAL0: Literal[128] + LOG_LOCAL1: Literal[136] + LOG_LOCAL2: Literal[144] + LOG_LOCAL3: Literal[152] + LOG_LOCAL4: Literal[160] + LOG_LOCAL5: Literal[168] + LOG_LOCAL6: Literal[176] + LOG_LOCAL7: Literal[184] + LOG_LPR: Literal[48] + LOG_MAIL: Literal[16] + LOG_NDELAY: Literal[8] + LOG_NEWS: Literal[56] + LOG_NOTICE: Literal[5] + LOG_NOWAIT: Literal[16] + LOG_ODELAY: Literal[4] + LOG_PERROR: Literal[32] + LOG_PID: Literal[1] + LOG_SYSLOG: Literal[40] + LOG_USER: Literal[8] + LOG_UUCP: Literal[64] + LOG_WARNING: Literal[4] + def LOG_MASK(a: int) -> int: ... + def LOG_UPTO(a: int) -> int: ... + def closelog() -> None: ... + def openlog(ident: str = ..., logoption: int = ..., facility: int = ...) -> None: ... + def setlogmask(x: int) -> int: ... + @overload + def syslog(priority: int, message: str) -> None: ... + @overload + def syslog(message: str) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tabnanny.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tabnanny.pyi index 584c6d4e26bd..020100031c14 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tabnanny.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tabnanny.pyi @@ -1,5 +1,7 @@ from _typeshed import StrOrBytesPath -from typing import Iterable, Tuple +from typing import Iterable + +__all__ = ["check", "NannyNag", "process_tokens"] verbose: int filename_only: int @@ -11,4 +13,4 @@ class NannyNag(Exception): def get_line(self) -> str: ... def check(file: StrOrBytesPath) -> None: ... -def process_tokens(tokens: Iterable[Tuple[int, str, Tuple[int, int], Tuple[int, int], str]]) -> None: ... 
+def process_tokens(tokens: Iterable[tuple[int, str, tuple[int, int], tuple[int, int], str]]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi index 53c986a3f017..4d54f9815601 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tarfile.pyi @@ -2,13 +2,30 @@ import bz2 import io import sys from _typeshed import Self, StrOrBytesPath, StrPath +from builtins import type as Type # alias to avoid name clashes with fields named "type" from collections.abc import Callable, Iterable, Iterator, Mapping from gzip import _ReadableFileobj as _GzipReadableFileobj, _WritableFileobj as _GzipWritableFileobj from types import TracebackType -from typing import IO, Protocol, Set, Tuple, Type, TypeVar, overload +from typing import IO, Protocol, overload from typing_extensions import Literal -_TF = TypeVar("_TF", bound=TarFile) +__all__ = [ + "TarFile", + "TarInfo", + "is_tarfile", + "TarError", + "ReadError", + "CompressionError", + "StreamError", + "ExtractError", + "HeaderError", + "ENCODING", + "USTAR_FORMAT", + "GNU_FORMAT", + "PAX_FORMAT", + "DEFAULT_FORMAT", + "open", +] class _Fileobj(Protocol): def read(self, __size: int) -> bytes: ... @@ -62,12 +79,12 @@ DEFAULT_FORMAT: int # tarfile constants -SUPPORTED_TYPES: Tuple[bytes, ...] -REGULAR_TYPES: Tuple[bytes, ...] -GNU_TYPES: Tuple[bytes, ...] -PAX_FIELDS: Tuple[str, ...] +SUPPORTED_TYPES: tuple[bytes, ...] +REGULAR_TYPES: tuple[bytes, ...] +GNU_TYPES: tuple[bytes, ...] +PAX_FIELDS: tuple[str, ...] 
PAX_NUMBER_FIELDS: dict[str, type] -PAX_NAME_FIELDS: Set[str] +PAX_NAME_FIELDS: set[str] ENCODING: str @@ -78,7 +95,7 @@ def open( bufsize: int = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., @@ -100,12 +117,12 @@ class TarFile: mode: Literal["r", "a", "w", "x"] fileobj: _Fileobj | None format: int | None - tarinfo: Type[TarInfo] + tarinfo: type[TarInfo] dereference: bool | None ignore_zeros: bool | None encoding: str | None errors: str - fileobject: Type[ExFileObject] + fileobject: type[ExFileObject] pax_headers: Mapping[str, str] | None debug: int | None errorlevel: int | None @@ -116,7 +133,7 @@ class TarFile: mode: Literal["r", "a", "w", "x"] = ..., fileobj: _Fileobj | None = ..., format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., @@ -128,19 +145,19 @@ class TarFile: ) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __iter__(self) -> Iterator[TarInfo]: ... @classmethod def open( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None = ..., mode: str = ..., fileobj: IO[bytes] | None = ..., # depends on mode bufsize: int = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., @@ -148,113 +165,113 @@ class TarFile: pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... 
@classmethod def taropen( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None, mode: Literal["r", "a", "w", "x"] = ..., fileobj: _Fileobj | None = ..., *, compresslevel: int = ..., format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... @overload @classmethod def gzopen( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None, mode: Literal["r"] = ..., fileobj: _GzipReadableFileobj | None = ..., compresslevel: int = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... @overload @classmethod def gzopen( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None, mode: Literal["w", "x"], fileobj: _GzipWritableFileobj | None = ..., compresslevel: int = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... 
@overload @classmethod def bz2open( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None, mode: Literal["w", "x"], fileobj: _Bz2WritableFileobj | None = ..., compresslevel: int = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... @overload @classmethod def bz2open( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None, mode: Literal["r"] = ..., fileobj: _Bz2ReadableFileobj | None = ..., compresslevel: int = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... @classmethod def xzopen( - cls: Type[_TF], + cls: type[Self], name: StrOrBytesPath | None, mode: Literal["r", "w", "x"] = ..., fileobj: IO[bytes] | None = ..., preset: int | None = ..., *, format: int | None = ..., - tarinfo: Type[TarInfo] | None = ..., + tarinfo: type[TarInfo] | None = ..., dereference: bool | None = ..., ignore_zeros: bool | None = ..., encoding: str | None = ..., pax_headers: Mapping[str, str] | None = ..., debug: int | None = ..., errorlevel: int | None = ..., - ) -> _TF: ... + ) -> Self: ... def getmember(self, name: str) -> TarInfo: ... def getmembers(self) -> _list[TarInfo]: ... def getnames(self) -> _list[str]: ... @@ -298,6 +315,7 @@ class TarFile: *, filter: Callable[[TarInfo], TarInfo | None] | None = ..., ) -> None: ... + def addfile(self, tarinfo: TarInfo, fileobj: IO[bytes] | None = ...) -> None: ... 
def gettarinfo(self, name: str | None = ..., arcname: str | None = ..., fileobj: IO[bytes] | None = ...) -> TarInfo: ... def close(self) -> None: ... @@ -340,9 +358,9 @@ class TarInfo: pax_headers: Mapping[str, str] def __init__(self, name: str = ...) -> None: ... @classmethod - def frombuf(cls, buf: bytes, encoding: str, errors: str) -> TarInfo: ... + def frombuf(cls: Type[Self], buf: bytes, encoding: str, errors: str) -> Self: ... @classmethod - def fromtarfile(cls, tarfile: TarFile) -> TarInfo: ... + def fromtarfile(cls: Type[Self], tarfile: TarFile) -> Self: ... @property def linkpath(self) -> str: ... @linkpath.setter diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/telnetlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/telnetlib.pyi index b9c984550c2c..f522d206550d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/telnetlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/telnetlib.pyi @@ -1,6 +1,8 @@ import socket from _typeshed import Self -from typing import Any, Callable, Match, Pattern, Sequence, Tuple +from typing import Any, Callable, Match, Pattern, Sequence + +__all__ = ["Telnet"] DEBUGLEVEL: int TELNET_PORT: int @@ -109,6 +111,6 @@ class Telnet: def listener(self) -> None: ... def expect( self, list: Sequence[Pattern[bytes] | bytes], timeout: float | None = ... - ) -> Tuple[int, Match[bytes] | None, bytes]: ... + ) -> tuple[int, Match[bytes] | None, bytes]: ... def __enter__(self: Self) -> Self: ... def __exit__(self, type: Any, value: Any, traceback: Any) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tempfile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tempfile.pyi index 270f506ed594..ef0a36241592 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tempfile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tempfile.pyi @@ -2,12 +2,28 @@ import os import sys from _typeshed import Self from types import TracebackType -from typing import IO, Any, AnyStr, Generic, Iterable, Iterator, Tuple, Type, Union, overload +from typing import IO, Any, AnyStr, Generic, Iterable, Iterator, Union, overload from typing_extensions import Literal if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = [ + "NamedTemporaryFile", + "TemporaryFile", + "SpooledTemporaryFile", + "TemporaryDirectory", + "mkstemp", + "mkdtemp", + "mktemp", + "TMP_MAX", + "gettempprefix", + "tempdir", + "gettempdir", + "gettempprefixb", + "gettempdirb", +] + # global variables TMP_MAX: int tempdir: str | None @@ -169,7 +185,7 @@ class _TemporaryFileWrapper(Generic[AnyStr], IO[AnyStr]): delete: bool def __init__(self, file: IO[AnyStr], name: str, delete: bool = ...) -> None: ... def __enter__(self: Self) -> Self: ... - def __exit__(self, exc: Type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> bool | None: ... + def __exit__(self, exc: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None) -> bool | None: ... def __getattr__(self, name: str) -> Any: ... def close(self) -> None: ... # These methods don't exist directly on this object, but @@ -206,7 +222,7 @@ class SpooledTemporaryFile(IO[AnyStr]): @property def encoding(self) -> str: ... # undocumented @property - def newlines(self) -> str | Tuple[str, ...] | None: ... # undocumented + def newlines(self) -> str | tuple[str, ...] | None: ... 
# undocumented # bytes needs to go first, as default mode is to open as bytes if sys.version_info >= (3, 8): @overload @@ -290,11 +306,12 @@ class SpooledTemporaryFile(IO[AnyStr]): prefix: str | None = ..., dir: str | None = ..., ) -> None: ... + def rollover(self) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> bool | None: ... + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... # These methods are copied from the abstract methods of IO, because # SpooledTemporaryFile implements IO. # See also https://github.com/python/typeshed/pull/2452#issuecomment-420657918. @@ -307,7 +324,7 @@ class SpooledTemporaryFile(IO[AnyStr]): def readlines(self, hint: int = ...) -> list[AnyStr]: ... def seek(self, offset: int, whence: int = ...) -> int: ... def tell(self) -> int: ... - def truncate(self, size: int | None = ...) -> int: ... + def truncate(self, size: int | None = ...) -> None: ... # type: ignore[override] def write(self, s: AnyStr) -> int: ... def writelines(self, iterable: Iterable[AnyStr]) -> None: ... def __iter__(self) -> Iterator[AnyStr]: ... @@ -321,24 +338,43 @@ class SpooledTemporaryFile(IO[AnyStr]): class TemporaryDirectory(Generic[AnyStr]): name: AnyStr - @overload - def __init__(self: TemporaryDirectory[str], suffix: None = ..., prefix: None = ..., dir: None = ...) -> None: ... - @overload - def __init__(self, suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ...) -> None: ... + if sys.version_info >= (3, 10): + @overload + def __init__( + self: TemporaryDirectory[str], + suffix: None = ..., + prefix: None = ..., + dir: None = ..., + ignore_cleanup_errors: bool = ..., + ) -> None: ... 
+ @overload + def __init__( + self, + suffix: AnyStr | None = ..., + prefix: AnyStr | None = ..., + dir: _DirT[AnyStr] | None = ..., + ignore_cleanup_errors: bool = ..., + ) -> None: ... + else: + @overload + def __init__(self: TemporaryDirectory[str], suffix: None = ..., prefix: None = ..., dir: None = ...) -> None: ... + @overload + def __init__(self, suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ...) -> None: ... + def cleanup(self) -> None: ... def __enter__(self) -> AnyStr: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @overload -def mkstemp() -> Tuple[int, str]: ... +def mkstemp() -> tuple[int, str]: ... @overload def mkstemp( suffix: AnyStr | None = ..., prefix: AnyStr | None = ..., dir: _DirT[AnyStr] | None = ..., text: bool = ... -) -> Tuple[int, AnyStr]: ... +) -> tuple[int, AnyStr]: ... @overload def mkdtemp() -> str: ... 
@overload diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/termios.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/termios.pyi index ed8522dccc51..6ef2f6818827 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/termios.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/termios.pyi @@ -1,246 +1,247 @@ +import sys from _typeshed import FileDescriptorLike -from typing import Any, List, Union +from typing import Any, Union -_Attr = List[Union[int, List[Union[bytes, int]]]] +if sys.platform != "win32": + _Attr = list[Union[int, list[Union[bytes, int]]]] -# TODO constants not really documented -B0: int -B1000000: int -B110: int -B115200: int -B1152000: int -B1200: int -B134: int -B150: int -B1500000: int -B1800: int -B19200: int -B200: int -B2000000: int -B230400: int -B2400: int -B2500000: int -B300: int -B3000000: int -B3500000: int -B38400: int -B4000000: int -B460800: int -B4800: int -B50: int -B500000: int -B57600: int -B576000: int -B600: int -B75: int -B921600: int -B9600: int -BRKINT: int -BS0: int -BS1: int -BSDLY: int -CBAUD: int -CBAUDEX: int -CDSUSP: int -CEOF: int -CEOL: int -CEOT: int -CERASE: int -CFLUSH: int -CIBAUD: int -CINTR: int -CKILL: int -CLNEXT: int -CLOCAL: int -CQUIT: int -CR0: int -CR1: int -CR2: int -CR3: int -CRDLY: int -CREAD: int -CRPRNT: int -CRTSCTS: int -CS5: int -CS6: int -CS7: int -CS8: int -CSIZE: int -CSTART: int -CSTOP: int -CSTOPB: int -CSUSP: int -CWERASE: int -ECHO: int -ECHOCTL: int -ECHOE: int -ECHOK: int -ECHOKE: int -ECHONL: int -ECHOPRT: int -EXTA: int -EXTB: int -FF0: int -FF1: int -FFDLY: int -FIOASYNC: int -FIOCLEX: int -FIONBIO: int -FIONCLEX: int -FIONREAD: int -FLUSHO: int -HUPCL: int -ICANON: int -ICRNL: int -IEXTEN: int -IGNBRK: int -IGNCR: int -IGNPAR: int -IMAXBEL: int -INLCR: int -INPCK: int -IOCSIZE_MASK: int -IOCSIZE_SHIFT: int -ISIG: int -ISTRIP: int -IUCLC: int -IXANY: int -IXOFF: int -IXON: int -NCC: int -NCCS: int -NL0: int -NL1: int -NLDLY: int -NOFLSH: int 
-N_MOUSE: int -N_PPP: int -N_SLIP: int -N_STRIP: int -N_TTY: int -OCRNL: int -OFDEL: int -OFILL: int -OLCUC: int -ONLCR: int -ONLRET: int -ONOCR: int -OPOST: int -PARENB: int -PARMRK: int -PARODD: int -PENDIN: int -TAB0: int -TAB1: int -TAB2: int -TAB3: int -TABDLY: int -TCFLSH: int -TCGETA: int -TCGETS: int -TCIFLUSH: int -TCIOFF: int -TCIOFLUSH: int -TCION: int -TCOFLUSH: int -TCOOFF: int -TCOON: int -TCSADRAIN: int -TCSAFLUSH: int -TCSANOW: int -TCSBRK: int -TCSBRKP: int -TCSETA: int -TCSETAF: int -TCSETAW: int -TCSETS: int -TCSETSF: int -TCSETSW: int -TCXONC: int -TIOCCONS: int -TIOCEXCL: int -TIOCGETD: int -TIOCGICOUNT: int -TIOCGLCKTRMIOS: int -TIOCGPGRP: int -TIOCGSERIAL: int -TIOCGSOFTCAR: int -TIOCGWINSZ: int -TIOCINQ: int -TIOCLINUX: int -TIOCMBIC: int -TIOCMBIS: int -TIOCMGET: int -TIOCMIWAIT: int -TIOCMSET: int -TIOCM_CAR: int -TIOCM_CD: int -TIOCM_CTS: int -TIOCM_DSR: int -TIOCM_DTR: int -TIOCM_LE: int -TIOCM_RI: int -TIOCM_RNG: int -TIOCM_RTS: int -TIOCM_SR: int -TIOCM_ST: int -TIOCNOTTY: int -TIOCNXCL: int -TIOCOUTQ: int -TIOCPKT: int -TIOCPKT_DATA: int -TIOCPKT_DOSTOP: int -TIOCPKT_FLUSHREAD: int -TIOCPKT_FLUSHWRITE: int -TIOCPKT_NOSTOP: int -TIOCPKT_START: int -TIOCPKT_STOP: int -TIOCSCTTY: int -TIOCSERCONFIG: int -TIOCSERGETLSR: int -TIOCSERGETMULTI: int -TIOCSERGSTRUCT: int -TIOCSERGWILD: int -TIOCSERSETMULTI: int -TIOCSERSWILD: int -TIOCSER_TEMT: int -TIOCSETD: int -TIOCSLCKTRMIOS: int -TIOCSPGRP: int -TIOCSSERIAL: int -TIOCSSOFTCAR: int -TIOCSTI: int -TIOCSWINSZ: int -TOSTOP: int -VDISCARD: int -VEOF: int -VEOL: int -VEOL2: int -VERASE: int -VINTR: int -VKILL: int -VLNEXT: int -VMIN: int -VQUIT: int -VREPRINT: int -VSTART: int -VSTOP: int -VSUSP: int -VSWTC: int -VSWTCH: int -VT0: int -VT1: int -VTDLY: int -VTIME: int -VWERASE: int -XCASE: int -XTABS: int + # TODO constants not really documented + B0: int + B1000000: int + B110: int + B115200: int + B1152000: int + B1200: int + B134: int + B150: int + B1500000: int + B1800: int + B19200: int + 
B200: int + B2000000: int + B230400: int + B2400: int + B2500000: int + B300: int + B3000000: int + B3500000: int + B38400: int + B4000000: int + B460800: int + B4800: int + B50: int + B500000: int + B57600: int + B576000: int + B600: int + B75: int + B921600: int + B9600: int + BRKINT: int + BS0: int + BS1: int + BSDLY: int + CBAUD: int + CBAUDEX: int + CDSUSP: int + CEOF: int + CEOL: int + CEOT: int + CERASE: int + CFLUSH: int + CIBAUD: int + CINTR: int + CKILL: int + CLNEXT: int + CLOCAL: int + CQUIT: int + CR0: int + CR1: int + CR2: int + CR3: int + CRDLY: int + CREAD: int + CRPRNT: int + CRTSCTS: int + CS5: int + CS6: int + CS7: int + CS8: int + CSIZE: int + CSTART: int + CSTOP: int + CSTOPB: int + CSUSP: int + CWERASE: int + ECHO: int + ECHOCTL: int + ECHOE: int + ECHOK: int + ECHOKE: int + ECHONL: int + ECHOPRT: int + EXTA: int + EXTB: int + FF0: int + FF1: int + FFDLY: int + FIOASYNC: int + FIOCLEX: int + FIONBIO: int + FIONCLEX: int + FIONREAD: int + FLUSHO: int + HUPCL: int + ICANON: int + ICRNL: int + IEXTEN: int + IGNBRK: int + IGNCR: int + IGNPAR: int + IMAXBEL: int + INLCR: int + INPCK: int + IOCSIZE_MASK: int + IOCSIZE_SHIFT: int + ISIG: int + ISTRIP: int + IUCLC: int + IXANY: int + IXOFF: int + IXON: int + NCC: int + NCCS: int + NL0: int + NL1: int + NLDLY: int + NOFLSH: int + N_MOUSE: int + N_PPP: int + N_SLIP: int + N_STRIP: int + N_TTY: int + OCRNL: int + OFDEL: int + OFILL: int + OLCUC: int + ONLCR: int + ONLRET: int + ONOCR: int + OPOST: int + PARENB: int + PARMRK: int + PARODD: int + PENDIN: int + TAB0: int + TAB1: int + TAB2: int + TAB3: int + TABDLY: int + TCFLSH: int + TCGETA: int + TCGETS: int + TCIFLUSH: int + TCIOFF: int + TCIOFLUSH: int + TCION: int + TCOFLUSH: int + TCOOFF: int + TCOON: int + TCSADRAIN: int + TCSAFLUSH: int + TCSANOW: int + TCSBRK: int + TCSBRKP: int + TCSETA: int + TCSETAF: int + TCSETAW: int + TCSETS: int + TCSETSF: int + TCSETSW: int + TCXONC: int + TIOCCONS: int + TIOCEXCL: int + TIOCGETD: int + TIOCGICOUNT: int + 
TIOCGLCKTRMIOS: int + TIOCGPGRP: int + TIOCGSERIAL: int + TIOCGSOFTCAR: int + TIOCGWINSZ: int + TIOCINQ: int + TIOCLINUX: int + TIOCMBIC: int + TIOCMBIS: int + TIOCMGET: int + TIOCMIWAIT: int + TIOCMSET: int + TIOCM_CAR: int + TIOCM_CD: int + TIOCM_CTS: int + TIOCM_DSR: int + TIOCM_DTR: int + TIOCM_LE: int + TIOCM_RI: int + TIOCM_RNG: int + TIOCM_RTS: int + TIOCM_SR: int + TIOCM_ST: int + TIOCNOTTY: int + TIOCNXCL: int + TIOCOUTQ: int + TIOCPKT: int + TIOCPKT_DATA: int + TIOCPKT_DOSTOP: int + TIOCPKT_FLUSHREAD: int + TIOCPKT_FLUSHWRITE: int + TIOCPKT_NOSTOP: int + TIOCPKT_START: int + TIOCPKT_STOP: int + TIOCSCTTY: int + TIOCSERCONFIG: int + TIOCSERGETLSR: int + TIOCSERGETMULTI: int + TIOCSERGSTRUCT: int + TIOCSERGWILD: int + TIOCSERSETMULTI: int + TIOCSERSWILD: int + TIOCSER_TEMT: int + TIOCSETD: int + TIOCSLCKTRMIOS: int + TIOCSPGRP: int + TIOCSSERIAL: int + TIOCSSOFTCAR: int + TIOCSTI: int + TIOCSWINSZ: int + TOSTOP: int + VDISCARD: int + VEOF: int + VEOL: int + VEOL2: int + VERASE: int + VINTR: int + VKILL: int + VLNEXT: int + VMIN: int + VQUIT: int + VREPRINT: int + VSTART: int + VSTOP: int + VSUSP: int + VSWTC: int + VSWTCH: int + VT0: int + VT1: int + VTDLY: int + VTIME: int + VWERASE: int + XCASE: int + XTABS: int + def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... + def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... + def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... + def tcdrain(__fd: FileDescriptorLike) -> None: ... + def tcflush(__fd: FileDescriptorLike, __queue: int) -> None: ... + def tcflow(__fd: FileDescriptorLike, __action: int) -> None: ... -def tcgetattr(__fd: FileDescriptorLike) -> list[Any]: ... -def tcsetattr(__fd: FileDescriptorLike, __when: int, __attributes: _Attr) -> None: ... -def tcsendbreak(__fd: FileDescriptorLike, __duration: int) -> None: ... -def tcdrain(__fd: FileDescriptorLike) -> None: ... -def tcflush(__fd: FileDescriptorLike, __queue: int) -> None: ... 
-def tcflow(__fd: FileDescriptorLike, __action: int) -> None: ... - -class error(Exception): ... + class error(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/textwrap.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/textwrap.pyi index 3b8fd5c0349a..aeb1d87141be 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/textwrap.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/textwrap.pyi @@ -1,5 +1,7 @@ from typing import Callable, Pattern +__all__ = ["TextWrapper", "wrap", "fill", "dedent", "indent", "shorten"] + class TextWrapper: width: int initial_indent: str diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi index 64998d86bf9f..8c623239446c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/threading.pyi @@ -1,6 +1,6 @@ import sys from types import FrameType, TracebackType -from typing import Any, Callable, Iterable, Mapping, Optional, Type, TypeVar +from typing import Any, Callable, Iterable, Mapping, Optional, TypeVar # TODO recursive type _TF = Callable[[FrameType, str, Any], Optional[Callable[..., Any]]] @@ -8,11 +8,91 @@ _TF = Callable[[FrameType, str, Any], Optional[Callable[..., Any]]] _PF = Callable[[FrameType, str, Any], None] _T = TypeVar("_T") -__all__: list[str] +if sys.version_info >= (3, 10): + __all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", + "excepthook", + "ExceptHookArgs", + "gettrace", + "getprofile", + "get_native_id", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "get_ident", + "active_count", + "Condition", + 
"current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", + "excepthook", + "ExceptHookArgs", + "get_native_id", + ] +else: + __all__ = [ + "get_ident", + "active_count", + "Condition", + "current_thread", + "enumerate", + "main_thread", + "TIMEOUT_MAX", + "Event", + "Lock", + "RLock", + "Semaphore", + "BoundedSemaphore", + "Thread", + "Barrier", + "BrokenBarrierError", + "Timer", + "ThreadError", + "setprofile", + "settrace", + "local", + "stack_size", + ] def active_count() -> int: ... +def activeCount() -> int: ... # deprecated alias for active_count() def current_thread() -> Thread: ... -def currentThread() -> Thread: ... +def currentThread() -> Thread: ... # deprecated alias for current_thread() def get_ident() -> int: ... def enumerate() -> list[Thread]: ... def main_thread() -> Thread: ... @@ -22,16 +102,21 @@ if sys.version_info >= (3, 8): def settrace(func: _TF) -> None: ... def setprofile(func: _PF | None) -> None: ... + +if sys.version_info >= (3, 10): + def gettrace() -> _TF | None: ... + def getprofile() -> _PF | None: ... + def stack_size(size: int = ...) -> int: ... TIMEOUT_MAX: float class ThreadError(Exception): ... -class local(object): - def __getattribute__(self, name: str) -> Any: ... - def __setattr__(self, name: str, value: Any) -> None: ... - def __delattr__(self, name: str) -> None: ... +class local: + def __getattribute__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... class Thread: name: str @@ -50,24 +135,27 @@ class Thread: def start(self) -> None: ... def run(self) -> None: ... def join(self, timeout: float | None = ...) -> None: ... - def getName(self) -> str: ... - def setName(self, name: str) -> None: ... 
if sys.version_info >= (3, 8): @property def native_id(self) -> int | None: ... # only available on some platforms + def is_alive(self) -> bool: ... if sys.version_info < (3, 9): def isAlive(self) -> bool: ... + # the following methods are all deprecated + def getName(self) -> str: ... + def setName(self, name: str) -> None: ... def isDaemon(self) -> bool: ... def setDaemon(self, daemonic: bool) -> None: ... -class _DummyThread(Thread): ... +class _DummyThread(Thread): + def __init__(self) -> None: ... class Lock: def __init__(self) -> None: ... def __enter__(self) -> bool: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... @@ -75,12 +163,12 @@ class Lock: class _RLock: def __init__(self) -> None: ... - def __enter__(self) -> bool: ... - def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... + __enter__ = acquire + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... RLock = _RLock @@ -88,7 +176,7 @@ class Condition: def __init__(self, lock: Lock | _RLock | None = ...) -> None: ... def __enter__(self) -> bool: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool | None: ... def acquire(self, blocking: bool = ..., timeout: float = ...) -> bool: ... def release(self) -> None: ... 
@@ -96,13 +184,13 @@ class Condition: def wait_for(self, predicate: Callable[[], _T], timeout: float | None = ...) -> _T: ... def notify(self, n: int = ...) -> None: ... def notify_all(self) -> None: ... - def notifyAll(self) -> None: ... + def notifyAll(self) -> None: ... # deprecated alias for notify_all() class Semaphore: def __init__(self, value: int = ...) -> None: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> bool | None: ... + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... def acquire(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... def __enter__(self, blocking: bool = ..., timeout: float | None = ...) -> bool: ... if sys.version_info >= (3, 9): @@ -115,6 +203,7 @@ class BoundedSemaphore(Semaphore): ... class Event: def __init__(self) -> None: ... def is_set(self) -> bool: ... + def isSet(self) -> bool: ... # deprecated alias for is_set() def set(self) -> None: ... def clear(self) -> None: ... def wait(self, timeout: float | None = ...) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/time.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/time.pyi index 039350c5b7ad..815171f0c7dd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/time.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/time.pyi @@ -1,13 +1,14 @@ import sys -from types import SimpleNamespace -from typing import Any, NamedTuple, Tuple +from _typeshed import structseq +from typing import Any, Protocol, Union +from typing_extensions import Literal, final -_TimeTuple = Tuple[int, int, int, int, int, int, int, int, int] +_TimeTuple = tuple[int, int, int, int, int, int, int, int, int] altzone: int daylight: int timezone: int -tzname: Tuple[str, str] +tzname: tuple[str, str] if sys.version_info >= (3, 7): if sys.platform == "linux": @@ -31,38 +32,31 @@ if sys.version_info >= (3, 8) and sys.platform == "darwin": if sys.version_info >= (3, 9) and sys.platform == "linux": CLOCK_TAI: int -class _struct_time(NamedTuple): - tm_year: int - tm_mon: int - tm_mday: int - tm_hour: int - tm_min: int - tm_sec: int - tm_wday: int - tm_yday: int - tm_isdst: int +# Constructor takes an iterable of any type, of length between 9 and 11 elements. +# However, it always *behaves* like a tuple of 9 elements, +# even if an iterable with length >9 is passed. +# https://github.com/python/typeshed/pull/6560#discussion_r767162532 +@final +class struct_time(structseq[Union[Any, int]], _TimeTuple): @property - def n_fields(self) -> int: ... + def tm_year(self) -> int: ... @property - def n_sequence_fields(self) -> int: ... + def tm_mon(self) -> int: ... @property - def n_unnamed_fields(self) -> int: ... - -class struct_time(_struct_time): - def __init__( - self, - o: Tuple[int, int, int, int, int, int, int, int, int] - | Tuple[int, int, int, int, int, int, int, int, int, str] - | Tuple[int, int, int, int, int, int, int, int, int, str, int], - _arg: Any = ..., - ) -> None: ... 
- def __new__( - cls, - o: Tuple[int, int, int, int, int, int, int, int, int] - | Tuple[int, int, int, int, int, int, int, int, int, str] - | Tuple[int, int, int, int, int, int, int, int, int, str, int], - _arg: Any = ..., - ) -> struct_time: ... + def tm_mday(self) -> int: ... + @property + def tm_hour(self) -> int: ... + @property + def tm_min(self) -> int: ... + @property + def tm_sec(self) -> int: ... + @property + def tm_wday(self) -> int: ... + @property + def tm_yday(self) -> int: ... + @property + def tm_isdst(self) -> int: ... + # These final two properties only exist if a 10- or 11-item sequence was passed to the constructor. @property def tm_zone(self) -> str: ... @property @@ -85,7 +79,13 @@ def time() -> float: ... if sys.platform != "win32": def tzset() -> None: ... # Unix only -def get_clock_info(name: str) -> SimpleNamespace: ... +class _ClockInfo(Protocol): + adjustable: bool + implementation: str + monotonic: bool + resolution: float + +def get_clock_info(name: Literal["monotonic", "perf_counter", "process_time", "time", "thread_time"]) -> _ClockInfo: ... def monotonic() -> float: ... def perf_counter() -> float: ... def process_time() -> float: ... @@ -99,6 +99,7 @@ if sys.version_info >= (3, 7): if sys.platform != "win32": def clock_gettime_ns(clock_id: int) -> int: ... def clock_settime_ns(clock_id: int, time: int) -> int: ... + def monotonic_ns() -> int: ... def perf_counter_ns() -> int: ... def process_time_ns() -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/timeit.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/timeit.pyi index 2a8330d1cee1..3135c21a6022 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/timeit.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/timeit.pyi @@ -1,4 +1,6 @@ -from typing import IO, Any, Callable, Sequence, Tuple, Union +from typing import IO, Any, Callable, Sequence, Union + +__all__ = ["Timer", "timeit", "repeat", "default_timer"] _Timer = Callable[[], float] _Stmt = Union[str, Callable[[], Any]] @@ -12,7 +14,7 @@ class Timer: def print_exc(self, file: IO[str] | None = ...) -> None: ... def timeit(self, number: int = ...) -> float: ... def repeat(self, repeat: int = ..., number: int = ...) -> list[float]: ... - def autorange(self, callback: Callable[[int, float], Any] | None = ...) -> Tuple[int, float]: ... + def autorange(self, callback: Callable[[int, float], Any] | None = ...) -> tuple[int, float]: ... def timeit( stmt: _Stmt = ..., setup: _Stmt = ..., timer: _Timer = ..., number: int = ..., globals: dict[str, Any] | None = ... @@ -25,7 +27,4 @@ def repeat( number: int = ..., globals: dict[str, Any] | None = ..., ) -> list[float]: ... - -_timerFunc = Callable[[], float] - -def main(args: Sequence[str] | None = ..., *, _wrap_timer: Callable[[_timerFunc], _timerFunc] | None = ...) -> None: ... +def main(args: Sequence[str] | None = ..., *, _wrap_timer: Callable[[_Timer], _Timer] | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/__init__.pyi index b7a5d3a112cc..2393b0e5be49 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/__init__.pyi @@ -2,12 +2,147 @@ import _tkinter import sys from _typeshed import StrOrBytesPath from enum import Enum -from tkinter.constants import * # comment this out to find undefined identifier names with flake8 +from tkinter.constants import * from tkinter.font import _FontDescription from types import TracebackType -from typing import Any, Callable, Generic, List, Mapping, Optional, Protocol, Sequence, Tuple, Type, TypeVar, Union, overload +from typing import Any, Callable, Generic, Mapping, Optional, Protocol, Sequence, TypeVar, Union, overload from typing_extensions import Literal, TypedDict +if sys.version_info >= (3, 9): + __all__ = [ + "TclError", + "NO", + "FALSE", + "OFF", + "YES", + "TRUE", + "ON", + "N", + "S", + "W", + "E", + "NW", + "SW", + "NE", + "SE", + "NS", + "EW", + "NSEW", + "CENTER", + "NONE", + "X", + "Y", + "BOTH", + "LEFT", + "TOP", + "RIGHT", + "BOTTOM", + "RAISED", + "SUNKEN", + "FLAT", + "RIDGE", + "GROOVE", + "SOLID", + "HORIZONTAL", + "VERTICAL", + "NUMERIC", + "CHAR", + "WORD", + "BASELINE", + "INSIDE", + "OUTSIDE", + "SEL", + "SEL_FIRST", + "SEL_LAST", + "END", + "INSERT", + "CURRENT", + "ANCHOR", + "ALL", + "NORMAL", + "DISABLED", + "ACTIVE", + "HIDDEN", + "CASCADE", + "CHECKBUTTON", + "COMMAND", + "RADIOBUTTON", + "SEPARATOR", + "SINGLE", + "BROWSE", + "MULTIPLE", + "EXTENDED", + "DOTBOX", + "UNDERLINE", + "PIESLICE", + "CHORD", + "ARC", + "FIRST", + "LAST", + "BUTT", + "PROJECTING", + "ROUND", + "BEVEL", + "MITER", + "MOVETO", + "SCROLL", + "UNITS", + "PAGES", + "TkVersion", + "TclVersion", + "READABLE", + "WRITABLE", + "EXCEPTION", + "EventType", + "Event", + "NoDefaultRoot", + "Variable", 
+ "StringVar", + "IntVar", + "DoubleVar", + "BooleanVar", + "mainloop", + "getint", + "getdouble", + "getboolean", + "Misc", + "CallWrapper", + "XView", + "YView", + "Wm", + "Tk", + "Tcl", + "Pack", + "Place", + "Grid", + "BaseWidget", + "Widget", + "Toplevel", + "Button", + "Canvas", + "Checkbutton", + "Entry", + "Frame", + "Label", + "Listbox", + "Menu", + "Menubutton", + "Message", + "Radiobutton", + "Scale", + "Scrollbar", + "Text", + "OptionMenu", + "Image", + "PhotoImage", + "BitmapImage", + "image_names", + "image_types", + "Spinbox", + "LabelFrame", + "PanedWindow", + ] + # Using anything from tkinter.font in this file means that 'import tkinter' # seems to also load tkinter.font. That's not how it actually works, but # unfortunately not much can be done about it. https://github.com/python/typeshed/pull/4346 @@ -24,96 +159,46 @@ EXCEPTION = _tkinter.EXCEPTION # - Misc: any widget (don't use BaseWidget because Tk doesn't inherit from BaseWidget) # - Widget: anything that is meant to be put into another widget with e.g. pack or grid # -# Instructions for figuring out the correct type of each widget option: -# - See discussion on #4363. -# -# - Find the option from the manual page of the widget. Usually the manual -# page of a non-ttk widget has the same name as the tkinter class, in the -# 3tk section: -# -# $ sudo apt install tk-doc -# $ man 3tk label -# -# Ttk manual pages tend to have ttk_ prefixed names: -# -# $ man 3tk ttk_label -# -# Non-GUI things like the .after() method are often in the 3tcl section: -# -# $ sudo apt install tcl-doc -# $ man 3tcl after -# -# If you don't have man or apt, you can read these manual pages online: -# -# https://www.tcl.tk/doc/ -# -# Every option has '-' in front of its name in the manual page (and in Tcl). -# For example, there's an option named '-text' in the label manual page. -# -# - Tkinter has some options documented in docstrings, but don't rely on them. 
-# They aren't updated when a new version of Tk comes out, so the latest Tk -# manual pages (see above) are much more likely to actually contain all -# possible options. -# -# Also, reading tkinter's source code typically won't help much because it -# uses a lot of **kwargs and duck typing. Typically every argument goes into -# self.tk.call, which is _tkinter.TkappType.call, and the return value is -# whatever that returns. The type of that depends on how the Tcl interpreter -# represents the return value of the executed Tcl code. +# Don't trust tkinter's docstrings, because they have been created by copy/pasting from +# Tk's manual pages more than 10 years ago. Use the latest manual pages instead: # -# - If you think that int is an appropriate type for something, then you may -# actually want _ScreenUnits instead. +# $ sudo apt install tk-doc tcl-doc +# $ man 3tk label # tkinter.Label +# $ man 3tk ttk_label # tkinter.ttk.Label +# $ man 3tcl after # tkinter.Misc.after # -# - If you think that Callable[something] is an appropriate type for -# something, then you may actually want Callable[something] | str, -# because it's often possible to specify a string of Tcl code. -# -# - Some options can be set only in __init__, but all options are available -# when getting their values with configure's return value or cget. -# -# - Asks other tkinter users if you haven't worked much with tkinter. - -# _TkinterSequence[T] represents a sequence that tkinter understands. It -# differs from typing.Sequence[T]. For example, collections.deque a valid -# Sequence but not a valid _TkinterSequence: -# -# >>> tkinter.Label(font=('Helvetica', 12, collections.deque(['bold']))) -# Traceback (most recent call last): -# ... 
-# _tkinter.TclError: unknown font style "deque(['bold'])" -_T = TypeVar("_T") -_TkinterSequence = Union[List[_T], Tuple[_T, ...]] -_TkinterSequence2D = Union[List[List[_T]], List[Tuple[_T, ...]], Tuple[List[_T], ...], Tuple[Tuple[_T, ...], ...]] +# You can also read the manual pages online: https://www.tcl.tk/doc/ # Some widgets have an option named -compound that accepts different values # than the _Compound defined here. Many other options have similar things. _Anchor = Literal["nw", "n", "ne", "w", "center", "e", "sw", "s", "se"] # manual page: Tk_GetAnchor _Bitmap = str # manual page: Tk_GetBitmap -_ButtonCommand = Union[str, Callable[[], Any]] # return value is returned from Button.invoke() +_ButtonCommand = Union[str, Callable[[], Any]] # accepts string of tcl code, return value is returned from Button.invoke() _CanvasItemId = int _Color = str # typically '#rrggbb', '#rgb' or color names. _Compound = Literal["top", "left", "center", "right", "bottom", "none"] # -compound in manual page named 'options' -_Cursor = Union[str, Tuple[str], Tuple[str, str], Tuple[str, str, str], Tuple[str, str, str, str]] # manual page: Tk_GetCursor +_Cursor = Union[str, tuple[str], tuple[str, str], tuple[str, str, str], tuple[str, str, str, str]] # manual page: Tk_GetCursor _EntryValidateCommand = Union[ - Callable[[], bool], str, _TkinterSequence[str] + Callable[[], bool], str, list[str], tuple[str, ...] ] # example when it's sequence: entry['invalidcommand'] = [entry.register(print), '%P'] _GridIndex = Union[int, str, Literal["all"]] _ImageSpec = Union[_Image, str] # str can be from e.g. 
tkinter.image_names() _Padding = Union[ _ScreenUnits, - Tuple[_ScreenUnits], - Tuple[_ScreenUnits, _ScreenUnits], - Tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits], - Tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits], + tuple[_ScreenUnits], + tuple[_ScreenUnits, _ScreenUnits], + tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits], + tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits], ] _Relief = Literal["raised", "sunken", "flat", "ridge", "solid", "groove"] # manual page: Tk_GetRelief -_ScreenUnits = Union[str, float] # manual page: Tk_GetPixels +_ScreenUnits = Union[str, float] # Often the right type instead of int. Manual page: Tk_GetPixels _XYScrollCommand = Union[str, Callable[[float, float], Any]] # -xscrollcommand and -yscrollcommand in 'options' manual page _TakeFocusValue = Union[int, Literal[""], Callable[[str], Optional[bool]]] # -takefocus in manual page named 'options' class EventType(str, Enum): Activate: str ButtonPress: str + Button = ButtonPress ButtonRelease: str Circulate: str CirculateRequest: str @@ -131,6 +216,7 @@ class EventType(str, Enum): GraphicsExpose: str Gravity: str KeyPress: str + Key = KeyPress KeyRelease: str Keymap: str Leave: str @@ -150,9 +236,9 @@ class EventType(str, Enum): VirtualEvent: str Visibility: str -_W = TypeVar("_W", bound="Misc") +_W = TypeVar("_W", bound=Misc) # Events considered covariant because you should never assign to event.widget. -_W_co = TypeVar("_W_co", covariant=True, bound="Misc") +_W_co = TypeVar("_W_co", covariant=True, bound=Misc) class Event(Generic[_W_co]): serial: int @@ -175,7 +261,7 @@ class Event(Generic[_W_co]): widget: _W_co delta: int -def NoDefaultRoot(): ... +def NoDefaultRoot() -> None: ... _TraceMode = Literal["array", "read", "write", "unset"] @@ -186,11 +272,12 @@ class Variable: def get(self) -> Any: ... def trace_add(self, mode: _TraceMode, callback: Callable[[str, str, str], Any]) -> str: ... def trace_remove(self, mode: _TraceMode, cbname: str) -> None: ... 
- def trace_info(self) -> list[Tuple[Tuple[_TraceMode, ...], str]]: ... + def trace_info(self) -> list[tuple[tuple[_TraceMode, ...], str]]: ... def trace_variable(self, mode, callback): ... # deprecated - def trace_vdelete(self, mode, cbname): ... # deprecated + def trace_vdelete(self, mode, cbname) -> None: ... # deprecated def trace_vinfo(self): ... # deprecated trace = trace_variable # deprecated + def __eq__(self, other: object) -> bool: ... class StringVar(Variable): def __init__(self, master: Misc | None = ..., value: str | None = ..., name: str | None = ...) -> None: ... @@ -236,13 +323,13 @@ class Misc: def destroy(self) -> None: ... def deletecommand(self, name: str) -> None: ... def tk_strictMotif(self, boolean: Any | None = ...): ... - def tk_bisque(self): ... - def tk_setPalette(self, *args, **kw): ... + def tk_bisque(self) -> None: ... + def tk_setPalette(self, *args, **kw) -> None: ... def wait_variable(self, name: str | Variable = ...) -> None: ... waitvar = wait_variable def wait_window(self, window: Misc | None = ...) -> None: ... def wait_visibility(self, window: Misc | None = ...) -> None: ... - def setvar(self, name: str = ..., value: str = ...): ... + def setvar(self, name: str = ..., value: str = ...) -> None: ... def getvar(self, name: str = ...): ... def getint(self, s): ... def getdouble(self, s): ... @@ -263,30 +350,32 @@ class Misc: # after_idle is essentially partialmethod(after, "idle") def after_idle(self, func: Callable[..., Any], *args: Any) -> str: ... def after_cancel(self, id: str) -> None: ... - def bell(self, displayof: Literal[0] | Misc | None = ...): ... + def bell(self, displayof: Literal[0] | Misc | None = ...) -> None: ... def clipboard_get(self, *, displayof: Misc = ..., type: str = ...) -> str: ... def clipboard_clear(self, *, displayof: Misc = ...) -> None: ... - def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...): ... 
+ def clipboard_append(self, string: str, *, displayof: Misc = ..., format: str = ..., type: str = ...) -> None: ... def grab_current(self): ... - def grab_release(self): ... + def grab_release(self) -> None: ... def grab_set(self) -> None: ... def grab_set_global(self) -> None: ... - def grab_status(self): ... - def option_add(self, pattern, value, priority: Any | None = ...): ... - def option_clear(self): ... + def grab_status(self) -> Literal["local", "global"] | None: ... + def option_add( + self, pattern, value, priority: int | Literal["widgetDefault", "startupFile", "userDefault", "interactive"] | None = ... + ) -> None: ... + def option_clear(self) -> None: ... def option_get(self, name, className): ... - def option_readfile(self, fileName, priority: Any | None = ...): ... - def selection_clear(self, **kw): ... + def option_readfile(self, fileName, priority: Any | None = ...) -> None: ... + def selection_clear(self, **kw) -> None: ... def selection_get(self, **kw): ... - def selection_handle(self, command, **kw): ... - def selection_own(self, **kw): ... + def selection_handle(self, command, **kw) -> None: ... + def selection_own(self, **kw) -> None: ... def selection_own_get(self, **kw): ... def send(self, interp, cmd, *args): ... - def lower(self, belowThis: Any | None = ...): ... - def tkraise(self, aboveThis: Any | None = ...): ... + def lower(self, belowThis: Any | None = ...) -> None: ... + def tkraise(self, aboveThis: Any | None = ...) -> None: ... lift = tkraise - def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = ...): ... - def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = ...): ... + def winfo_atom(self, name: str, displayof: Literal[0] | Misc | None = ...) -> int: ... + def winfo_atomname(self, id: int, displayof: Literal[0] | Misc | None = ...) -> str: ... def winfo_cells(self) -> int: ... def winfo_children(self) -> list[Widget]: ... 
# Widget because it can't be Toplevel or Tk def winfo_class(self) -> str: ... @@ -298,7 +387,7 @@ class Misc: def winfo_geometry(self) -> str: ... def winfo_height(self) -> int: ... def winfo_id(self) -> int: ... - def winfo_interps(self, displayof: Literal[0] | Misc | None = ...) -> Tuple[str, ...]: ... + def winfo_interps(self, displayof: Literal[0] | Misc | None = ...) -> tuple[str, ...]: ... def winfo_ismapped(self) -> bool: ... def winfo_manager(self) -> str: ... def winfo_name(self) -> str: ... @@ -306,11 +395,11 @@ class Misc: def winfo_pathname(self, id: int, displayof: Literal[0] | Misc | None = ...): ... def winfo_pixels(self, number: _ScreenUnits) -> int: ... def winfo_pointerx(self) -> int: ... - def winfo_pointerxy(self) -> Tuple[int, int]: ... + def winfo_pointerxy(self) -> tuple[int, int]: ... def winfo_pointery(self) -> int: ... def winfo_reqheight(self) -> int: ... def winfo_reqwidth(self) -> int: ... - def winfo_rgb(self, color: _Color) -> Tuple[int, int, int]: ... + def winfo_rgb(self, color: _Color) -> tuple[int, int, int]: ... def winfo_rootx(self) -> int: ... def winfo_rooty(self) -> int: ... def winfo_screen(self) -> str: ... @@ -326,7 +415,7 @@ class Misc: def winfo_viewable(self) -> bool: ... def winfo_visual(self) -> str: ... def winfo_visualid(self) -> str: ... - def winfo_visualsavailable(self, includeids: int = ...) -> list[Tuple[str, int]]: ... + def winfo_visualsavailable(self, includeids: int = ...) -> list[tuple[str, int]]: ... def winfo_vrootheight(self) -> int: ... def winfo_vrootwidth(self) -> int: ... def winfo_vrootx(self) -> int: ... @@ -336,40 +425,53 @@ class Misc: def winfo_y(self) -> int: ... def update(self) -> None: ... def update_idletasks(self) -> None: ... - def bindtags(self, tagList: Any | None = ...): ... + @overload + def bindtags(self, tagList: None = ...) -> tuple[str, ...]: ... + @overload + def bindtags(self, tagList: list[str] | tuple[str, ...]) -> None: ... 
# bind with isinstance(func, str) doesn't return anything, but all other # binds do. The default value of func is not str. @overload def bind( - self, sequence: str | None = ..., func: Callable[[Event[Misc]], Any] | None = ..., add: bool | None = ... + self, + sequence: str | None = ..., + func: Callable[[Event[Misc]], Any] | None = ..., + add: Literal["", "+"] | bool | None = ..., ) -> str: ... @overload - def bind(self, sequence: str | None, func: str, add: bool | None = ...) -> None: ... + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... @overload - def bind(self, *, func: str, add: bool | None = ...) -> None: ... + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... # There's no way to know what type of widget bind_all and bind_class # callbacks will get, so those are Misc. @overload def bind_all( - self, sequence: str | None = ..., func: Callable[[Event[Misc]], Any] | None = ..., add: bool | None = ... + self, + sequence: str | None = ..., + func: Callable[[Event[Misc]], Any] | None = ..., + add: Literal["", "+"] | bool | None = ..., ) -> str: ... @overload - def bind_all(self, sequence: str | None, func: str, add: bool | None = ...) -> None: ... + def bind_all(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... @overload - def bind_all(self, *, func: str, add: bool | None = ...) -> None: ... + def bind_all(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... @overload def bind_class( - self, className: str, sequence: str | None = ..., func: Callable[[Event[Misc]], Any] | None = ..., add: bool | None = ... + self, + className: str, + sequence: str | None = ..., + func: Callable[[Event[Misc]], Any] | None = ..., + add: Literal["", "+"] | bool | None = ..., ) -> str: ... @overload - def bind_class(self, className: str, sequence: str | None, func: str, add: bool | None = ...) -> None: ... 
+ def bind_class(self, className: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... @overload - def bind_class(self, className: str, *, func: str, add: bool | None = ...) -> None: ... + def bind_class(self, className: str, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... def unbind(self, sequence: str, funcid: str | None = ...) -> None: ... def unbind_all(self, sequence: str) -> None: ... def unbind_class(self, className: str, sequence: str) -> None: ... def mainloop(self, n: int = ...) -> None: ... - def quit(self): ... + def quit(self) -> None: ... def nametowidget(self, name: str | Misc | _tkinter.Tcl_Obj) -> Any: ... def register( self, func: Callable[..., Any], subst: Callable[..., Sequence[Any]] | None = ..., needcleanup: int = ... @@ -385,11 +487,11 @@ class Misc: @overload def grid_bbox( self, column: None = ..., row: None = ..., col2: None = ..., row2: None = ... - ) -> Tuple[int, int, int, int] | None: ... + ) -> tuple[int, int, int, int] | None: ... @overload - def grid_bbox(self, column: int, row: int, col2: None = ..., row2: None = ...) -> Tuple[int, int, int, int] | None: ... + def grid_bbox(self, column: int, row: int, col2: None = ..., row2: None = ...) -> tuple[int, int, int, int] | None: ... @overload - def grid_bbox(self, column: int, row: int, col2: int, row2: int) -> Tuple[int, int, int, int] | None: ... + def grid_bbox(self, column: int, row: int, col2: int, row2: int) -> tuple[int, int, int, int] | None: ... bbox = grid_bbox def grid_columnconfigure( self, @@ -410,15 +512,15 @@ class Misc: pad: _ScreenUnits = ..., uniform: str = ..., weight: int = ..., - ) -> _GridIndexInfo | Any: ... # can be None but annoyying to check + ) -> _GridIndexInfo | Any: ... # can be None but annoying to check columnconfigure = grid_columnconfigure rowconfigure = grid_rowconfigure - def grid_location(self, x: _ScreenUnits, y: _ScreenUnits) -> Tuple[int, int]: ... 
+ def grid_location(self, x: _ScreenUnits, y: _ScreenUnits) -> tuple[int, int]: ... @overload def grid_propagate(self, flag: bool) -> None: ... @overload def grid_propagate(self) -> bool: ... - def grid_size(self) -> Tuple[int, int]: ... + def grid_size(self) -> tuple[int, int]: ... size = grid_size # Widget because Toplevel or Tk is never a slave def pack_slaves(self) -> list[Widget]: ... @@ -459,9 +561,9 @@ class Misc: x: _ScreenUnits = ..., y: _ScreenUnits = ..., ) -> None: ... - def event_info(self, virtual: str | None = ...) -> Tuple[str, ...]: ... - def image_names(self) -> Tuple[str, ...]: ... - def image_types(self) -> Tuple[str, ...]: ... + def event_info(self, virtual: str | None = ...) -> tuple[str, ...]: ... + def image_names(self) -> tuple[str, ...]: ... + def image_types(self) -> tuple[str, ...]: ... # See #4363 and #4891 def __setitem__(self, key: str, value: Any) -> None: ... def __getitem__(self, key: str) -> Any: ... @@ -473,12 +575,12 @@ class CallWrapper: func: Any subst: Any widget: Any - def __init__(self, func, subst, widget): ... + def __init__(self, func, subst, widget) -> None: ... def __call__(self, *args): ... class XView: @overload - def xview(self) -> Tuple[float, float]: ... + def xview(self) -> tuple[float, float]: ... @overload def xview(self, *args: Any) -> Any: ... def xview_moveto(self, fraction: float) -> None: ... @@ -489,7 +591,7 @@ class XView: class YView: @overload - def yview(self) -> Tuple[float, float]: ... + def yview(self) -> tuple[float, float]: ... @overload def yview(self, *args: Any) -> Any: ... def yview_moveto(self, fraction: float) -> None: ... @@ -504,10 +606,10 @@ class Wm: @overload def wm_aspect( self, minNumer: None = ..., minDenom: None = ..., maxNumer: None = ..., maxDenom: None = ... - ) -> Tuple[int, int, int, int] | None: ... + ) -> tuple[int, int, int, int] | None: ... aspect = wm_aspect @overload - def wm_attributes(self) -> Tuple[Any, ...]: ... + def wm_attributes(self) -> tuple[Any, ...]: ... 
@overload def wm_attributes(self, __option: str) -> Any: ... @overload @@ -518,7 +620,7 @@ class Wm: @overload def wm_colormapwindows(self) -> list[Misc]: ... @overload - def wm_colormapwindows(self, __wlist: _TkinterSequence[Misc]) -> None: ... + def wm_colormapwindows(self, __wlist: list[Misc] | tuple[Misc, ...]) -> None: ... @overload def wm_colormapwindows(self, __first_wlist_item: Misc, *other_wlist_items: Misc) -> None: ... colormapwindows = wm_colormapwindows @@ -527,11 +629,11 @@ class Wm: # Some of these always return empty string, but return type is set to None to prevent accidentally using it def wm_deiconify(self) -> None: ... deiconify = wm_deiconify - def wm_focusmodel(self, model: Any | None = ...): ... + def wm_focusmodel(self, model: Literal["active", "passive"] | None = ...) -> Literal["active", "passive", ""]: ... focusmodel = wm_focusmodel def wm_forget(self, window: Wm) -> None: ... forget = wm_forget - def wm_frame(self): ... + def wm_frame(self) -> str: ... frame = wm_frame @overload def wm_geometry(self, newGeometry: None = ...) -> str: ... @@ -550,23 +652,23 @@ class Wm: iconify = wm_iconify def wm_iconmask(self, bitmap: Any | None = ...): ... iconmask = wm_iconmask - def wm_iconname(self, newName: Any | None = ...): ... + def wm_iconname(self, newName: Any | None = ...) -> str: ... iconname = wm_iconname def wm_iconphoto(self, default: bool, __image1: Image, *args: Image) -> None: ... iconphoto = wm_iconphoto - def wm_iconposition(self, x: Any | None = ..., y: Any | None = ...): ... + def wm_iconposition(self, x: int | None = ..., y: int | None = ...) -> tuple[int, int] | None: ... iconposition = wm_iconposition def wm_iconwindow(self, pathName: Any | None = ...): ... iconwindow = wm_iconwindow - def wm_manage(self, widget): ... + def wm_manage(self, widget) -> None: ... manage = wm_manage @overload - def wm_maxsize(self, width: None = ..., height: None = ...) -> Tuple[int, int]: ... 
+ def wm_maxsize(self, width: None = ..., height: None = ...) -> tuple[int, int]: ... @overload def wm_maxsize(self, width: int, height: int) -> None: ... maxsize = wm_maxsize @overload - def wm_minsize(self, width: None = ..., height: None = ...) -> Tuple[int, int]: ... + def wm_minsize(self, width: None = ..., height: None = ...) -> tuple[int, int]: ... @overload def wm_minsize(self, width: int, height: int) -> None: ... minsize = wm_minsize @@ -575,21 +677,21 @@ class Wm: @overload def wm_overrideredirect(self, boolean: bool) -> None: ... overrideredirect = wm_overrideredirect - def wm_positionfrom(self, who: Any | None = ...): ... + def wm_positionfrom(self, who: Literal["program", "user"] | None = ...) -> Literal["", "program", "user"]: ... positionfrom = wm_positionfrom @overload def wm_protocol(self, name: str, func: Callable[[], Any] | str) -> None: ... @overload def wm_protocol(self, name: str, func: None = ...) -> str: ... @overload - def wm_protocol(self, name: None = ..., func: None = ...) -> Tuple[str, ...]: ... + def wm_protocol(self, name: None = ..., func: None = ...) -> tuple[str, ...]: ... protocol = wm_protocol @overload - def wm_resizable(self, width: None = ..., height: None = ...) -> Tuple[bool, bool]: ... + def wm_resizable(self, width: None = ..., height: None = ...) -> tuple[bool, bool]: ... @overload def wm_resizable(self, width: bool, height: bool) -> None: ... resizable = wm_resizable - def wm_sizefrom(self, who: Any | None = ...): ... + def wm_sizefrom(self, who: Literal["program", "user"] | None = ...) -> Literal["", "program", "user"]: ... sizefrom = wm_sizefrom @overload def wm_state(self, newstate: None = ...) -> str: ... @@ -610,12 +712,14 @@ class Wm: withdraw = wm_withdraw class _ExceptionReportingCallback(Protocol): - def __call__(self, __exc: Type[BaseException], __val: BaseException, __tb: TracebackType) -> Any: ... + def __call__(self, __exc: type[BaseException], __val: BaseException, __tb: TracebackType | None) -> Any: ... 
class Tk(Misc, Wm): master: None def __init__( - # please update ttkthemes stub if you change this + # Make sure to keep in sync with other functions that use the same + # args. + # use `git grep screenName` to find them self, screenName: str | None = ..., baseName: str | None = ..., @@ -645,49 +749,46 @@ class Tk(Misc, Wm): relief: _Relief = ..., takefocus: _TakeFocusValue = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def loadtk(self) -> None: ... # differs from _tkinter.TkappType.loadtk def destroy(self) -> None: ... def readprofile(self, baseName: str, className: str) -> None: ... report_callback_exception: _ExceptionReportingCallback # Tk has __getattr__ so that tk_instance.foo falls back to tk_instance.tk.foo - # Please keep in sync with _tkinter.TkappType - call: Callable[..., Any] - def eval(self, __code: str) -> str: ... - adderrorinfo: Any - createcommand: Any - createfilehandler: Any - createtimerhandler: Any - deletecommand: Any - deletefilehandler: Any - dooneevent: Any - evalfile: Any - exprboolean: Any - exprdouble: Any - exprlong: Any - exprstring: Any - getboolean: Any - getdouble: Any - getint: Any - getvar: Any - globalgetvar: Any - globalsetvar: Any - globalunsetvar: Any - interpaddr: Any - mainloop: Any - quit: Any - record: Any - setvar: Any - split: Any - splitlist: Any - unsetvar: Any - wantobjects: Any - willdispatch: Any - -def Tcl(screenName: Any | None = ..., baseName: Any | None = ..., className: str = ..., useTk: bool = ...): ... + # Please keep in sync with _tkinter.TkappType. + # Some methods are intentionally missing because they are inherited from Misc instead. + def adderrorinfo(self, __msg): ... + def call(self, __command: Any, *args: Any) -> Any: ... 
+ def createcommand(self, __name, __func): ... + if sys.platform != "win32": + def createfilehandler(self, __file, __mask, __func): ... + def deletefilehandler(self, __file): ... + + def createtimerhandler(self, __milliseconds, __func): ... + def dooneevent(self, __flags: int = ...): ... + def eval(self, __script: str) -> str: ... + def evalfile(self, __fileName): ... + def exprboolean(self, __s): ... + def exprdouble(self, __s): ... + def exprlong(self, __s): ... + def exprstring(self, __s): ... + def globalgetvar(self, *args, **kwargs): ... + def globalsetvar(self, *args, **kwargs): ... + def globalunsetvar(self, *args, **kwargs): ... + def interpaddr(self): ... + def loadtk(self) -> None: ... + def record(self, __script): ... + if sys.version_info < (3, 11): + def split(self, __arg): ... + + def splitlist(self, __arg): ... + def unsetvar(self, *args, **kwargs): ... + def wantobjects(self, *args, **kwargs): ... + def willdispatch(self): ... + +def Tcl(screenName: str | None = ..., baseName: str | None = ..., className: str = ..., useTk: bool = ...) -> Tk: ... _InMiscTotal = TypedDict("_InMiscTotal", {"in": Misc}) _InMiscNonTotal = TypedDict("_InMiscNonTotal", {"in": Misc}, total=False) @@ -702,8 +803,8 @@ class _PackInfo(_InMiscTotal): # can be specified in pack(). 
ipadx: int ipady: int - padx: int | Tuple[int, int] - pady: int | Tuple[int, int] + padx: int | tuple[int, int] + pady: int | tuple[int, int] class Pack: # _PackInfo is not the valid type for cnf because pad stuff accepts any @@ -722,8 +823,8 @@ class Pack: side: Literal["left", "right", "top", "bottom"] = ..., ipadx: _ScreenUnits = ..., ipady: _ScreenUnits = ..., - padx: _ScreenUnits | Tuple[_ScreenUnits, _ScreenUnits] = ..., - pady: _ScreenUnits | Tuple[_ScreenUnits, _ScreenUnits] = ..., + padx: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., + pady: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 ) -> None: ... @@ -732,14 +833,6 @@ class Pack: pack = pack_configure forget = pack_forget propagate = Misc.pack_propagate - # commented out to avoid mypy getting confused with multiple - # inheritance and how things get overrided with different things - # info = pack_info - # pack_propagate = Misc.pack_propagate - # configure = pack_configure - # config = pack_configure - # slaves = Misc.pack_slaves - # pack_slaves = Misc.pack_slaves class _PlaceInfo(_InMiscNonTotal): # empty dict if widget hasn't been placed anchor: _Anchor @@ -776,13 +869,6 @@ class Place: def place_info(self) -> _PlaceInfo: ... 
place = place_configure info = place_info - # commented out to avoid mypy getting confused with multiple - # inheritance and how things get overrided with different things - # config = place_configure - # configure = place_configure - # forget = place_forget - # slaves = Misc.place_slaves - # place_slaves = Misc.place_slaves class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded column: int @@ -791,8 +877,8 @@ class _GridInfo(_InMiscNonTotal): # empty dict if widget hasn't been gridded rowspan: int ipadx: int ipady: int - padx: int | Tuple[int, int] - pady: int | Tuple[int, int] + padx: int | tuple[int, int] + pady: int | tuple[int, int] sticky: str # consists of letters 'n', 's', 'w', 'e', no repeats, may be empty class Grid: @@ -806,8 +892,8 @@ class Grid: rowspan: int = ..., ipadx: _ScreenUnits = ..., ipady: _ScreenUnits = ..., - padx: _ScreenUnits | Tuple[_ScreenUnits, _ScreenUnits] = ..., - pady: _ScreenUnits | Tuple[_ScreenUnits, _ScreenUnits] = ..., + padx: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., + pady: _ScreenUnits | tuple[_ScreenUnits, _ScreenUnits] = ..., sticky: str = ..., # consists of letters 'n', 's', 'w', 'e', may contain repeats, may be empty in_: Misc = ..., **kw: Any, # allow keyword argument named 'in', see #4836 @@ -818,29 +904,11 @@ class Grid: grid = grid_configure location = Misc.grid_location size = Misc.grid_size - # commented out to avoid mypy getting confused with multiple - # inheritance and how things get overrided with different things - # bbox = Misc.grid_bbox - # grid_bbox = Misc.grid_bbox - # forget = grid_forget - # info = grid_info - # grid_location = Misc.grid_location - # grid_propagate = Misc.grid_propagate - # grid_size = Misc.grid_size - # rowconfigure = Misc.grid_rowconfigure - # grid_rowconfigure = Misc.grid_rowconfigure - # grid_columnconfigure = Misc.grid_columnconfigure - # columnconfigure = Misc.grid_columnconfigure - # config = grid_configure - # configure = grid_configure - # 
propagate = Misc.grid_propagate - # slaves = Misc.grid_slaves - # grid_slaves = Misc.grid_slaves class BaseWidget(Misc): master: Misc widgetName: Any - def __init__(self, master, widgetName, cnf=..., kw=..., extra=...): ... + def __init__(self, master, widgetName, cnf=..., kw=..., extra=...) -> None: ... def destroy(self) -> None: ... # This class represents any widget except Toplevel or Tk. @@ -850,12 +918,15 @@ class Widget(BaseWidget, Pack, Place, Grid): # widgets don't. @overload def bind( - self: _W, sequence: str | None = ..., func: Callable[[Event[_W]], Any] | None = ..., add: bool | None = ... + self: _W, + sequence: str | None = ..., + func: Callable[[Event[_W]], Any] | None = ..., + add: Literal["", "+"] | bool | None = ..., ) -> str: ... @overload - def bind(self, sequence: str | None, func: str, add: bool | None = ...) -> None: ... + def bind(self, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... @overload - def bind(self, *, func: str, add: bool | None = ...) -> None: ... + def bind(self, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... class Toplevel(BaseWidget, Wm): # Toplevel and Tk have the same options because they correspond to the same @@ -887,7 +958,7 @@ class Toplevel(BaseWidget, Wm): screen: str = ..., # can't be changed after creating widget takefocus: _TakeFocusValue = ..., use: int = ..., - visual: str | Tuple[str, int] = ..., + visual: str | tuple[str, int] = ..., width: _ScreenUnits = ..., ) -> None: ... @overload @@ -911,9 +982,9 @@ class Toplevel(BaseWidget, Wm): relief: _Relief = ..., takefocus: _TakeFocusValue = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure class Button(Widget): @@ -957,7 +1028,7 @@ class Button(Widget): state: Literal["normal", "active", "disabled"] = ..., takefocus: _TakeFocusValue = ..., text: float | str = ..., - # We allow the textvariable to be any Variable, not necessarly + # We allow the textvariable to be any Variable, not necessarily # StringVar. This is useful for e.g. a button that displays the value # of an IntVar. textvariable: Variable = ..., @@ -1006,11 +1077,11 @@ class Button(Widget): underline: int = ..., width: _ScreenUnits = ..., wraplength: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def flash(self): ... + def flash(self) -> None: ... def invoke(self) -> Any: ... class Canvas(Widget, XView, YView): @@ -1043,7 +1114,7 @@ class Canvas(Widget, XView, YView): relief: _Relief = ..., # Setting scrollregion to None doesn't reset it back to empty, # but setting it to () does. 
- scrollregion: Tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | Tuple[()] = ..., + scrollregion: tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | tuple[()] = ..., selectbackground: _Color = ..., selectborderwidth: _ScreenUnits = ..., selectforeground: _Color = ..., @@ -1080,7 +1151,7 @@ class Canvas(Widget, XView, YView): insertwidth: _ScreenUnits = ..., offset: Any = ..., # undocumented relief: _Relief = ..., - scrollregion: Tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | Tuple[()] = ..., + scrollregion: tuple[_ScreenUnits, _ScreenUnits, _ScreenUnits, _ScreenUnits] | tuple[()] = ..., selectbackground: _Color = ..., selectborderwidth: _ScreenUnits = ..., selectforeground: _Color = ..., @@ -1091,9 +1162,9 @@ class Canvas(Widget, XView, YView): xscrollincrement: _ScreenUnits = ..., yscrollcommand: _XYScrollCommand = ..., yscrollincrement: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def addtag(self, *args): ... # internal method def addtag_above(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... @@ -1111,42 +1182,42 @@ class Canvas(Widget, XView, YView): def addtag_overlapping(self, newtag: str, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits) -> None: ... def addtag_withtag(self, newtag: str, tagOrId: str | _CanvasItemId) -> None: ... def find(self, *args): ... # internal method - def find_above(self, tagOrId: str | _CanvasItemId) -> Tuple[_CanvasItemId, ...]: ... - def find_all(self) -> Tuple[_CanvasItemId, ...]: ... - def find_below(self, tagOrId: str | _CanvasItemId) -> Tuple[_CanvasItemId, ...]: ... + def find_above(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... 
+ def find_all(self) -> tuple[_CanvasItemId, ...]: ... + def find_below(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... def find_closest( self, x: _ScreenUnits, y: _ScreenUnits, halo: _ScreenUnits | None = ..., start: str | _CanvasItemId | None = ... - ) -> Tuple[_CanvasItemId, ...]: ... + ) -> tuple[_CanvasItemId, ...]: ... def find_enclosed( self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: _ScreenUnits - ) -> Tuple[_CanvasItemId, ...]: ... - def find_overlapping(self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: float) -> Tuple[_CanvasItemId, ...]: ... - def find_withtag(self, tagOrId: str | _CanvasItemId) -> Tuple[_CanvasItemId, ...]: ... - # Canvas.bbox() args are `str | _CanvasItemId`, but mypy rejects that - # description because it's incompatible with Misc.bbox(), an alias for - # Misc.grid_bbox(). Yes it is, but there's not much we can do about it. - def bbox(self, *args: str | _CanvasItemId) -> Tuple[int, int, int, int]: ... # type: ignore + ) -> tuple[_CanvasItemId, ...]: ... + def find_overlapping(self, x1: _ScreenUnits, y1: _ScreenUnits, x2: _ScreenUnits, y2: float) -> tuple[_CanvasItemId, ...]: ... + def find_withtag(self, tagOrId: str | _CanvasItemId) -> tuple[_CanvasItemId, ...]: ... + # Incompatible with Misc.bbox(), tkinter violates LSP + def bbox(self, *args: str | _CanvasItemId) -> tuple[int, int, int, int]: ... # type: ignore[override] @overload def tag_bind( self, - tagOrId: str | int, + tagOrId: str | _CanvasItemId, sequence: str | None = ..., func: Callable[[Event[Canvas]], Any] | None = ..., - add: bool | None = ..., + add: Literal["", "+"] | bool | None = ..., ) -> str: ... @overload - def tag_bind(self, tagOrId: str | int, sequence: str | None, func: str, add: bool | None = ...) -> None: ... + def tag_bind( + self, tagOrId: str | int, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ... + ) -> None: ... 
@overload - def tag_bind(self, tagOrId: str | int, *, func: str, add: bool | None = ...) -> None: ... - def tag_unbind(self, tagOrId: str | int, sequence: str, funcid: str | None = ...) -> None: ... + def tag_bind(self, tagOrId: str | _CanvasItemId, *, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... + def tag_unbind(self, tagOrId: str | _CanvasItemId, sequence: str, funcid: str | None = ...) -> None: ... def canvasx(self, screenx, gridspacing: Any | None = ...): ... def canvasy(self, screeny, gridspacing: Any | None = ...): ... @overload - def coords(self) -> list[float]: ... + def coords(self, __tagOrId: str | _CanvasItemId) -> list[float]: ... @overload - def coords(self, __args: _TkinterSequence[int] | _TkinterSequence[float]) -> None: ... + def coords(self, __tagOrId: str | _CanvasItemId, __args: list[int] | list[float] | tuple[float, ...]) -> None: ... @overload - def coords(self, __x1: float, __y1: float, *args: float) -> None: ... + def coords(self, __tagOrId: str | _CanvasItemId, __x1: float, __y1: float, *args: float) -> None: ... # create_foo() methods accept coords as a list, a tuple, or as separate arguments. # Keyword arguments should be the same in each pair of overloads. def create_arc(self, *args, **kw) -> _CanvasItemId: ... @@ -1160,16 +1231,16 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., arrow: Literal["first", "last", "both"] = ..., - arrowshape: Tuple[float, float, float] = ..., + arrowshape: tuple[float, float, float] = ..., capstyle: Literal["round", "projecting", "butt"] = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] 
= ..., disabledfill: _Color = ..., disabledstipple: _Bitmap = ..., disabledwidth: _ScreenUnits = ..., @@ -1180,7 +1251,7 @@ class Canvas(Widget, XView, YView): splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload @@ -1188,16 +1259,16 @@ class Canvas(Widget, XView, YView): self, __coords: tuple[float, float, float, float] | list[int] | list[float], *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., arrow: Literal["first", "last", "both"] = ..., - arrowshape: Tuple[float, float, float] = ..., + arrowshape: tuple[float, float, float] = ..., capstyle: Literal["round", "projecting", "butt"] = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., disabledfill: _Color = ..., disabledstipple: _Bitmap = ..., disabledwidth: _ScreenUnits = ..., @@ -1208,7 +1279,7 @@ class Canvas(Widget, XView, YView): splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload @@ -1219,15 +1290,15 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] 
= ..., activefill: _Color = ..., activeoutline: _Color = ..., activeoutlinestipple: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., disabledfill: _Color = ..., disabledoutline: _Color = ..., disabledoutlinestipple: _Color = ..., @@ -1240,7 +1311,7 @@ class Canvas(Widget, XView, YView): outlinestipple: _Bitmap = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload @@ -1248,15 +1319,15 @@ class Canvas(Widget, XView, YView): self, __coords: tuple[float, float, float, float] | list[int] | list[float], *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activeoutline: _Color = ..., activeoutlinestipple: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., disabledfill: _Color = ..., disabledoutline: _Color = ..., disabledoutlinestipple: _Color = ..., @@ -1269,7 +1340,7 @@ class Canvas(Widget, XView, YView): outlinestipple: _Bitmap = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... 
@overload @@ -1280,15 +1351,15 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *xy_pairs: float, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activeoutline: _Color = ..., activeoutlinestipple: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., disabledfill: _Color = ..., disabledoutline: _Color = ..., disabledoutlinestipple: _Color = ..., @@ -1304,23 +1375,23 @@ class Canvas(Widget, XView, YView): splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload def create_polygon( self, - __coords: Tuple[float, ...] | list[int] | list[float], + __coords: tuple[float, ...] | list[int] | list[float], *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activeoutline: _Color = ..., activeoutlinestipple: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., disabledfill: _Color = ..., disabledoutline: _Color = ..., disabledoutlinestipple: _Color = ..., @@ -1336,7 +1407,7 @@ class Canvas(Widget, XView, YView): splinesteps: float = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] 
= ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload @@ -1347,15 +1418,15 @@ class Canvas(Widget, XView, YView): __x1: float, __y1: float, *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activeoutline: _Color = ..., activeoutlinestipple: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] = ..., disabledfill: _Color = ..., disabledoutline: _Color = ..., disabledoutlinestipple: _Color = ..., @@ -1368,7 +1439,7 @@ class Canvas(Widget, XView, YView): outlinestipple: _Bitmap = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload @@ -1376,15 +1447,15 @@ class Canvas(Widget, XView, YView): self, __coords: tuple[float, float, float, float] | list[int] | list[float], *, - activedash: str | _TkinterSequence[int] = ..., + activedash: str | list[int] | tuple[int, ...] = ..., activefill: _Color = ..., activeoutline: _Color = ..., activeoutlinestipple: _Color = ..., activestipple: str = ..., activewidth: _ScreenUnits = ..., - dash: str | _TkinterSequence[int] = ..., + dash: str | list[int] | tuple[int, ...] = ..., dashoffset: _ScreenUnits = ..., - disableddash: str | _TkinterSequence[int] = ..., + disableddash: str | list[int] | tuple[int, ...] 
= ..., disabledfill: _Color = ..., disabledoutline: _Color = ..., disabledoutlinestipple: _Color = ..., @@ -1397,7 +1468,7 @@ class Canvas(Widget, XView, YView): outlinestipple: _Bitmap = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @overload @@ -1417,7 +1488,7 @@ class Canvas(Widget, XView, YView): offset: _ScreenUnits = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., text: float | str = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @@ -1437,7 +1508,7 @@ class Canvas(Widget, XView, YView): offset: _ScreenUnits = ..., state: Literal["normal", "active", "disabled"] = ..., stipple: _Bitmap = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., text: float | str = ..., width: _ScreenUnits = ..., ) -> _CanvasItemId: ... @@ -1450,7 +1521,7 @@ class Canvas(Widget, XView, YView): anchor: _Anchor = ..., height: _ScreenUnits = ..., state: Literal["normal", "active", "disabled"] = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., window: Widget = ..., ) -> _CanvasItemId: ... @@ -1462,30 +1533,31 @@ class Canvas(Widget, XView, YView): anchor: _Anchor = ..., height: _ScreenUnits = ..., state: Literal["normal", "active", "disabled"] = ..., - tags: str | _TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., width: _ScreenUnits = ..., window: Widget = ..., ) -> _CanvasItemId: ... - def dchars(self, *args): ... + def dchars(self, *args) -> None: ... def delete(self, *tagsOrCanvasIds: str | _CanvasItemId) -> None: ... @overload def dtag(self, __tag: str, __tag_to_delete: str | None = ...) -> None: ... 
@overload def dtag(self, __id: _CanvasItemId, __tag_to_delete: str) -> None: ... def focus(self, *args): ... - def gettags(self, __tagOrId: str | _CanvasItemId) -> Tuple[str, ...]: ... - def icursor(self, *args): ... + def gettags(self, __tagOrId: str | _CanvasItemId) -> tuple[str, ...]: ... + def icursor(self, *args) -> None: ... def index(self, *args): ... - def insert(self, *args): ... + def insert(self, *args) -> None: ... def itemcget(self, tagOrId, option): ... # itemconfigure kwargs depend on item type, which is not known when type checking def itemconfigure( self, tagOrId: str | _CanvasItemId, cnf: dict[str, Any] | None = ..., **kw: Any ) -> dict[str, tuple[str, str, str, str, str]] | None: ... itemconfig = itemconfigure - def move(self, *args): ... + def move(self, *args) -> None: ... if sys.version_info >= (3, 8): def moveto(self, tagOrId: str | _CanvasItemId, x: Literal[""] | float = ..., y: Literal[""] | float = ...) -> None: ... + def postscript(self, cnf=..., **kw): ... # tkinter does: # lower = tag_lower @@ -1493,18 +1565,18 @@ class Canvas(Widget, XView, YView): # # But mypy doesn't like aliasing here (maybe because Misc defines the same names) def tag_lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... - def lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore + def lower(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] def tag_raise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... - def tkraise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore - def lift(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore - def scale(self, *args): ... - def scan_mark(self, x, y): ... - def scan_dragto(self, x, y, gain: int = ...): ... 
- def select_adjust(self, tagOrId, index): ... - def select_clear(self): ... - def select_from(self, tagOrId, index): ... + def tkraise(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] + def lift(self, __first: str | _CanvasItemId, __second: str | _CanvasItemId | None = ...) -> None: ... # type: ignore[override] + def scale(self, *args) -> None: ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y, gain: int = ...) -> None: ... + def select_adjust(self, tagOrId, index) -> None: ... + def select_clear(self) -> None: ... + def select_from(self, tagOrId, index) -> None: ... def select_item(self): ... - def select_to(self, tagOrId, index): ... + def select_to(self, tagOrId, index) -> None: ... def type(self, tagOrId): ... class Checkbutton(Widget): @@ -1614,15 +1686,15 @@ class Checkbutton(Widget): variable: Variable | Literal[""] = ..., width: _ScreenUnits = ..., wraplength: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def deselect(self): ... - def flash(self): ... + def deselect(self) -> None: ... + def flash(self) -> None: ... def invoke(self) -> Any: ... - def select(self): ... - def toggle(self): ... + def select(self) -> None: ... + def toggle(self) -> None: ... _EntryIndex = Union[str, int] # "INDICES" in manual page @@ -1713,19 +1785,19 @@ class Entry(Widget, XView): vcmd: _EntryValidateCommand = ..., width: int = ..., xscrollcommand: _XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... 
+ def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def delete(self, first: _EntryIndex, last: _EntryIndex | None = ...) -> None: ... def get(self) -> str: ... def icursor(self, index: _EntryIndex) -> None: ... def index(self, index: _EntryIndex) -> int: ... def insert(self, index: _EntryIndex, string: str) -> None: ... - def scan_mark(self, x): ... - def scan_dragto(self, x): ... + def scan_mark(self, x) -> None: ... + def scan_dragto(self, x) -> None: ... def selection_adjust(self, index: _EntryIndex) -> None: ... - def selection_clear(self) -> None: ... # type: ignore + def selection_clear(self) -> None: ... # type: ignore[override] def selection_from(self, index: _EntryIndex) -> None: ... def selection_present(self) -> bool: ... def selection_range(self, start: _EntryIndex, end: _EntryIndex) -> None: ... @@ -1748,9 +1820,9 @@ class Frame(Widget): bg: _Color = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., - class_: str = ..., - colormap: Literal["new", ""] | Misc = ..., - container: bool = ..., + class_: str = ..., # can't be changed with configure() + colormap: Literal["new", ""] | Misc = ..., # can't be changed with configure() + container: bool = ..., # can't be changed with configure() cursor: _Cursor = ..., height: _ScreenUnits = ..., highlightbackground: _Color = ..., @@ -1761,7 +1833,7 @@ class Frame(Widget): pady: _ScreenUnits = ..., relief: _Relief = ..., takefocus: _TakeFocusValue = ..., - visual: str | Tuple[str, int] = ..., + visual: str | tuple[str, int] = ..., # can't be changed with configure() width: _ScreenUnits = ..., ) -> None: ... @overload @@ -1784,9 +1856,9 @@ class Frame(Widget): relief: _Relief = ..., takefocus: _TakeFocusValue = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... 
+ def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): @@ -1864,9 +1936,9 @@ class Label(Widget): underline: int = ..., width: _ScreenUnits = ..., wraplength: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Listbox(Widget, XView, YView): @@ -1954,30 +2026,30 @@ class Listbox(Widget, XView, YView): width: int = ..., xscrollcommand: _XYScrollCommand = ..., yscrollcommand: _XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def activate(self, index): ... - def bbox(self, index): ... + def activate(self, index) -> None: ... + def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] def curselection(self): ... - def delete(self, first, last: Any | None = ...): ... + def delete(self, first, last: Any | None = ...) -> None: ... def get(self, first, last: Any | None = ...): ... def index(self, index): ... - def insert(self, index, *elements): ... + def insert(self, index, *elements) -> None: ... def nearest(self, y): ... - def scan_mark(self, x, y): ... - def scan_dragto(self, x, y): ... - def see(self, index): ... - def selection_anchor(self, index): ... + def scan_mark(self, x, y) -> None: ... + def scan_dragto(self, x, y) -> None: ... + def see(self, index) -> None: ... + def selection_anchor(self, index) -> None: ... select_anchor: Any - def selection_clear(self, first, last: Any | None = ...): ... 
# type: ignore + def selection_clear(self, first, last: Any | None = ...) -> None: ... # type: ignore[override] select_clear: Any def selection_includes(self, index): ... select_includes: Any - def selection_set(self, first, last: Any | None = ...): ... + def selection_set(self, first, last: Any | None = ...) -> None: ... select_set: Any - def size(self): ... + def size(self) -> int: ... # type: ignore[override] def itemcget(self, index, option): ... def itemconfigure(self, index, cnf: Any | None = ..., **kw): ... itemconfig: Any @@ -2042,9 +2114,9 @@ class Menu(Widget): tearoffcommand: Callable[[str, str], Any] | str = ..., title: str = ..., type: Literal["menubar", "tearoff", "normal"] = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def tk_popup(self, x: int, y: int, entry: _MenuIndex = ...) -> None: ... def activate(self, index: _MenuIndex) -> None: ... @@ -2244,7 +2316,7 @@ class Menu(Widget): def entrycget(self, index: _MenuIndex, option: str) -> Any: ... def entryconfigure( self, index: _MenuIndex, cnf: dict[str, Any] | None = ..., **kw: Any - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... entryconfig = entryconfigure def index(self, index: _MenuIndex) -> int | None: ... def invoke(self, index: _MenuIndex) -> Any: ... @@ -2335,9 +2407,9 @@ class Menubutton(Widget): underline: int = ..., width: _ScreenUnits = ..., wraplength: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure class Message(Widget): @@ -2398,9 +2470,9 @@ class Message(Widget): text: float | str = ..., textvariable: Variable = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Radiobutton(Widget): @@ -2498,14 +2570,14 @@ class Radiobutton(Widget): variable: Variable | Literal[""] = ..., width: _ScreenUnits = ..., wraplength: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def deselect(self): ... - def flash(self): ... + def deselect(self) -> None: ... + def flash(self) -> None: ... def invoke(self) -> Any: ... - def select(self): ... + def select(self) -> None: ... class Scale(Widget): def __init__( @@ -2589,14 +2661,14 @@ class Scale(Widget): troughcolor: _Color = ..., variable: IntVar | DoubleVar = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def get(self): ... - def set(self, value): ... - def coords(self, value: Any | None = ...): ... - def identify(self, x, y): ... + def get(self) -> float: ... + def set(self, value) -> None: ... + def coords(self, value: float | None = ...) -> tuple[int, int]: ... + def identify(self, x, y) -> Literal["", "slider", "trough1", "trough2"]: ... 
class Scrollbar(Widget): def __init__( @@ -2615,7 +2687,7 @@ class Scrollbar(Widget): # 'SCROLLING COMMANDS' in scrollbar man page. There doesn't seem to # be any way to specify an overloaded callback function, so we say # that it can take any args while it can't in reality. - command: Callable[..., Tuple[float, float] | None] | str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., cursor: _Cursor = ..., elementborderwidth: _ScreenUnits = ..., highlightbackground: _Color = ..., @@ -2643,7 +2715,7 @@ class Scrollbar(Widget): bg: _Color = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., - command: Callable[..., Tuple[float, float] | None] | str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., cursor: _Cursor = ..., elementborderwidth: _ScreenUnits = ..., highlightbackground: _Color = ..., @@ -2657,16 +2729,16 @@ class Scrollbar(Widget): takefocus: _TakeFocusValue = ..., troughcolor: _Color = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def activate(self, index: Any | None = ...): ... - def delta(self, deltax, deltay): ... - def fraction(self, x, y): ... - def identify(self, x, y): ... - def get(self): ... - def set(self, first, last): ... + def delta(self, deltax: int, deltay: int) -> float: ... + def fraction(self, x: int, y: int) -> float: ... + def identify(self, x: int, y: int) -> Literal["arrow1", "arrow2", "slider", "trough1", "trough2", ""]: ... + def get(self) -> tuple[float, float, float, float] | tuple[float, float]: ... + def set(self, first: float, last: float) -> None: ... 
_TextIndex = Union[_tkinter.Tcl_Obj, str, float, Misc] @@ -2718,7 +2790,7 @@ class Text(Widget, XView, YView): startline: int | Literal[""] = ..., state: Literal["normal", "disabled"] = ..., # Literal inside Tuple doesn't actually work - tabs: _ScreenUnits | str | Tuple[_ScreenUnits | str, ...] = ..., + tabs: _ScreenUnits | str | tuple[_ScreenUnits | str, ...] = ..., tabstyle: Literal["tabular", "wordprocessor"] = ..., takefocus: _TakeFocusValue = ..., undo: bool = ..., @@ -2769,7 +2841,7 @@ class Text(Widget, XView, YView): spacing3: _ScreenUnits = ..., startline: int | Literal[""] = ..., state: Literal["normal", "disabled"] = ..., - tabs: _ScreenUnits | str | Tuple[_ScreenUnits | str, ...] = ..., + tabs: _ScreenUnits | str | tuple[_ScreenUnits | str, ...] = ..., tabstyle: Literal["tabular", "wordprocessor"] = ..., takefocus: _TakeFocusValue = ..., undo: bool = ..., @@ -2777,11 +2849,11 @@ class Text(Widget, XView, YView): wrap: Literal["none", "char", "word"] = ..., xscrollcommand: _XYScrollCommand = ..., yscrollcommand: _XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, index: _TextIndex) -> Tuple[int, int, int, int] | None: ... # type: ignore + def bbox(self, index: _TextIndex) -> tuple[int, int, int, int] | None: ... # type: ignore[override] def compare(self, index1: _TextIndex, op: Literal["<", "<=", "==", ">=", ">", "!="], index2: _TextIndex) -> bool: ... def count(self, index1, index2, *args): ... # TODO @overload @@ -2789,7 +2861,7 @@ class Text(Widget, XView, YView): @overload def debug(self, boolean: bool) -> None: ... def delete(self, index1: _TextIndex, index2: _TextIndex | None = ...) -> None: ... 
- def dlineinfo(self, index: _TextIndex) -> Tuple[int, int, int, int, int] | None: ... + def dlineinfo(self, index: _TextIndex) -> tuple[int, int, int, int, int] | None: ... @overload def dump( self, @@ -2803,7 +2875,7 @@ class Text(Widget, XView, YView): tag: bool = ..., text: bool = ..., window: bool = ..., - ) -> list[Tuple[str, str, str]]: ... + ) -> list[tuple[str, str, str]]: ... @overload def dump( self, @@ -2848,20 +2920,20 @@ class Text(Widget, XView, YView): def image_create(self, index, cnf=..., **kw): ... def image_names(self): ... def index(self, index: _TextIndex) -> str: ... - def insert(self, index: _TextIndex, chars: str, *args: str | _TkinterSequence[str]) -> None: ... + def insert(self, index: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... @overload def mark_gravity(self, markName: str, direction: None = ...) -> Literal["left", "right"]: ... @overload def mark_gravity(self, markName: str, direction: Literal["left", "right"]) -> None: ... # actually returns empty string - def mark_names(self) -> Tuple[str, ...]: ... + def mark_names(self) -> tuple[str, ...]: ... def mark_set(self, markName: str, index: _TextIndex) -> None: ... def mark_unset(self, *markNames: str) -> None: ... def mark_next(self, index: _TextIndex) -> str | None: ... def mark_previous(self, index: _TextIndex) -> str | None: ... # **kw of peer_create is same as the kwargs of Text.__init__ def peer_create(self, newPathName: str | Text, cnf: dict[str, Any] = ..., **kw: Any) -> None: ... - def peer_names(self) -> Tuple[_tkinter.Tcl_Obj, ...]: ... - def replace(self, index1: _TextIndex, index2: _TextIndex, chars: str, *args: str | _TkinterSequence[str]) -> None: ... + def peer_names(self) -> tuple[_tkinter.Tcl_Obj, ...]: ... + def replace(self, index1: _TextIndex, index2: _TextIndex, chars: str, *args: str | list[str] | tuple[str, ...]) -> None: ... def scan_mark(self, x: int, y: int) -> None: ... def scan_dragto(self, x: int, y: int) -> None: ... 
def search( @@ -2882,10 +2954,14 @@ class Text(Widget, XView, YView): # tag_bind stuff is very similar to Canvas @overload def tag_bind( - self, tagName: str, sequence: str | None, func: Callable[[Event[Text]], Any] | None, add: bool | None = ... + self, + tagName: str, + sequence: str | None, + func: Callable[[Event[Text]], Any] | None, + add: Literal["", "+"] | bool | None = ..., ) -> str: ... @overload - def tag_bind(self, tagName: str, sequence: str | None, func: str, add: bool | None = ...) -> None: ... + def tag_bind(self, tagName: str, sequence: str | None, func: str, add: Literal["", "+"] | bool | None = ...) -> None: ... def tag_unbind(self, tagName: str, sequence: str, funcid: str | None = ...) -> None: ... # allowing any string for cget instead of just Literals because there's no other way to look up tag options def tag_cget(self, tagName: str, option: str) -> Any: ... @@ -2923,30 +2999,30 @@ class Text(Widget, XView, YView): underline: bool = ..., underlinefg: _Color = ..., wrap: Literal["none", "char", "word"] = ..., # be careful with "none" vs None - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def tag_configure(self, tagName: str, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def tag_configure(self, tagName: str, cnf: str) -> tuple[str, str, str, Any, Any]: ... tag_config = tag_configure def tag_delete(self, __first_tag_name: str, *tagNames: str) -> None: ... # error if no tag names given def tag_lower(self, tagName: str, belowThis: str | None = ...) -> None: ... - def tag_names(self, index: _TextIndex | None = ...) -> Tuple[str, ...]: ... - def tag_nextrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> Tuple[str, str] | Tuple[()]: ... - def tag_prevrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> Tuple[str, str] | Tuple[()]: ... + def tag_names(self, index: _TextIndex | None = ...) 
-> tuple[str, ...]: ... + def tag_nextrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> tuple[str, str] | tuple[()]: ... + def tag_prevrange(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> tuple[str, str] | tuple[()]: ... def tag_raise(self, tagName: str, aboveThis: str | None = ...) -> None: ... - def tag_ranges(self, tagName: str) -> Tuple[_tkinter.Tcl_Obj, ...]: ... + def tag_ranges(self, tagName: str) -> tuple[_tkinter.Tcl_Obj, ...]: ... # tag_remove and tag_delete are different def tag_remove(self, tagName: str, index1: _TextIndex, index2: _TextIndex | None = ...) -> None: ... # TODO: window_* methods def window_cget(self, index, option): ... def window_configure(self, index, cnf: Any | None = ..., **kw): ... window_config = window_configure - def window_create(self, index, cnf=..., **kw): ... + def window_create(self, index, cnf=..., **kw) -> None: ... def window_names(self): ... def yview_pickplace(self, *what): ... # deprecated class _setit: - def __init__(self, var, value, callback: Any | None = ...): ... - def __call__(self, *args): ... + def __init__(self, var, value, callback: Any | None = ...) -> None: ... + def __call__(self, *args) -> None: ... # manual page: tk_optionMenu class OptionMenu(Menubutton): @@ -2963,7 +3039,7 @@ class OptionMenu(Menubutton): command: Callable[[StringVar], Any] | None = ..., ) -> None: ... # configure, config, cget are inherited from Menubutton - # destroy and __getitem__ are overrided, signature does not change + # destroy and __getitem__ are overridden, signature does not change class _Image(Protocol): tk: _tkinter.TkappType @@ -2973,9 +3049,11 @@ class _Image(Protocol): class Image: name: Any tk: _tkinter.TkappType - def __init__(self, imgtype, name: Any | None = ..., cnf=..., master: Misc | _tkinter.TkappType | None = ..., **kw): ... - def __del__(self): ... - def __setitem__(self, key, value): ... 
+ def __init__( + self, imgtype, name: Any | None = ..., cnf=..., master: Misc | _tkinter.TkappType | None = ..., **kw + ) -> None: ... + def __del__(self) -> None: ... + def __setitem__(self, key, value) -> None: ... def __getitem__(self, key): ... configure: Any config: Any @@ -3016,9 +3094,21 @@ class PhotoImage(Image): def copy(self) -> PhotoImage: ... def zoom(self, x: int, y: int | Literal[""] = ...) -> PhotoImage: ... def subsample(self, x: int, y: int | Literal[""] = ...) -> PhotoImage: ... - def get(self, x: int, y: int) -> Tuple[int, int, int]: ... - def put(self, data: str | _TkinterSequence[str] | _TkinterSequence2D[_Color], to: Tuple[int, int] | None = ...) -> None: ... - def write(self, filename: StrOrBytesPath, format: str | None = ..., from_coords: Tuple[int, int] | None = ...) -> None: ... + def get(self, x: int, y: int) -> tuple[int, int, int]: ... + def put( + self, + data: ( + str + | list[str] + | list[list[_Color]] + | list[tuple[_Color, ...]] + | tuple[str, ...] + | tuple[list[_Color], ...] + | tuple[tuple[_Color, ...], ...] + ), + to: tuple[int, int] | None = ..., + ) -> None: ... + def write(self, filename: StrOrBytesPath, format: str | None = ..., from_coords: tuple[int, int] | None = ...) -> None: ... if sys.version_info >= (3, 8): def transparency_get(self, x: int, y: int) -> bool: ... def transparency_set(self, x: int, y: int, boolean: bool) -> None: ... @@ -3038,8 +3128,8 @@ class BitmapImage(Image): maskfile: StrOrBytesPath = ..., ) -> None: ... -def image_names() -> Tuple[str, ...]: ... -def image_types() -> Tuple[str, ...]: ... +def image_names() -> tuple[str, ...]: ... +def image_types() -> tuple[str, ...]: ... 
class Spinbox(Widget, XView): def __init__( @@ -3057,8 +3147,8 @@ class Spinbox(Widget, XView): buttoncursor: _Cursor = ..., buttondownrelief: _Relief = ..., buttonuprelief: _Relief = ..., - # percent substitutions don't seem to be supported, it's similar to Entry's validion stuff - command: Callable[[], Any] | str | _TkinterSequence[str] = ..., + # percent substitutions don't seem to be supported, it's similar to Entry's validation stuff + command: Callable[[], Any] | str | list[str] | tuple[str, ...] = ..., cursor: _Cursor = ..., disabledbackground: _Color = ..., disabledforeground: _Color = ..., @@ -3095,7 +3185,7 @@ class Spinbox(Widget, XView): validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., validatecommand: _EntryValidateCommand = ..., vcmd: _EntryValidateCommand = ..., - values: _TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., wrap: bool = ..., xscrollcommand: _XYScrollCommand = ..., @@ -3115,7 +3205,7 @@ class Spinbox(Widget, XView): buttoncursor: _Cursor = ..., buttondownrelief: _Relief = ..., buttonuprelief: _Relief = ..., - command: Callable[[], Any] | str | _TkinterSequence[str] = ..., + command: Callable[[], Any] | str | list[str] | tuple[str, ...] = ..., cursor: _Cursor = ..., disabledbackground: _Color = ..., disabledforeground: _Color = ..., @@ -3151,27 +3241,27 @@ class Spinbox(Widget, XView): validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., validatecommand: _EntryValidateCommand = ..., vcmd: _EntryValidateCommand = ..., - values: _TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., wrap: bool = ..., xscrollcommand: _XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure - def bbox(self, index): ... - def delete(self, first, last: Any | None = ...): ... - def get(self): ... + def bbox(self, index) -> tuple[int, int, int, int] | None: ... # type: ignore[override] + def delete(self, first, last: Any | None = ...) -> Literal[""]: ... + def get(self) -> str: ... def icursor(self, index): ... - def identify(self, x, y): ... - def index(self, index): ... - def insert(self, index, s): ... + def identify(self, x: int, y: int) -> Literal["", "buttondown", "buttonup", "entry"]: ... + def index(self, index: _EntryIndex) -> int: ... + def insert(self, index: _EntryIndex, s: str) -> Literal[""]: ... # spinbox.invoke("asdf") gives error mentioning .invoke("none"), but it's not documented def invoke(self, element: Literal["none", "buttonup", "buttondown"]) -> Literal[""]: ... def scan(self, *args): ... def scan_mark(self, x): ... def scan_dragto(self, x): ... - def selection(self, *args: Any) -> Tuple[int, ...]: ... + def selection(self, *args: Any) -> tuple[int, ...]: ... def selection_adjust(self, index): ... def selection_clear(self): ... def selection_element(self, element: Any | None = ...): ... @@ -3192,9 +3282,9 @@ class LabelFrame(Widget): bg: _Color = ..., border: _ScreenUnits = ..., borderwidth: _ScreenUnits = ..., - class_: str = ..., - colormap: Literal["new", ""] | Misc = ..., - container: bool = ..., # undocumented + class_: str = ..., # can't be changed with configure() + colormap: Literal["new", ""] | Misc = ..., # can't be changed with configure() + container: bool = ..., # undocumented, can't be changed with configure() cursor: _Cursor = ..., fg: _Color = ..., font: _FontDescription = ..., @@ -3212,7 +3302,7 @@ class LabelFrame(Widget): relief: _Relief = ..., takefocus: _TakeFocusValue = ..., text: float | str = ..., - visual: str | Tuple[str, int] = ..., + visual: str | tuple[str, int] = ..., # can't be changed with configure() width: _ScreenUnits = ..., ) -> None: ... 
@overload @@ -3241,9 +3331,9 @@ class LabelFrame(Widget): takefocus: _TakeFocusValue = ..., text: float | str = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class PanedWindow(Widget): @@ -3301,14 +3391,14 @@ class PanedWindow(Widget): sashwidth: _ScreenUnits = ..., showhandle: bool = ..., width: _ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def add(self, child: Widget, **kw): ... - def remove(self, child): ... + def add(self, child: Widget, **kw) -> None: ... + def remove(self, child) -> None: ... forget: Any - def identify(self, x, y): ... + def identify(self, x: int, y: int): ... def proxy(self, *args): ... def proxy_coord(self): ... def proxy_forget(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/colorchooser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/colorchooser.pyi index 184f9a2c9914..e0473afa5a7a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/colorchooser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/colorchooser.pyi @@ -1,7 +1,11 @@ +import sys from tkinter.commondialog import Dialog -from typing import Any, ClassVar, Tuple +from typing import Any, ClassVar + +if sys.version_info >= (3, 9): + __all__ = ["Chooser", "askcolor"] class Chooser(Dialog): command: ClassVar[str] -def askcolor(color: str | bytes | None = ..., **options: Any) -> Tuple[None, None] | Tuple[Tuple[float, float, float], str]: ... 
+def askcolor(color: str | bytes | None = ..., **options: Any) -> tuple[None, None] | tuple[tuple[float, float, float], str]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/commondialog.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/commondialog.pyi index aee58111c73f..1c5fb0f53706 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/commondialog.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/commondialog.pyi @@ -1,5 +1,9 @@ +import sys from typing import Any, ClassVar, Mapping +if sys.version_info >= (3, 9): + __all__ = ["Dialog"] + class Dialog: command: ClassVar[str | None] master: Any | None diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dialog.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dialog.pyi index bc927e1f24ac..f9c8487c44a9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dialog.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dialog.pyi @@ -1,6 +1,10 @@ +import sys from tkinter import Widget from typing import Any, Mapping +if sys.version_info >= (3, 9): + __all__ = ["Dialog"] + DIALOG_ICON: str class Dialog(Widget): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dnd.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dnd.pyi index 339291a20d0f..e2cfc43f606a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dnd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/dnd.pyi @@ -1,6 +1,10 @@ +import sys from tkinter import Event, Misc, Tk, Widget from typing import ClassVar, Protocol +if sys.version_info >= (3, 9): + __all__ = ["dnd_start", "DndHandler"] + class _DndSource(Protocol): def dnd_end(self, target: Widget | None, event: Event[Misc] | None) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/filedialog.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/filedialog.pyi index ca5ecfff262d..dc0e01a6d1d1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/filedialog.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/filedialog.pyi @@ -1,9 +1,27 @@ +import sys from _typeshed import StrOrBytesPath -from tkinter import Button, Entry, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, _TkinterSequence, commondialog -from typing import IO, Any, ClassVar, Iterable, Tuple +from tkinter import Button, Entry, Frame, Listbox, Misc, Scrollbar, StringVar, Toplevel, commondialog +from typing import IO, Any, ClassVar, Iterable from typing_extensions import Literal -dialogstates: dict[Any, Tuple[Any, Any]] +if sys.version_info >= (3, 9): + __all__ = [ + "FileDialog", + "LoadFileDialog", + "SaveFileDialog", + "Open", + "SaveAs", + "Directory", + "askopenfilename", + "asksaveasfilename", + "askopenfilenames", + "askopenfile", + "askopenfiles", + "asksaveasfile", + "askdirectory", + ] + +dialogstates: dict[Any, tuple[Any, Any]] class FileDialog: title: str @@ -46,7 +64,7 @@ class LoadFileDialog(FileDialog): class SaveFileDialog(FileDialog): title: str - def ok_command(self): ... + def ok_command(self) -> None: ... class _Dialog(commondialog.Dialog): ... 
@@ -64,7 +82,7 @@ def asksaveasfilename( *, confirmoverwrite: bool | None = ..., defaultextension: str | None = ..., - filetypes: Iterable[Tuple[str, str] | Tuple[str, _TkinterSequence[str]]] | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., initialdir: StrOrBytesPath | None = ..., initialfile: StrOrBytesPath | None = ..., parent: Misc | None = ..., @@ -74,7 +92,7 @@ def asksaveasfilename( def askopenfilename( *, defaultextension: str | None = ..., - filetypes: Iterable[Tuple[str, str] | Tuple[str, _TkinterSequence[str]]] | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., initialdir: StrOrBytesPath | None = ..., initialfile: StrOrBytesPath | None = ..., parent: Misc | None = ..., @@ -84,13 +102,13 @@ def askopenfilename( def askopenfilenames( *, defaultextension: str | None = ..., - filetypes: Iterable[Tuple[str, str] | Tuple[str, _TkinterSequence[str]]] | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., initialdir: StrOrBytesPath | None = ..., initialfile: StrOrBytesPath | None = ..., parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> Literal[""] | Tuple[str, ...]: ... +) -> Literal[""] | tuple[str, ...]: ... def askdirectory( *, initialdir: StrOrBytesPath | None = ..., mustexist: bool | None = ..., parent: Misc | None = ..., title: str | None = ... ) -> str: ... 
# can be empty string @@ -101,7 +119,7 @@ def asksaveasfile( *, confirmoverwrite: bool | None = ..., defaultextension: str | None = ..., - filetypes: Iterable[Tuple[str, str] | Tuple[str, _TkinterSequence[str]]] | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., initialdir: StrOrBytesPath | None = ..., initialfile: StrOrBytesPath | None = ..., parent: Misc | None = ..., @@ -112,7 +130,7 @@ def askopenfile( mode: str = ..., *, defaultextension: str | None = ..., - filetypes: Iterable[Tuple[str, str] | Tuple[str, _TkinterSequence[str]]] | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., initialdir: StrOrBytesPath | None = ..., initialfile: StrOrBytesPath | None = ..., parent: Misc | None = ..., @@ -123,11 +141,11 @@ def askopenfiles( mode: str = ..., *, defaultextension: str | None = ..., - filetypes: Iterable[Tuple[str, str] | Tuple[str, _TkinterSequence[str]]] | None = ..., + filetypes: Iterable[tuple[str, str | list[str] | tuple[str, ...]]] | None = ..., initialdir: StrOrBytesPath | None = ..., initialfile: StrOrBytesPath | None = ..., parent: Misc | None = ..., title: str | None = ..., typevariable: StringVar | str | None = ..., -) -> Tuple[IO[Any], ...]: ... # can be empty tuple +) -> tuple[IO[Any], ...]: ... # can be empty tuple def test() -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/font.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/font.pyi index df828c448ae3..e16f325b589a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/font.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/font.pyi @@ -1,9 +1,12 @@ import _tkinter import sys import tkinter -from typing import Any, Tuple, Union, overload +from typing import Any, Union, overload from typing_extensions import Literal, TypedDict +if sys.version_info >= (3, 9): + __all__ = ["NORMAL", "ROMAN", "BOLD", "ITALIC", "nametofont", "Font", "families", "names"] + NORMAL: Literal["normal"] ROMAN: Literal["roman"] BOLD: Literal["bold"] @@ -15,7 +18,8 @@ _FontDescription = Union[ # A font object constructed in Python Font, # ("Helvetica", 12, BOLD) - tkinter._TkinterSequence[Any], + list[Any], + tuple[Any, ...], # A font object constructed in Tcl _tkinter.Tcl_Obj, ] @@ -100,9 +104,10 @@ class Font: @overload def metrics(self, *, displayof: tkinter.Misc | None = ...) -> _MetricsDict: ... def measure(self, text: str, displayof: tkinter.Misc | None = ...) -> int: ... + def __eq__(self, other: object) -> bool: ... -def families(root: tkinter.Misc | None = ..., displayof: tkinter.Misc | None = ...) -> Tuple[str, ...]: ... -def names(root: tkinter.Misc | None = ...) -> Tuple[str, ...]: ... +def families(root: tkinter.Misc | None = ..., displayof: tkinter.Misc | None = ...) -> tuple[str, ...]: ... +def names(root: tkinter.Misc | None = ...) -> tuple[str, ...]: ... if sys.version_info >= (3, 10): def nametofont(name: str, root: tkinter.Misc | None = ...) -> Font: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/messagebox.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/messagebox.pyi index fc4afcef0cc2..96109b116786 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/messagebox.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/messagebox.pyi @@ -1,6 +1,19 @@ +import sys from tkinter.commondialog import Dialog from typing import Any, ClassVar +if sys.version_info >= (3, 9): + __all__ = [ + "showinfo", + "showwarning", + "showerror", + "askquestion", + "askokcancel", + "askyesno", + "askyesnocancel", + "askretrycancel", + ] + ERROR: str INFO: str QUESTION: str diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/scrolledtext.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/scrolledtext.pyi index 246979d5afd7..00309431d457 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/scrolledtext.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/scrolledtext.pyi @@ -1,6 +1,8 @@ from tkinter import Frame, Misc, Scrollbar, Text from typing import Any +__all__ = ["ScrolledText"] + # The methods from Pack, Place, and Grid are dynamically added over the parent's impls class ScrolledText(Text): frame: Frame diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/simpledialog.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/simpledialog.pyi index ec801afaceee..fbe78530721f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/simpledialog.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/simpledialog.pyi @@ -4,7 +4,7 @@ from typing import Any class Dialog(Toplevel): def __init__(self, parent: Misc | None, title: str | None = ...) -> None: ... def body(self, master) -> None: ... - def buttonbox(self): ... + def buttonbox(self) -> None: ... 
class SimpleDialog: def __init__( @@ -22,6 +22,6 @@ class SimpleDialog: def wm_delete_window(self) -> None: ... def done(self, num: int) -> None: ... -def askfloat(title: str | None, prompt: str, **kwargs: Any) -> float: ... -def askinteger(title: str | None, prompt: str, **kwargs: Any) -> int: ... -def askstring(title: str | None, prompt: str, **kwargs: Any) -> str: ... +def askfloat(title: str | None, prompt: str, **kwargs: Any) -> float | None: ... +def askinteger(title: str | None, prompt: str, **kwargs: Any) -> int | None: ... +def askstring(title: str | None, prompt: str, **kwargs: Any) -> str | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/tix.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/tix.pyi index 3518802aaa71..6f9201a1bdf9 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/tix.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/tix.pyi @@ -1,5 +1,5 @@ import tkinter -from typing import Any, Tuple +from typing import Any from typing_extensions import Literal WINDOW: Literal["window"] @@ -45,7 +45,7 @@ class tixCommand: def tix_resetoptions(self, newScheme: str, newFontSet: str, newScmPrio: str | None = ...) -> None: ... class Tk(tkinter.Tk, tixCommand): - def __init__(self, screenName: str | None = ..., baseName: str | None = ..., className: str = ...): ... + def __init__(self, screenName: str | None = ..., baseName: str | None = ..., className: str = ...) -> None: ... class TixWidget(tkinter.Widget): def __init__( @@ -193,8 +193,8 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def indicator_delete(self, entry: str) -> None: ... def indicator_size(self, entry: str) -> int: ... def info_anchor(self) -> str: ... - def info_bbox(self, entry: str) -> Tuple[int, int, int, int]: ... - def info_children(self, entry: str | None = ...) -> Tuple[str, ...]: ... + def info_bbox(self, entry: str) -> tuple[int, int, int, int]: ... 
+ def info_children(self, entry: str | None = ...) -> tuple[str, ...]: ... def info_data(self, entry: str) -> Any: ... def info_dragsite(self) -> str: ... def info_dropsite(self) -> str: ... @@ -203,7 +203,7 @@ class HList(TixWidget, tkinter.XView, tkinter.YView): def info_next(self, entry: str) -> str: ... def info_parent(self, entry: str) -> str: ... def info_prev(self, entry: str) -> str: ... - def info_selection(self) -> Tuple[str, ...]: ... + def info_selection(self) -> tuple[str, ...]: ... def item_cget(self, entry: str, col: int, opt: Any) -> Any: ... def item_configure(self, entry: str, col: int, cnf: dict[str, Any] = ..., **kw: Any) -> Any | None: ... def item_create(self, entry: str, col: int, cnf: dict[str, Any] = ..., **kw: Any) -> None: ... @@ -224,7 +224,7 @@ class CheckList(TixWidget): def close(self, entrypath: str) -> None: ... def getmode(self, entrypath: str) -> str: ... def open(self, entrypath: str) -> None: ... - def getselection(self, mode: str = ...) -> Tuple[str, ...]: ... + def getselection(self, mode: str = ...) -> tuple[str, ...]: ... def getstatus(self, entrypath: str) -> str: ... def setstatus(self, entrypath: str, mode: str = ...) -> None: ... @@ -253,7 +253,7 @@ class TList(TixWidget, tkinter.XView, tkinter.YView): def info_down(self, index: int) -> int: ... def info_left(self, index: int) -> int: ... def info_right(self, index: int) -> int: ... - def info_selection(self) -> Tuple[int, ...]: ... + def info_selection(self) -> tuple[int, ...]: ... def info_size(self) -> int: ... def info_up(self, index: int) -> int: ... def nearest(self, x: int, y: int) -> int: ... @@ -266,7 +266,7 @@ class PanedWindow(TixWidget): def __init__(self, master: tkinter.Widget | None, cnf: dict[str, Any] = ..., **kw: Any) -> None: ... def add(self, name: str, cnf: dict[str, Any] = ..., **kw: Any) -> None: ... def delete(self, name: str) -> None: ... - def forget(self, name: str) -> None: ... # type: ignore + def forget(self, name: str) -> None: ... 
# type: ignore[override] def panecget(self, entry: str, opt: Any) -> Any: ... def paneconfigure(self, entry: str, cnf: dict[str, Any] = ..., **kw: Any) -> Any | None: ... def panes(self) -> list[tkinter.Widget]: ... @@ -296,6 +296,6 @@ class Form: def form(self, cnf: dict[str, Any] = ..., **kw: Any) -> None: ... def check(self) -> bool: ... def forget(self) -> None: ... - def grid(self, xsize: int = ..., ysize: int = ...) -> Tuple[int, int] | None: ... + def grid(self, xsize: int = ..., ysize: int = ...) -> tuple[int, int] | None: ... def info(self, option: str | None = ...) -> Any: ... def slaves(self) -> list[tkinter.Widget]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi index 9569239a7861..41a5b64c5c35 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tkinter/ttk.pyi @@ -2,10 +2,66 @@ import _tkinter import sys import tkinter from tkinter.font import _FontDescription -from typing import Any, Callable, Tuple, Union, overload +from typing import Any, Callable, Union, overload from typing_extensions import Literal, TypedDict -def tclobjs_to_py(adict): ... 
+if sys.version_info >= (3, 7): + __all__ = [ + "Button", + "Checkbutton", + "Combobox", + "Entry", + "Frame", + "Label", + "Labelframe", + "LabelFrame", + "Menubutton", + "Notebook", + "Panedwindow", + "PanedWindow", + "Progressbar", + "Radiobutton", + "Scale", + "Scrollbar", + "Separator", + "Sizegrip", + "Spinbox", + "Style", + "Treeview", + "LabeledScale", + "OptionMenu", + "tclobjs_to_py", + "setup_master", + ] +else: + __all__ = [ + "Button", + "Checkbutton", + "Combobox", + "Entry", + "Frame", + "Label", + "Labelframe", + "LabelFrame", + "Menubutton", + "Notebook", + "Panedwindow", + "PanedWindow", + "Progressbar", + "Radiobutton", + "Scale", + "Scrollbar", + "Separator", + "Sizegrip", + "Style", + "Treeview", + "LabeledScale", + "OptionMenu", + "tclobjs_to_py", + "setup_master", + ] + +def tclobjs_to_py(adict: dict[Any, Any]) -> dict[Any, Any]: ... def setup_master(master: Any | None = ...): ... # from ttk_widget (aka ttk::widget) manual page, differs from tkinter._Compound @@ -14,25 +70,25 @@ _TtkCompound = Literal["text", "image", tkinter._Compound] class Style: master: Any tk: _tkinter.TkappType - def __init__(self, master: tkinter.Misc | None = ...): ... + def __init__(self, master: tkinter.Misc | None = ...) -> None: ... def configure(self, style, query_opt: Any | None = ..., **kw): ... def map(self, style, query_opt: Any | None = ..., **kw): ... def lookup(self, style, option, state: Any | None = ..., default: Any | None = ...): ... def layout(self, style, layoutspec: Any | None = ...): ... - def element_create(self, elementname, etype, *args, **kw): ... + def element_create(self, elementname, etype, *args, **kw) -> None: ... def element_names(self): ... def element_options(self, elementname): ... - def theme_create(self, themename, parent: Any | None = ..., settings: Any | None = ...): ... - def theme_settings(self, themename, settings): ... - def theme_names(self) -> Tuple[str, ...]: ... 
+ def theme_create(self, themename, parent: Any | None = ..., settings: Any | None = ...) -> None: ... + def theme_settings(self, themename, settings) -> None: ... + def theme_names(self) -> tuple[str, ...]: ... @overload def theme_use(self, themename: str) -> None: ... @overload def theme_use(self, themename: None = ...) -> str: ... class Widget(tkinter.Widget): - def __init__(self, master: tkinter.Misc | None, widgetname, kw: Any | None = ...): ... - def identify(self, x, y): ... + def __init__(self, master: tkinter.Misc | None, widgetname, kw: Any | None = ...) -> None: ... + def identify(self, x: int, y: int) -> str: ... def instate(self, statespec, callback: Any | None = ..., *args, **kw): ... def state(self, statespec: Any | None = ...): ... @@ -75,9 +131,9 @@ class Button(Widget): textvariable: tkinter.Variable = ..., underline: int = ..., width: int | Literal[""] = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def invoke(self) -> Any: ... @@ -127,9 +183,9 @@ class Checkbutton(Widget): underline: int = ..., variable: tkinter.Variable = ..., width: int | Literal[""] = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def invoke(self) -> Any: ... @@ -158,7 +214,7 @@ class Entry(Widget, tkinter.Entry): width: int = ..., xscrollcommand: tkinter._XYScrollCommand = ..., ) -> None: ... 
- @overload # type: ignore + @overload # type: ignore[override] def configure( self, cnf: dict[str, Any] | None = ..., @@ -179,11 +235,11 @@ class Entry(Widget, tkinter.Entry): validatecommand: tkinter._EntryValidateCommand = ..., width: int = ..., xscrollcommand: tkinter._XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Entry().config is mypy error (don't know why) - @overload # type: ignore + @overload # type: ignore[override] def config( self, cnf: dict[str, Any] | None = ..., @@ -204,11 +260,11 @@ class Entry(Widget, tkinter.Entry): validatecommand: tkinter._EntryValidateCommand = ..., width: int = ..., xscrollcommand: tkinter._XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def config(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... - def bbox(self, index): ... - def identify(self, x, y): ... + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def bbox(self, index) -> tuple[int, int, int, int]: ... # type: ignore[override] + def identify(self, x: int, y: int) -> str: ... def validate(self): ... class Combobox(Entry): @@ -234,11 +290,11 @@ class Combobox(Entry): textvariable: tkinter.Variable = ..., validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., # undocumented validatecommand: tkinter._EntryValidateCommand = ..., # undocumented - values: tkinter._TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: tkinter._XYScrollCommand = ..., # undocumented ) -> None: ... 
- @overload # type: ignore + @overload # type: ignore[override] def configure( self, cnf: dict[str, Any] | None = ..., @@ -259,14 +315,14 @@ class Combobox(Entry): textvariable: tkinter.Variable = ..., validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., validatecommand: tkinter._EntryValidateCommand = ..., - values: tkinter._TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: tkinter._XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Combobox().config is mypy error (don't know why) - @overload # type: ignore + @overload # type: ignore[override] def config( self, cnf: dict[str, Any] | None = ..., @@ -287,14 +343,14 @@ class Combobox(Entry): textvariable: tkinter.Variable = ..., validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., validatecommand: tkinter._EntryValidateCommand = ..., - values: tkinter._TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., xscrollcommand: tkinter._XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def config(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... - def current(self, newindex: Any | None = ...): ... - def set(self, value): ... + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def current(self, newindex: int | None = ...) -> int: ... + def set(self, value: Any) -> None: ... 
class Frame(Widget): def __init__( @@ -327,9 +383,9 @@ class Frame(Widget): style: str = ..., takefocus: tkinter._TakeFocusValue = ..., width: tkinter._ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Label(Widget): @@ -385,9 +441,9 @@ class Label(Widget): underline: int = ..., width: int | Literal[""] = ..., wraplength: tkinter._ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Labelframe(Widget): @@ -429,9 +485,9 @@ class Labelframe(Widget): text: float | str = ..., underline: int = ..., width: tkinter._ScreenUnits = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure LabelFrame = Labelframe @@ -475,9 +531,9 @@ class Menubutton(Widget): textvariable: tkinter.Variable = ..., underline: int = ..., width: int | Literal[""] = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... 
config = configure class Notebook(Widget): @@ -505,9 +561,9 @@ class Notebook(Widget): style: str = ..., takefocus: tkinter._TakeFocusValue = ..., width: int = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def add( self, @@ -521,11 +577,11 @@ class Notebook(Widget): compound: tkinter._Compound = ..., underline: int = ..., ) -> None: ... - def forget(self, tab_id): ... - def hide(self, tab_id): ... - def identify(self, x, y): ... + def forget(self, tab_id) -> None: ... + def hide(self, tab_id) -> None: ... + def identify(self, x: int, y: int) -> str: ... def index(self, tab_id): ... - def insert(self, pos, child, **kw): ... + def insert(self, pos, child, **kw) -> None: ... def select(self, tab_id: Any | None = ...): ... def tab(self, tab_id, option: Any | None = ..., **kw): ... def tabs(self): ... @@ -541,13 +597,13 @@ class Panedwindow(Widget, tkinter.PanedWindow): # width and height for tkinter.ttk.Panedwindow are int but for tkinter.PanedWindow they are screen units height: int = ..., name: str = ..., - orient: Literal["vertical", "horizontal"] = ..., + orient: Literal["vertical", "horizontal"] = ..., # can't be changed with configure() style: str = ..., takefocus: tkinter._TakeFocusValue = ..., width: int = ..., ) -> None: ... def add(self, child: tkinter.Widget, *, weight: int = ..., **kw) -> None: ... - @overload # type: ignore + @overload # type: ignore[override] def configure( self, cnf: dict[str, Any] | None = ..., @@ -557,11 +613,11 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = ..., takefocus: tkinter._TakeFocusValue = ..., width: int = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... 
@overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Panedwindow().config is mypy error (don't know why) - @overload # type: ignore + @overload # type: ignore[override] def config( self, cnf: dict[str, Any] | None = ..., @@ -571,11 +627,11 @@ class Panedwindow(Widget, tkinter.PanedWindow): style: str = ..., takefocus: tkinter._TakeFocusValue = ..., width: int = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def config(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... forget: Any - def insert(self, pos, child, **kw): ... + def insert(self, pos, child, **kw) -> None: ... def pane(self, pane, option: Any | None = ..., **kw): ... def sashpos(self, index, newpos: Any | None = ...): ... @@ -614,13 +670,13 @@ class Progressbar(Widget): takefocus: tkinter._TakeFocusValue = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def start(self, interval: Any | None = ...): ... - def step(self, amount: Any | None = ...): ... - def stop(self): ... + def start(self, interval: Literal["idle"] | int | None = ...) -> None: ... + def step(self, amount: float | None = ...) -> None: ... + def stop(self) -> None: ... class Radiobutton(Widget): def __init__( @@ -663,13 +719,14 @@ class Radiobutton(Widget): value: Any = ..., variable: tkinter.Variable | Literal[""] = ..., width: int | Literal[""] = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... 
+ ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure def invoke(self) -> Any: ... -class Scale(Widget, tkinter.Scale): +# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible +class Scale(Widget, tkinter.Scale): # type: ignore[misc] def __init__( self, master: tkinter.Misc | None = ..., @@ -688,7 +745,7 @@ class Scale(Widget, tkinter.Scale): value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., ) -> None: ... - @overload # type: ignore + @overload # type: ignore[override] def configure( self, cnf: dict[str, Any] | None = ..., @@ -704,11 +761,11 @@ class Scale(Widget, tkinter.Scale): to: float = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scale().config is mypy error (don't know why) - @overload # type: ignore + @overload # type: ignore[override] def config( self, cnf: dict[str, Any] | None = ..., @@ -724,51 +781,52 @@ class Scale(Widget, tkinter.Scale): to: float = ..., value: float = ..., variable: tkinter.IntVar | tkinter.DoubleVar = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def config(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... - def get(self, x: Any | None = ..., y: Any | None = ...): ... + def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + def get(self, x: int | None = ..., y: int | None = ...) -> float: ... 
-class Scrollbar(Widget, tkinter.Scrollbar): +# type ignore, because identify() methods of Widget and tkinter.Scale are incompatible +class Scrollbar(Widget, tkinter.Scrollbar): # type: ignore[misc] def __init__( self, master: tkinter.Misc | None = ..., *, class_: str = ..., - command: Callable[..., Tuple[float, float] | None] | str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., name: str = ..., orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., ) -> None: ... - @overload # type: ignore + @overload # type: ignore[override] def configure( self, cnf: dict[str, Any] | None = ..., *, - command: Callable[..., Tuple[float, float] | None] | str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... # config must be copy/pasted, otherwise ttk.Scrollbar().config is mypy error (don't know why) - @overload # type: ignore + @overload # type: ignore[override] def config( self, cnf: dict[str, Any] | None = ..., *, - command: Callable[..., Tuple[float, float] | None] | str = ..., + command: Callable[..., tuple[float, float] | None] | str = ..., cursor: tkinter._Cursor = ..., orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def config(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... 
+ def config(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... class Separator(Widget): def __init__( @@ -791,9 +849,9 @@ class Separator(Widget): orient: Literal["horizontal", "vertical"] = ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure class Sizegrip(Widget): @@ -815,9 +873,9 @@ class Sizegrip(Widget): cursor: tkinter._Cursor = ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure if sys.version_info >= (3, 7): @@ -828,7 +886,7 @@ if sys.version_info >= (3, 7): *, background: tkinter._Color = ..., # undocumented class_: str = ..., - command: Callable[[], Any] | str | tkinter._TkinterSequence[str] = ..., + command: Callable[[], Any] | str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., # undocumented font: _FontDescription = ..., # undocumented @@ -847,18 +905,18 @@ if sys.version_info >= (3, 7): to: float = ..., validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., validatecommand: tkinter._EntryValidateCommand = ..., - values: tkinter._TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., # undocumented wrap: bool = ..., xscrollcommand: tkinter._XYScrollCommand = ..., ) -> None: ... 
- @overload # type: ignore + @overload # type: ignore[override] def configure( self, cnf: dict[str, Any] | None = ..., *, background: tkinter._Color = ..., - command: Callable[[], Any] | str | tkinter._TkinterSequence[str] = ..., + command: Callable[[], Any] | str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., exportselection: bool = ..., font: _FontDescription = ..., @@ -876,22 +934,22 @@ if sys.version_info >= (3, 7): to: float = ..., validate: Literal["none", "focus", "focusin", "focusout", "key", "all"] = ..., validatecommand: tkinter._EntryValidateCommand = ..., - values: tkinter._TkinterSequence[str] = ..., + values: list[str] | tuple[str, ...] = ..., width: int = ..., wrap: bool = ..., xscrollcommand: tkinter._XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... - config = configure # type: ignore + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... + config = configure # type: ignore[assignment] def set(self, value: Any) -> None: ... 
class _TreeviewItemDict(TypedDict): text: str - image: Literal[""] | list[str] # no idea why it's wrapped in list - values: list[Any] + image: list[str] | Literal[""] # no idea why it's wrapped in list + values: list[Any] | Literal[""] open: bool # actually 0 or 1 - tags: list[str] + tags: list[str] | Literal[""] class _TreeviewTagDict(TypedDict): # There is also 'text' and 'anchor', but they don't seem to do anything, using them is likely a bug @@ -902,7 +960,7 @@ class _TreeviewTagDict(TypedDict): class _TreeviewHeaderDict(TypedDict): text: str - image: list[str] + image: list[str] | Literal[""] anchor: tkinter._Anchor command: str state: str # Doesn't seem to appear anywhere else than in these dicts @@ -922,18 +980,18 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): master: tkinter.Misc | None = ..., *, class_: str = ..., - columns: str | tkinter._TkinterSequence[str] = ..., + columns: str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., - displaycolumns: str | tkinter._TkinterSequence[str] | tkinter._TkinterSequence[int] | Literal["#all"] = ..., + displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] | Literal["#all"] = ..., height: int = ..., name: str = ..., padding: tkinter._Padding = ..., selectmode: Literal["extended", "browse", "none"] = ..., - # _TkinterSequences of Literal don't actually work, using str instead. + # list/tuple of Literal don't actually work in mypy # # 'tree headings' is same as ['tree', 'headings'], and I wouldn't be - # surprised if someone was using it. - show: Literal["tree", "headings", "tree headings"] | tkinter._TkinterSequence[str] = ..., + # surprised if someone is using it. + show: Literal["tree", "headings", "tree headings", ""] | list[str] | tuple[str, ...] 
= ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., xscrollcommand: tkinter._XYScrollCommand = ..., @@ -944,23 +1002,23 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): self, cnf: dict[str, Any] | None = ..., *, - columns: str | tkinter._TkinterSequence[str] = ..., + columns: str | list[str] | tuple[str, ...] = ..., cursor: tkinter._Cursor = ..., - displaycolumns: str | tkinter._TkinterSequence[str] | tkinter._TkinterSequence[int] | Literal["#all"] = ..., + displaycolumns: str | list[str] | tuple[str, ...] | list[int] | tuple[int, ...] | Literal["#all"] = ..., height: int = ..., padding: tkinter._Padding = ..., selectmode: Literal["extended", "browse", "none"] = ..., - show: Literal["tree", "headings", "tree headings"] | tkinter._TkinterSequence[str] = ..., + show: Literal["tree", "headings", "tree headings", ""] | list[str] | tuple[str, ...] = ..., style: str = ..., takefocus: tkinter._TakeFocusValue = ..., xscrollcommand: tkinter._XYScrollCommand = ..., yscrollcommand: tkinter._XYScrollCommand = ..., - ) -> dict[str, Tuple[str, str, str, Any, Any]] | None: ... + ) -> dict[str, tuple[str, str, str, Any, Any]] | None: ... @overload - def configure(self, cnf: str) -> Tuple[str, str, str, Any, Any]: ... + def configure(self, cnf: str) -> tuple[str, str, str, Any, Any]: ... config = configure - def bbox(self, item, column: _TreeviewColumnId | None = ...) -> Tuple[int, int, int, int] | Literal[""]: ... # type: ignore - def get_children(self, item: str | None = ...) -> Tuple[str, ...]: ... + def bbox(self, item, column: _TreeviewColumnId | None = ...) -> tuple[int, int, int, int] | Literal[""]: ... # type: ignore[override] + def get_children(self, item: str | None = ...) -> tuple[str, ...]: ... def set_children(self, item: str, *newchildren: str) -> None: ... @overload def column(self, column: _TreeviewColumnId, option: Literal["width", "minwidth"]) -> int: ... 
@@ -987,14 +1045,14 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def delete(self, *items: str) -> None: ... def detach(self, *items: str) -> None: ... def exists(self, item: str) -> bool: ... - @overload # type: ignore + @overload # type: ignore[override] def focus(self, item: None = ...) -> str: ... # can return empty string @overload def focus(self, item: str) -> Literal[""]: ... @overload def heading(self, column: _TreeviewColumnId, option: Literal["text"]) -> str: ... @overload - def heading(self, column: _TreeviewColumnId, option: Literal["image"]) -> Tuple[str]: ... + def heading(self, column: _TreeviewColumnId, option: Literal["image"]) -> tuple[str] | str: ... @overload def heading(self, column: _TreeviewColumnId, option: Literal["anchor"]) -> _tkinter.Tcl_Obj: ... @overload @@ -1002,6 +1060,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): @overload def heading(self, column: _TreeviewColumnId, option: str) -> Any: ... @overload + def heading(self, column: _TreeviewColumnId, option: None = ...) -> _TreeviewHeaderDict: ... # type: ignore[misc] + @overload def heading( self, column: _TreeviewColumnId, @@ -1011,8 +1071,8 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): image: tkinter._ImageSpec = ..., anchor: tkinter._Anchor = ..., command: str | Callable[[], Any] = ..., - ) -> _TreeviewHeaderDict | None: ... - def identify(self, component, x, y): ... + ) -> None: ... + def identify(self, component, x, y): ... # Internal Method. Leave untyped def identify_row(self, y: int) -> str: ... def identify_column(self, x: int) -> str: ... def identify_region(self, x: int, y: int) -> Literal["heading", "separator", "tree", "cell", "nothing"]: ... @@ -1027,23 +1087,25 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): id: str = ..., # same as iid text: str = ..., image: tkinter._ImageSpec = ..., - values: tkinter._TkinterSequence[Any] = ..., + values: list[Any] | tuple[Any, ...] 
= ..., open: bool = ..., - tags: str | tkinter._TkinterSequence[str] = ..., + tags: str | list[str] | tuple[str, ...] = ..., ) -> str: ... @overload def item(self, item: str, option: Literal["text"]) -> str: ... @overload - def item(self, item: str, option: Literal["image"]) -> Literal[""] | Tuple[str]: ... + def item(self, item: str, option: Literal["image"]) -> tuple[str] | Literal[""]: ... @overload - def item(self, item: str, option: Literal["values"]) -> Literal[""] | Tuple[Any, ...]: ... + def item(self, item: str, option: Literal["values"]) -> tuple[Any, ...] | Literal[""]: ... @overload def item(self, item: str, option: Literal["open"]) -> bool: ... # actually 0 or 1 @overload - def item(self, item: str, option: Literal["tags"]) -> Literal[""] | Tuple[str, ...]: ... + def item(self, item: str, option: Literal["tags"]) -> tuple[str, ...] | Literal[""]: ... @overload def item(self, item: str, option: str) -> Any: ... @overload + def item(self, item: str, option: None = ...) -> _TreeviewItemDict: ... # type: ignore[misc] + @overload def item( self, item: str, @@ -1051,10 +1113,10 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): *, text: str = ..., image: tkinter._ImageSpec = ..., - values: tkinter._TkinterSequence[Any] = ..., + values: list[Any] | tuple[Any, ...] | Literal[""] = ..., open: bool = ..., - tags: str | tkinter._TkinterSequence[str] = ..., - ) -> _TreeviewItemDict | None: ... + tags: str | list[str] | tuple[str, ...] = ..., + ) -> None: ... def move(self, item: str, parent: str, index: int) -> None: ... reattach = move def next(self, item: str) -> str: ... # returning empty string means last item @@ -1062,13 +1124,14 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): def prev(self, item: str) -> str: ... # returning empty string means first item def see(self, item: str) -> None: ... if sys.version_info >= (3, 8): - def selection(self) -> Tuple[str, ...]: ... + def selection(self) -> tuple[str, ...]: ... 
else: - def selection(self, selop: Any | None = ..., items: Any | None = ...) -> Tuple[str, ...]: ... - def selection_set(self, items: str | tkinter._TkinterSequence[str]) -> None: ... - def selection_add(self, items: str | tkinter._TkinterSequence[str]) -> None: ... - def selection_remove(self, items: str | tkinter._TkinterSequence[str]) -> None: ... - def selection_toggle(self, items: str | tkinter._TkinterSequence[str]) -> None: ... + def selection(self, selop: Any | None = ..., items: Any | None = ...) -> tuple[str, ...]: ... + + def selection_set(self, items: str | list[str] | tuple[str, ...]) -> None: ... + def selection_add(self, items: str | list[str] | tuple[str, ...]) -> None: ... + def selection_remove(self, items: str | list[str] | tuple[str, ...]) -> None: ... + def selection_toggle(self, items: str | list[str] | tuple[str, ...]) -> None: ... @overload def set(self, item: str, column: None = ..., value: None = ...) -> dict[str, Any]: ... @overload @@ -1104,7 +1167,7 @@ class Treeview(Widget, tkinter.XView, tkinter.YView): image: tkinter._ImageSpec = ..., ) -> _TreeviewTagDict | Any: ... # can be None but annoying to check @overload - def tag_has(self, tagname: str, item: None = ...) -> Tuple[str, ...]: ... + def tag_has(self, tagname: str, item: None = ...) -> tuple[str, ...]: ... @overload def tag_has(self, tagname: str, item: str) -> bool: ... @@ -1119,10 +1182,10 @@ class LabeledScale(Frame): from_: float = ..., to: float = ..., *, - compound: Literal["top"] | Literal["bottom"] = ..., + compound: Literal["top", "bottom"] = ..., **kw: Any, ) -> None: ... 
- # destroy is overrided, signature does not change + # destroy is overridden, signature does not change value: Any class OptionMenu(Menubutton): @@ -1134,9 +1197,9 @@ class OptionMenu(Menubutton): *values: str, # rest of these are keyword-only because *args syntax used above style: str = ..., - direction: Literal["above"] | Literal["below"] | Literal["left"] | Literal["right"] | Literal["flush"] = ..., + direction: Literal["above", "below", "left", "right", "flush"] = ..., command: Callable[[tkinter.StringVar], Any] | None = ..., ) -> None: ... # configure, config, cget, destroy are inherited from Menubutton - # destroy and __setitem__ are overrided, signature does not change - def set_menu(self, default: Any | None = ..., *values): ... + # destroy and __setitem__ are overridden, signature does not change + def set_menu(self, default: Any | None = ..., *values) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/token.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/token.pyi index 90381833511b..49329ec442f0 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/token.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/token.pyi @@ -1,5 +1,284 @@ import sys +if sys.version_info >= (3, 10): + __all__ = [ + "tok_name", + "ISTERMINAL", + "ISNONTERMINAL", + "ISEOF", + "ENDMARKER", + "NAME", + "NUMBER", + "STRING", + "NEWLINE", + "INDENT", + "DEDENT", + "LPAR", + "RPAR", + "LSQB", + "RSQB", + "COLON", + "COMMA", + "SEMI", + "PLUS", + "MINUS", + "STAR", + "SLASH", + "VBAR", + "AMPER", + "LESS", + "GREATER", + "EQUAL", + "DOT", + "PERCENT", + "LBRACE", + "RBRACE", + "EQEQUAL", + "NOTEQUAL", + "LESSEQUAL", + "GREATEREQUAL", + "TILDE", + "CIRCUMFLEX", + "LEFTSHIFT", + "RIGHTSHIFT", + "DOUBLESTAR", + "PLUSEQUAL", + "MINEQUAL", + "STAREQUAL", + "SLASHEQUAL", + "PERCENTEQUAL", + "AMPEREQUAL", + "VBAREQUAL", + "CIRCUMFLEXEQUAL", + "LEFTSHIFTEQUAL", + "RIGHTSHIFTEQUAL", + "DOUBLESTAREQUAL", + "DOUBLESLASH", + 
"DOUBLESLASHEQUAL", + "AT", + "ATEQUAL", + "RARROW", + "ELLIPSIS", + "COLONEQUAL", + "OP", + "AWAIT", + "ASYNC", + "TYPE_IGNORE", + "TYPE_COMMENT", + "SOFT_KEYWORD", + "ERRORTOKEN", + "COMMENT", + "NL", + "ENCODING", + "N_TOKENS", + "NT_OFFSET", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "tok_name", + "ISTERMINAL", + "ISNONTERMINAL", + "ISEOF", + "ENDMARKER", + "NAME", + "NUMBER", + "STRING", + "NEWLINE", + "INDENT", + "DEDENT", + "LPAR", + "RPAR", + "LSQB", + "RSQB", + "COLON", + "COMMA", + "SEMI", + "PLUS", + "MINUS", + "STAR", + "SLASH", + "VBAR", + "AMPER", + "LESS", + "GREATER", + "EQUAL", + "DOT", + "PERCENT", + "LBRACE", + "RBRACE", + "EQEQUAL", + "NOTEQUAL", + "LESSEQUAL", + "GREATEREQUAL", + "TILDE", + "CIRCUMFLEX", + "LEFTSHIFT", + "RIGHTSHIFT", + "DOUBLESTAR", + "PLUSEQUAL", + "MINEQUAL", + "STAREQUAL", + "SLASHEQUAL", + "PERCENTEQUAL", + "AMPEREQUAL", + "VBAREQUAL", + "CIRCUMFLEXEQUAL", + "LEFTSHIFTEQUAL", + "RIGHTSHIFTEQUAL", + "DOUBLESTAREQUAL", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "AT", + "ATEQUAL", + "RARROW", + "ELLIPSIS", + "COLONEQUAL", + "OP", + "AWAIT", + "ASYNC", + "TYPE_IGNORE", + "TYPE_COMMENT", + "ERRORTOKEN", + "COMMENT", + "NL", + "ENCODING", + "N_TOKENS", + "NT_OFFSET", + ] +elif sys.version_info >= (3, 7): + __all__ = [ + "tok_name", + "ISTERMINAL", + "ISNONTERMINAL", + "ISEOF", + "ENDMARKER", + "NAME", + "NUMBER", + "STRING", + "NEWLINE", + "INDENT", + "DEDENT", + "LPAR", + "RPAR", + "LSQB", + "RSQB", + "COLON", + "COMMA", + "SEMI", + "PLUS", + "MINUS", + "STAR", + "SLASH", + "VBAR", + "AMPER", + "LESS", + "GREATER", + "EQUAL", + "DOT", + "PERCENT", + "LBRACE", + "RBRACE", + "EQEQUAL", + "NOTEQUAL", + "LESSEQUAL", + "GREATEREQUAL", + "TILDE", + "CIRCUMFLEX", + "LEFTSHIFT", + "RIGHTSHIFT", + "DOUBLESTAR", + "PLUSEQUAL", + "MINEQUAL", + "STAREQUAL", + "SLASHEQUAL", + "PERCENTEQUAL", + "AMPEREQUAL", + "VBAREQUAL", + "CIRCUMFLEXEQUAL", + "LEFTSHIFTEQUAL", + "RIGHTSHIFTEQUAL", + "DOUBLESTAREQUAL", + "DOUBLESLASH", + 
"DOUBLESLASHEQUAL", + "AT", + "ATEQUAL", + "RARROW", + "ELLIPSIS", + "OP", + "ERRORTOKEN", + "COMMENT", + "NL", + "ENCODING", + "N_TOKENS", + "NT_OFFSET", + ] +else: + __all__ = [ + "tok_name", + "ISTERMINAL", + "ISNONTERMINAL", + "ISEOF", + "ENDMARKER", + "NAME", + "NUMBER", + "STRING", + "NEWLINE", + "INDENT", + "DEDENT", + "LPAR", + "RPAR", + "LSQB", + "RSQB", + "COLON", + "COMMA", + "SEMI", + "PLUS", + "MINUS", + "STAR", + "SLASH", + "VBAR", + "AMPER", + "LESS", + "GREATER", + "EQUAL", + "DOT", + "PERCENT", + "LBRACE", + "RBRACE", + "EQEQUAL", + "NOTEQUAL", + "LESSEQUAL", + "GREATEREQUAL", + "TILDE", + "CIRCUMFLEX", + "LEFTSHIFT", + "RIGHTSHIFT", + "DOUBLESTAR", + "PLUSEQUAL", + "MINEQUAL", + "STAREQUAL", + "SLASHEQUAL", + "PERCENTEQUAL", + "AMPEREQUAL", + "VBAREQUAL", + "CIRCUMFLEXEQUAL", + "LEFTSHIFTEQUAL", + "RIGHTSHIFTEQUAL", + "DOUBLESTAREQUAL", + "DOUBLESLASH", + "DOUBLESLASHEQUAL", + "AT", + "ATEQUAL", + "RARROW", + "ELLIPSIS", + "OP", + "AWAIT", + "ASYNC", + "ERRORTOKEN", + "N_TOKENS", + "NT_OFFSET", + ] + ENDMARKER: int NAME: int NUMBER: int @@ -71,6 +350,8 @@ if sys.version_info >= (3, 8): TYPE_IGNORE: int COLONEQUAL: int EXACT_TOKEN_TYPES: dict[str, int] +if sys.version_info >= (3, 10): + SOFT_KEYWORD: int def ISTERMINAL(x: int) -> bool: ... def ISNONTERMINAL(x: int) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tokenize.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tokenize.pyi index 136dcfcf0a14..f908261cace6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tokenize.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tokenize.pyi @@ -1,8 +1,13 @@ import sys from _typeshed import StrOrBytesPath from builtins import open as _builtin_open -from token import * # noqa: F403 -from typing import Any, Callable, Generator, Iterable, NamedTuple, Pattern, Sequence, Set, TextIO, Tuple, Union +from token import * +from typing import Any, Callable, Generator, Iterable, NamedTuple, Pattern, Sequence, TextIO, Union + +if sys.version_info >= (3, 8): + from token import EXACT_TOKEN_TYPES as EXACT_TOKEN_TYPES +else: + EXACT_TOKEN_TYPES: dict[str, int] if sys.version_info < (3, 7): COMMENT: int @@ -12,7 +17,7 @@ if sys.version_info < (3, 7): cookie_re: Pattern[str] blank_re: Pattern[bytes] -_Position = Tuple[int, int] +_Position = tuple[int, int] class _TokenInfo(NamedTuple): type: int @@ -44,7 +49,7 @@ class Untokenizer: # the docstring says "returns bytes" but is incorrect -- # if the ENCODING token is missing, it skips the encode def untokenize(iterable: Iterable[_Token]) -> Any: ... -def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ... +def detect_encoding(readline: Callable[[], bytes]) -> tuple[str, Sequence[bytes]]: ... def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ... def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented def open(filename: StrOrBytesPath | int) -> TextIO: ... @@ -69,7 +74,7 @@ Floatnumber: str # undocumented Imagnumber: str # undocumented Number: str # undocumented -def _all_string_prefixes() -> Set[str]: ... # undocumented +def _all_string_prefixes() -> set[str]: ... 
# undocumented StringPrefix: str # undocumented @@ -95,7 +100,7 @@ PseudoExtras: str # undocumented PseudoToken: str # undocumented endpats: dict[str, str] # undocumented -single_quoted: Set[str] # undocumented -triple_quoted: Set[str] # undocumented +single_quoted: set[str] # undocumented +triple_quoted: set[str] # undocumented tabsize: int # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/trace.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/trace.pyi index c6d993bf6143..640b014594a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/trace.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/trace.pyi @@ -1,28 +1,30 @@ import sys import types from _typeshed import StrPath -from typing import Any, Callable, Mapping, Optional, Sequence, Tuple, TypeVar +from typing import Any, Callable, Mapping, Optional, Sequence, TypeVar from typing_extensions import ParamSpec +__all__ = ["Trace", "CoverageResults"] + _T = TypeVar("_T") _P = ParamSpec("_P") _localtrace = Callable[[types.FrameType, str, Any], Callable[..., Any]] -_fileModuleFunction = Tuple[str, Optional[str], str] +_fileModuleFunction = tuple[str, Optional[str], str] class CoverageResults: def __init__( self, - counts: dict[Tuple[str, int], int] | None = ..., + counts: dict[tuple[str, int], int] | None = ..., calledfuncs: dict[_fileModuleFunction, int] | None = ..., infile: StrPath | None = ..., - callers: dict[Tuple[_fileModuleFunction, _fileModuleFunction], int] | None = ..., + callers: dict[tuple[_fileModuleFunction, _fileModuleFunction], int] | None = ..., outfile: StrPath | None = ..., ) -> None: ... # undocumented def update(self, other: CoverageResults) -> None: ... def write_results(self, show_missing: bool = ..., summary: bool = ..., coverdir: StrPath | None = ...) -> None: ... def write_results_file( self, path: StrPath, lines: Sequence[str], lnotab: Any, lines_hit: Mapping[int, int], encoding: str | None = ... - ) -> Tuple[int, int]: ... 
+ ) -> tuple[int, int]: ... def is_ignored_filename(self, filename: str) -> bool: ... # undocumented class Trace: @@ -43,9 +45,10 @@ class Trace: self, cmd: str | types.CodeType, globals: Mapping[str, Any] | None = ..., locals: Mapping[str, Any] | None = ... ) -> None: ... if sys.version_info >= (3, 9): - def runfunc(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... # type: ignore + def runfunc(self, __func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... else: - def runfunc(self, func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... # type: ignore + def runfunc(self, func: Callable[_P, _T], *args: _P.args, **kw: _P.kwargs) -> _T: ... + def file_module_function_of(self, frame: types.FrameType) -> _fileModuleFunction: ... def globaltrace_trackcallers(self, frame: types.FrameType, why: str, arg: Any) -> None: ... def globaltrace_countfuncs(self, frame: types.FrameType, why: str, arg: Any) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/traceback.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/traceback.pyi index e071a3158816..57bae3b6dc79 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/traceback.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/traceback.pyi @@ -1,31 +1,76 @@ import sys -from _typeshed import SupportsWrite +from _typeshed import Self, SupportsWrite from types import FrameType, TracebackType -from typing import IO, Any, Generator, Iterable, Iterator, List, Mapping, Optional, Set, Tuple, Type +from typing import IO, Any, Generator, Iterable, Iterator, Mapping, Optional, overload +from typing_extensions import Literal -_PT = Tuple[str, int, str, Optional[str]] +__all__ = [ + "extract_stack", + "extract_tb", + "format_exception", + "format_exception_only", + "format_list", + "format_stack", + "format_tb", + "print_exc", + "format_exc", + "print_exception", + "print_last", + "print_stack", + "print_tb", + "clear_frames", + "FrameSummary", 
+ "StackSummary", + "TracebackException", + "walk_stack", + "walk_tb", +] + +_PT = tuple[str, int, str, Optional[str]] def print_tb(tb: TracebackType | None, limit: int | None = ..., file: IO[str] | None = ...) -> None: ... if sys.version_info >= (3, 10): + @overload def print_exception( - __exc: Type[BaseException] | None, + __exc: type[BaseException] | None, value: BaseException | None = ..., tb: TracebackType | None = ..., limit: int | None = ..., file: IO[str] | None = ..., chain: bool = ..., ) -> None: ... + @overload + def print_exception( + __exc: BaseException, *, limit: int | None = ..., file: IO[str] | None = ..., chain: bool = ... + ) -> None: ... + @overload + def format_exception( + __exc: type[BaseException] | None, + value: BaseException | None = ..., + tb: TracebackType | None = ..., + limit: int | None = ..., + chain: bool = ..., + ) -> list[str]: ... + @overload + def format_exception(__exc: BaseException, *, limit: int | None = ..., chain: bool = ...) -> list[str]: ... else: def print_exception( - etype: Type[BaseException] | None, + etype: type[BaseException] | None, value: BaseException | None, tb: TracebackType | None, limit: int | None = ..., file: IO[str] | None = ..., chain: bool = ..., ) -> None: ... + def format_exception( + etype: type[BaseException] | None, + value: BaseException | None, + tb: TracebackType | None, + limit: int | None = ..., + chain: bool = ..., + ) -> list[str]: ... def print_exc(limit: int | None = ..., file: IO[str] | None = ..., chain: bool = ...) -> None: ... def print_last(limit: int | None = ..., file: IO[str] | None = ..., chain: bool = ...) -> None: ... @@ -38,42 +83,24 @@ def format_list(extracted_list: list[FrameSummary]) -> list[str]: ... def print_list(extracted_list: list[FrameSummary], file: SupportsWrite[str] | None = ...) -> None: ... if sys.version_info >= (3, 10): - def format_exception_only(__exc: Type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ... 
- -else: - def format_exception_only(etype: Type[BaseException] | None, value: BaseException | None) -> list[str]: ... - -if sys.version_info >= (3, 10): - def format_exception( - __exc: Type[BaseException] | None, - value: BaseException | None = ..., - tb: TracebackType | None = ..., - limit: int | None = ..., - chain: bool = ..., - ) -> list[str]: ... + def format_exception_only(__exc: type[BaseException] | None, value: BaseException | None = ...) -> list[str]: ... else: - def format_exception( - etype: Type[BaseException] | None, - value: BaseException | None, - tb: TracebackType | None, - limit: int | None = ..., - chain: bool = ..., - ) -> list[str]: ... + def format_exception_only(etype: type[BaseException] | None, value: BaseException | None) -> list[str]: ... def format_exc(limit: int | None = ..., chain: bool = ...) -> str: ... def format_tb(tb: TracebackType | None, limit: int | None = ...) -> list[str]: ... def format_stack(f: FrameType | None = ..., limit: int | None = ...) -> list[str]: ... def clear_frames(tb: TracebackType) -> None: ... -def walk_stack(f: FrameType | None) -> Iterator[Tuple[FrameType, int]]: ... -def walk_tb(tb: TracebackType | None) -> Iterator[Tuple[FrameType, int]]: ... +def walk_stack(f: FrameType | None) -> Iterator[tuple[FrameType, int]]: ... +def walk_tb(tb: TracebackType | None) -> Iterator[tuple[FrameType, int]]: ... 
class TracebackException: __cause__: TracebackException __context__: TracebackException __suppress_context__: bool stack: StackSummary - exc_type: Type[BaseException] + exc_type: type[BaseException] filename: str lineno: int text: str @@ -82,71 +109,102 @@ class TracebackException: if sys.version_info >= (3, 10): def __init__( self, - exc_type: Type[BaseException], + exc_type: type[BaseException], exc_value: BaseException, - exc_traceback: TracebackType, + exc_traceback: TracebackType | None, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ..., compact: bool = ..., - _seen: Set[int] | None = ..., + _seen: set[int] | None = ..., ) -> None: ... @classmethod def from_exception( - cls, + cls: type[Self], exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ..., compact: bool = ..., - ) -> TracebackException: ... + ) -> Self: ... else: def __init__( self, - exc_type: Type[BaseException], + exc_type: type[BaseException], exc_value: BaseException, - exc_traceback: TracebackType, + exc_traceback: TracebackType | None, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ..., - _seen: Set[int] | None = ..., + _seen: set[int] | None = ..., ) -> None: ... @classmethod def from_exception( - cls, exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ... - ) -> TracebackException: ... + cls: type[Self], exc: BaseException, *, limit: int | None = ..., lookup_lines: bool = ..., capture_locals: bool = ... + ) -> Self: ... + + def __eq__(self, other: object) -> bool: ... def format(self, *, chain: bool = ...) -> Generator[str, None, None]: ... def format_exception_only(self) -> Generator[str, None, None]: ... 
class FrameSummary(Iterable[Any]): + if sys.version_info >= (3, 11): + def __init__( + self, + filename: str, + lineno: int | None, + name: str, + *, + lookup_line: bool = ..., + locals: Mapping[str, str] | None = ..., + line: str | None = ..., + end_lineno: int | None = ..., + colno: int | None = ..., + end_colno: int | None = ..., + ) -> None: ... + end_lineno: int | None + colno: int | None + end_colno: int | None + else: + def __init__( + self, + filename: str, + lineno: int | None, + name: str, + *, + lookup_line: bool = ..., + locals: Mapping[str, str] | None = ..., + line: str | None = ..., + ) -> None: ... filename: str - lineno: int + lineno: int | None name: str - line: str locals: dict[str, str] | None - def __init__( - self, - filename: str, - lineno: int, - name: str, - *, - lookup_line: bool = ..., - locals: Mapping[str, str] | None = ..., - line: str | None = ..., - ) -> None: ... - # TODO: more precise typing for __getitem__ and __iter__, - # for a namedtuple-like view on (filename, lineno, name, str). + @property + def line(self) -> str | None: ... + @overload + def __getitem__(self, i: Literal[0]) -> str: ... + @overload + def __getitem__(self, i: Literal[1]) -> int: ... + @overload + def __getitem__(self, i: Literal[2]) -> str: ... + @overload + def __getitem__(self, i: Literal[3]) -> str | None: ... + @overload def __getitem__(self, i: int) -> Any: ... def __iter__(self) -> Iterator[Any]: ... + def __eq__(self, other: object) -> bool: ... + if sys.version_info >= (3, 8): + def __len__(self) -> Literal[4]: ... 
-class StackSummary(List[FrameSummary]): +class StackSummary(list[FrameSummary]): @classmethod def extract( cls, - frame_gen: Generator[Tuple[FrameType, int], None, None], + frame_gen: Iterable[tuple[FrameType, int]], *, limit: int | None = ..., lookup_lines: bool = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tracemalloc.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tracemalloc.pyi index e812b8247332..f3198f9414a3 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tracemalloc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tracemalloc.pyi @@ -1,7 +1,7 @@ import sys -from typing import Optional, Sequence, Tuple, Union, overload - from _tracemalloc import * +from typing import Any, Optional, Sequence, Union, overload +from typing_extensions import SupportsIndex def get_object_traceback(obj: object) -> Traceback | None: ... def take_snapshot() -> Snapshot: ... @@ -26,6 +26,7 @@ class Statistic: size: int traceback: Traceback def __init__(self, traceback: Traceback, size: int, count: int) -> None: ... + def __eq__(self, other: object) -> bool: ... class StatisticDiff: count: int @@ -34,24 +35,36 @@ class StatisticDiff: size_diff: int traceback: Traceback def __init__(self, traceback: Traceback, size: int, size_diff: int, count: int, count_diff: int) -> None: ... + def __eq__(self, other: object) -> bool: ... -_FrameTupleT = Tuple[str, int] +_FrameTupleT = tuple[str, int] class Frame: filename: str lineno: int def __init__(self, frame: _FrameTupleT) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Frame) -> bool: ... + if sys.version_info >= (3, 11): + def __gt__(self, other: Frame) -> bool: ... + def __ge__(self, other: Frame) -> bool: ... + def __le__(self, other: Frame) -> bool: ... + else: + def __gt__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... 
+ def __le__(self, other: Frame, NotImplemented: Any = ...) -> bool: ... if sys.version_info >= (3, 9): - _TraceTupleT = Union[Tuple[int, int, Sequence[_FrameTupleT], Optional[int]], Tuple[int, int, Sequence[_FrameTupleT]]] + _TraceTupleT = Union[tuple[int, int, Sequence[_FrameTupleT], Optional[int]], tuple[int, int, Sequence[_FrameTupleT]]] else: - _TraceTupleT = Tuple[int, int, Sequence[_FrameTupleT]] + _TraceTupleT = tuple[int, int, Sequence[_FrameTupleT]] class Trace: domain: int size: int traceback: Traceback def __init__(self, trace: _TraceTupleT) -> None: ... + def __eq__(self, other: object) -> bool: ... class Traceback(Sequence[Frame]): if sys.version_info >= (3, 9): @@ -63,11 +76,22 @@ class Traceback(Sequence[Frame]): def format(self, limit: int | None = ..., most_recent_first: bool = ...) -> list[str]: ... else: def format(self, limit: int | None = ...) -> list[str]: ... + @overload - def __getitem__(self, i: int) -> Frame: ... + def __getitem__(self, i: SupportsIndex) -> Frame: ... @overload def __getitem__(self, s: slice) -> Sequence[Frame]: ... def __len__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: Traceback) -> bool: ... + if sys.version_info >= (3, 11): + def __gt__(self, other: Traceback) -> bool: ... + def __ge__(self, other: Traceback) -> bool: ... + def __le__(self, other: Traceback) -> bool: ... + else: + def __gt__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __ge__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... + def __le__(self, other: Traceback, NotImplemented: Any = ...) -> bool: ... class Snapshot: def __init__(self, traces: Sequence[_TraceTupleT], traceback_limit: int) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/tty.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/tty.pyi index c0dc418e9933..015669a68084 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/tty.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/tty.pyi @@ -1,15 +1,16 @@ +import sys from typing import IO, Union -_FD = Union[int, IO[str]] +if sys.platform != "win32": + _FD = Union[int, IO[str]] -# XXX: Undocumented integer constants -IFLAG: int -OFLAG: int -CFLAG: int -LFLAG: int -ISPEED: int -OSPEED: int -CC: int - -def setraw(fd: _FD, when: int = ...) -> None: ... -def setcbreak(fd: _FD, when: int = ...) -> None: ... + # XXX: Undocumented integer constants + IFLAG: int + OFLAG: int + CFLAG: int + LFLAG: int + ISPEED: int + OSPEED: int + CC: int + def setraw(fd: _FD, when: int = ...) -> None: ... + def setcbreak(fd: _FD, when: int = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/turtle.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/turtle.pyi index a289fd03b068..521ac5838b62 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/turtle.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/turtle.pyi @@ -1,24 +1,37 @@ -from tkinter import Canvas, PhotoImage -from typing import Any, Callable, Dict, Sequence, Tuple, TypeVar, Union, overload +from _typeshed import Self +from tkinter import Canvas, Frame, Misc, PhotoImage, Scrollbar +from typing import Any, Callable, ClassVar, Sequence, Union, overload # Note: '_Color' is the alias we use for arguments and _AnyColor is the # alias we use for return types. Really, these two aliases should be the # same, but as per the "no union returns" typeshed policy, we'll return # Any instead. -_Color = Union[str, Tuple[float, float, float]] +_Color = Union[str, tuple[float, float, float]] _AnyColor = Any # TODO: Replace this with a TypedDict once it becomes standardized. 
-_PenState = Dict[str, Any] +_PenState = dict[str, Any] _Speed = Union[str, float] -_PolygonCoords = Sequence[Tuple[float, float]] +_PolygonCoords = Sequence[tuple[float, float]] # TODO: Type this more accurately # Vec2D is actually a custom subclass of 'tuple'. -Vec2D = Tuple[float, float] +Vec2D = tuple[float, float] -class TurtleScreenBase(object): +# Does not actually inherit from Canvas, but dynamically gets all methods of Canvas +class ScrolledCanvas(Canvas, Frame): # type: ignore[misc] + bg: str + hscroll: Scrollbar + vscroll: Scrollbar + def __init__( + self, master: Misc | None, width: int = ..., height: int = ..., canvwidth: int = ..., canvheight: int = ... + ) -> None: ... + canvwidth: int + canvheight: int + def reset(self, canvwidth: int | None = ..., canvheight: int | None = ..., bg: str | None = ...) -> None: ... + +class TurtleScreenBase: cv: Canvas canvwidth: int canvheight: int @@ -34,7 +47,7 @@ class TurtleScreenBase(object): class Terminator(Exception): ... class TurtleGraphicsError(Exception): ... -class Shape(object): +class Shape: def __init__(self, type_: str, data: _PolygonCoords | PhotoImage | None = ...) -> None: ... def addcomponent(self, poly: _PolygonCoords, fill: _Color, outline: _Color | None = ...) -> None: ... @@ -81,7 +94,7 @@ class TurtleScreen(TurtleScreenBase): @overload def bgpic(self, picname: str) -> None: ... @overload - def screensize(self, canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> Tuple[int, int]: ... + def screensize(self, canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> tuple[int, int]: ... # Looks like if self.cv is not a ScrolledCanvas, this could return a tuple as well @overload def screensize(self, canvwidth: int, canvheight: int, bg: _Color | None = ...) -> None: ... @@ -92,7 +105,7 @@ class TurtleScreen(TurtleScreenBase): def onkeypress(self, fun: Callable[[], Any], key: str | None = ...) -> None: ... 
onkeyrelease = onkey -class TNavigator(object): +class TNavigator: START_ORIENTATION: dict[str, Vec2D] DEFAULT_MODE: str DEFAULT_ANGLEOFFSET: int @@ -109,18 +122,18 @@ class TNavigator(object): def xcor(self) -> float: ... def ycor(self) -> float: ... @overload - def goto(self, x: Tuple[float, float], y: None = ...) -> None: ... + def goto(self, x: tuple[float, float], y: None = ...) -> None: ... @overload def goto(self, x: float, y: float) -> None: ... def home(self) -> None: ... def setx(self, x: float) -> None: ... def sety(self, y: float) -> None: ... @overload - def distance(self, x: TNavigator | Tuple[float, float], y: None = ...) -> float: ... + def distance(self, x: TNavigator | tuple[float, float], y: None = ...) -> float: ... @overload def distance(self, x: float, y: float) -> float: ... @overload - def towards(self, x: TNavigator | Tuple[float, float], y: None = ...) -> float: ... + def towards(self, x: TNavigator | tuple[float, float], y: None = ...) -> float: ... @overload def towards(self, x: float, y: float) -> float: ... def heading(self) -> float: ... @@ -136,7 +149,7 @@ class TNavigator(object): setposition = goto seth = setheading -class TPen(object): +class TPen: def __init__(self, resizemode: str = ...) -> None: ... @overload def resizemode(self, rmode: None = ...) -> str: ... @@ -166,7 +179,7 @@ class TPen(object): @overload def fillcolor(self, r: float, g: float, b: float) -> None: ... @overload - def color(self) -> Tuple[_AnyColor, _AnyColor]: ... + def color(self) -> tuple[_AnyColor, _AnyColor]: ... @overload def color(self, color: _Color) -> None: ... @overload @@ -178,7 +191,7 @@ class TPen(object): def isvisible(self) -> bool: ... # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload - def pen(self) -> _PenState: ... # type: ignore + def pen(self) -> _PenState: ... 
# type: ignore[misc] @overload def pen( self, @@ -191,7 +204,7 @@ class TPen(object): pensize: int = ..., speed: int = ..., resizemode: str = ..., - stretchfactor: Tuple[float, float] = ..., + stretchfactor: tuple[float, float] = ..., outline: int = ..., tilt: float = ..., ) -> None: ... @@ -203,9 +216,9 @@ class TPen(object): st = showturtle ht = hideturtle -_T = TypeVar("_T") - class RawTurtle(TPen, TNavigator): + screen: TurtleScreen + screens: ClassVar[list[TurtleScreen]] def __init__( self, canvas: Canvas | TurtleScreen | None = ..., shape: str = ..., undobuffersize: int = ..., visible: bool = ... ) -> None: ... @@ -213,14 +226,14 @@ class RawTurtle(TPen, TNavigator): def setundobuffer(self, size: int | None) -> None: ... def undobufferentries(self) -> int: ... def clear(self) -> None: ... - def clone(self: _T) -> _T: ... + def clone(self: Self) -> Self: ... @overload def shape(self, name: None = ...) -> str: ... @overload def shape(self, name: str) -> None: ... # Unsafely overlaps when no arguments are provided @overload - def shapesize(self) -> Tuple[float, float, float]: ... # type: ignore + def shapesize(self) -> tuple[float, float, float]: ... # type: ignore[misc] @overload def shapesize( self, stretch_wid: float | None = ..., stretch_len: float | None = ..., outline: float | None = ... @@ -231,7 +244,7 @@ class RawTurtle(TPen, TNavigator): def shearfactor(self, shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload - def shapetransform(self) -> Tuple[float, float, float, float]: ... # type: ignore + def shapetransform(self) -> tuple[float, float, float, float]: ... # type: ignore[misc] @overload def shapetransform( self, t11: float | None = ..., t12: float | None = ..., t21: float | None = ..., t22: float | None = ... @@ -247,18 +260,18 @@ class RawTurtle(TPen, TNavigator): # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. def stamp(self) -> Any: ... 
- def clearstamp(self, stampid: int | Tuple[int, ...]) -> None: ... + def clearstamp(self, stampid: int | tuple[int, ...]) -> None: ... def clearstamps(self, n: int | None = ...) -> None: ... def filling(self) -> bool: ... def begin_fill(self) -> None: ... def end_fill(self) -> None: ... def dot(self, size: int | None = ..., *color: _Color) -> None: ... - def write(self, arg: object, move: bool = ..., align: str = ..., font: Tuple[str, int, str] = ...) -> None: ... + def write(self, arg: object, move: bool = ..., align: str = ..., font: tuple[str, int, str] = ...) -> None: ... def begin_poly(self) -> None: ... def end_poly(self) -> None: ... def get_poly(self) -> _PolygonCoords | None: ... def getscreen(self) -> TurtleScreen: ... - def getturtle(self: _T) -> _T: ... + def getturtle(self: Self) -> Self: ... getpen = getturtle def onclick(self, fun: Callable[[float, float], Any], btn: int = ..., add: bool | None = ...) -> None: ... def onrelease(self, fun: Callable[[float, float], Any], btn: int = ..., add: bool | None = ...) -> None: ... @@ -346,7 +359,7 @@ def bgpic(picname: None = ...) -> str: ... @overload def bgpic(picname: str) -> None: ... @overload -def screensize(canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> Tuple[int, int]: ... +def screensize(canvwidth: None = ..., canvheight: None = ..., bg: None = ...) -> tuple[int, int]: ... @overload def screensize(canvwidth: int, canvheight: int, bg: _Color | None = ...) -> None: ... @@ -379,18 +392,18 @@ def pos() -> Vec2D: ... def xcor() -> float: ... def ycor() -> float: ... @overload -def goto(x: Tuple[float, float], y: None = ...) -> None: ... +def goto(x: tuple[float, float], y: None = ...) -> None: ... @overload def goto(x: float, y: float) -> None: ... def home() -> None: ... def setx(x: float) -> None: ... def sety(y: float) -> None: ... @overload -def distance(x: TNavigator | Tuple[float, float], y: None = ...) -> float: ... 
+def distance(x: TNavigator | tuple[float, float], y: None = ...) -> float: ... @overload def distance(x: float, y: float) -> float: ... @overload -def towards(x: TNavigator | Tuple[float, float], y: None = ...) -> float: ... +def towards(x: TNavigator | tuple[float, float], y: None = ...) -> float: ... @overload def towards(x: float, y: float) -> float: ... def heading() -> float: ... @@ -436,7 +449,7 @@ def fillcolor(color: _Color) -> None: ... @overload def fillcolor(r: float, g: float, b: float) -> None: ... @overload -def color() -> Tuple[_AnyColor, _AnyColor]: ... +def color() -> tuple[_AnyColor, _AnyColor]: ... @overload def color(color: _Color) -> None: ... @overload @@ -449,7 +462,7 @@ def isvisible() -> bool: ... # Note: signatures 1 and 2 overlap unsafely when no arguments are provided @overload -def pen() -> _PenState: ... # type: ignore +def pen() -> _PenState: ... # type: ignore[misc] @overload def pen( pen: _PenState | None = ..., @@ -461,7 +474,7 @@ def pen( pensize: int = ..., speed: int = ..., resizemode: str = ..., - stretchfactor: Tuple[float, float] = ..., + stretchfactor: tuple[float, float] = ..., outline: int = ..., tilt: float = ..., ) -> None: ... @@ -485,7 +498,7 @@ def shape(name: str) -> None: ... # Unsafely overlaps when no arguments are provided @overload -def shapesize() -> Tuple[float, float, float]: ... # type: ignore +def shapesize() -> tuple[float, float, float]: ... # type: ignore[misc] @overload def shapesize(stretch_wid: float | None = ..., stretch_len: float | None = ..., outline: float | None = ...) -> None: ... @overload @@ -495,7 +508,7 @@ def shearfactor(shear: float) -> None: ... # Unsafely overlaps when no arguments are provided @overload -def shapetransform() -> Tuple[float, float, float, float]: ... # type: ignore +def shapetransform() -> tuple[float, float, float, float]: ... 
# type: ignore[misc] @overload def shapetransform( t11: float | None = ..., t12: float | None = ..., t21: float | None = ..., t22: float | None = ... @@ -512,13 +525,13 @@ def tilt(angle: float) -> None: ... # a compound stamp or not. So, as per the "no Union return" policy, # we return Any. def stamp() -> Any: ... -def clearstamp(stampid: int | Tuple[int, ...]) -> None: ... +def clearstamp(stampid: int | tuple[int, ...]) -> None: ... def clearstamps(n: int | None = ...) -> None: ... def filling() -> bool: ... def begin_fill() -> None: ... def end_fill() -> None: ... def dot(size: int | None = ..., *color: _Color) -> None: ... -def write(arg: object, move: bool = ..., align: str = ..., font: Tuple[str, int, str] = ...) -> None: ... +def write(arg: object, move: bool = ..., align: str = ..., font: tuple[str, int, str] = ...) -> None: ... def begin_poly() -> None: ... def end_poly() -> None: ... def get_poly() -> _PolygonCoords | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi index 7cd99a429461..90729323dd7d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/types.pyi @@ -1,4 +1,5 @@ import sys +from _typeshed import SupportsKeysAndGetItem from importlib.abc import _LoaderProtocol from importlib.machinery import ModuleSpec from typing import ( @@ -6,6 +7,7 @@ from typing import ( AsyncGenerator, Awaitable, Callable, + Coroutine, Generator, Generic, ItemsView, @@ -13,17 +15,159 @@ from typing import ( Iterator, KeysView, Mapping, - Tuple, - Type, + MutableSequence, TypeVar, ValuesView, overload, ) -from typing_extensions import Literal, final +from typing_extensions import Literal, ParamSpec, final + +if sys.version_info >= (3, 10): + __all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "CellType", + "GeneratorType", + "CoroutineType", + 
"AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "BuiltinMethodType", + "WrapperDescriptorType", + "MethodWrapperType", + "MethodDescriptorType", + "ClassMethodDescriptorType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "resolve_bases", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + "GenericAlias", + "UnionType", + "EllipsisType", + "NoneType", + "NotImplementedType", + ] +elif sys.version_info >= (3, 9): + __all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "CellType", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "BuiltinMethodType", + "WrapperDescriptorType", + "MethodWrapperType", + "MethodDescriptorType", + "ClassMethodDescriptorType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "resolve_bases", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + "GenericAlias", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "CellType", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "BuiltinMethodType", + "WrapperDescriptorType", + "MethodWrapperType", + "MethodDescriptorType", + "ClassMethodDescriptorType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "resolve_bases", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + ] +elif sys.version_info >= (3, 7): + __all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "BuiltinMethodType", + "WrapperDescriptorType", + "MethodWrapperType", + 
"MethodDescriptorType", + "ClassMethodDescriptorType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "resolve_bases", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + ] +else: + __all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + ] # Note, all classes "defined" here require special handling. -_T = TypeVar("_T") _T1 = TypeVar("_T1") _T2 = TypeVar("_T2") _T_co = TypeVar("_T_co", covariant=True) @@ -34,14 +178,15 @@ _V_co = TypeVar("_V_co", covariant=True) @final class _Cell: - __hash__: None # type: ignore + __hash__: None # type: ignore[assignment] cell_contents: Any +# Make sure this class definition stays roughly in line with `builtins.function` @final class FunctionType: - __closure__: Tuple[_Cell, ...] | None + __closure__: tuple[_Cell, ...] | None __code__: CodeType - __defaults__: Tuple[Any, ...] | None + __defaults__: tuple[Any, ...] | None __dict__: dict[str, Any] __globals__: dict[str, Any] __name__: str @@ -53,18 +198,19 @@ class FunctionType: code: CodeType, globals: dict[str, Any], name: str | None = ..., - argdefs: Tuple[object, ...] | None = ..., - closure: Tuple[_Cell, ...] | None = ..., + argdefs: tuple[object, ...] | None = ..., + closure: tuple[_Cell, ...] | None = ..., ) -> None: ... def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def __get__(self, obj: object | None, type: type | None) -> MethodType: ... + @overload + def __get__(self, obj: None, type: type) -> FunctionType: ... + @overload + def __get__(self, obj: object, type: type | None = ...) -> MethodType: ... 
LambdaType = FunctionType @final class CodeType: - """Create a code object. Not for the faint of heart.""" - co_argcount: int if sys.version_info >= (3, 8): co_posonlyargcount: int @@ -73,15 +219,15 @@ class CodeType: co_stacksize: int co_flags: int co_code: bytes - co_consts: Tuple[Any, ...] - co_names: Tuple[str, ...] - co_varnames: Tuple[str, ...] + co_consts: tuple[Any, ...] + co_names: tuple[str, ...] + co_varnames: tuple[str, ...] co_filename: str co_name: str co_firstlineno: int co_lnotab: bytes - co_freevars: Tuple[str, ...] - co_cellvars: Tuple[str, ...] + co_freevars: tuple[str, ...] + co_cellvars: tuple[str, ...] if sys.version_info >= (3, 8): def __init__( self, @@ -92,15 +238,15 @@ class CodeType: stacksize: int, flags: int, codestring: bytes, - constants: Tuple[Any, ...], - names: Tuple[str, ...], - varnames: Tuple[str, ...], + constants: tuple[Any, ...], + names: tuple[str, ...], + varnames: tuple[str, ...], filename: str, name: str, firstlineno: int, lnotab: bytes, - freevars: Tuple[str, ...] = ..., - cellvars: Tuple[str, ...] = ..., + freevars: tuple[str, ...] = ..., + cellvars: tuple[str, ...] = ..., ) -> None: ... else: def __init__( @@ -111,17 +257,40 @@ class CodeType: stacksize: int, flags: int, codestring: bytes, - constants: Tuple[Any, ...], - names: Tuple[str, ...], - varnames: Tuple[str, ...], + constants: tuple[Any, ...], + names: tuple[str, ...], + varnames: tuple[str, ...], filename: str, name: str, firstlineno: int, lnotab: bytes, - freevars: Tuple[str, ...] = ..., - cellvars: Tuple[str, ...] = ..., + freevars: tuple[str, ...] = ..., + cellvars: tuple[str, ...] = ..., ) -> None: ... - if sys.version_info >= (3, 8): + if sys.version_info >= (3, 10): + def replace( + self, + *, + co_argcount: int = ..., + co_posonlyargcount: int = ..., + co_kwonlyargcount: int = ..., + co_nlocals: int = ..., + co_stacksize: int = ..., + co_flags: int = ..., + co_firstlineno: int = ..., + co_code: bytes = ..., + co_consts: tuple[Any, ...] 
= ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_linetable: object = ..., + ) -> CodeType: ... + def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ... + co_linetable: object + elif sys.version_info >= (3, 8): def replace( self, *, @@ -133,21 +302,23 @@ class CodeType: co_flags: int = ..., co_firstlineno: int = ..., co_code: bytes = ..., - co_consts: Tuple[Any, ...] = ..., - co_names: Tuple[str, ...] = ..., - co_varnames: Tuple[str, ...] = ..., - co_freevars: Tuple[str, ...] = ..., - co_cellvars: Tuple[str, ...] = ..., + co_consts: tuple[Any, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., co_filename: str = ..., co_name: str = ..., co_lnotab: bytes = ..., ) -> CodeType: ... + if sys.version_info >= (3, 11): + def co_positions(self) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... @final class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): - __hash__: None # type: ignore - def __init__(self, mapping: Mapping[_KT, _VT_co]) -> None: ... - def __getitem__(self, k: _KT) -> _VT_co: ... + __hash__: None # type: ignore[assignment] + def __init__(self, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> None: ... + def __getitem__(self, __k: _KT) -> _VT_co: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... def copy(self) -> dict[_KT, _VT_co]: ... @@ -161,20 +332,25 @@ class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT_co | _T2]: ... class SimpleNamespace: - __hash__: None # type: ignore + __hash__: None # type: ignore[assignment] def __init__(self, **kwargs: Any) -> None: ... - def __getattribute__(self, name: str) -> Any: ... 
- def __setattr__(self, name: str, value: Any) -> None: ... - def __delattr__(self, name: str) -> None: ... + def __getattribute__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... class ModuleType: __name__: str - __file__: str + __file__: str | None __dict__: dict[str, Any] __loader__: _LoaderProtocol | None __package__: str | None + __path__: MutableSequence[str] __spec__: ModuleSpec | None def __init__(self, name: str, doc: str | None = ...) -> None: ... + # __getattr__ doesn't exist at runtime, + # but having it here in typeshed makes dynamic imports + # using `builtins.__import__` or `importlib.import_module` less painful + def __getattr__(self, name: str) -> Any: ... @final class GeneratorType(Generator[_T_co, _T_contra, _V_co]): @@ -182,13 +358,15 @@ class GeneratorType(Generator[_T_co, _T_contra, _V_co]): gi_frame: FrameType gi_running: bool gi_yieldfrom: GeneratorType[_T_co, _T_contra, Any] | None + __name__: str + __qualname__: str def __iter__(self) -> GeneratorType[_T_co, _T_contra, _V_co]: ... def __next__(self) -> _T_co: ... def close(self) -> None: ... def send(self, __arg: _T_contra) -> _T_co: ... @overload def throw( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... ) -> _T_co: ... @overload def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... @@ -199,50 +377,55 @@ class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): ag_frame: FrameType ag_running: bool ag_code: CodeType + __name__: str + __qualname__: str def __aiter__(self) -> AsyncGeneratorType[_T_co, _T_contra]: ... - def __anext__(self) -> Awaitable[_T_co]: ... - def asend(self, __val: _T_contra) -> Awaitable[_T_co]: ... + def __anext__(self) -> Coroutine[Any, Any, _T_co]: ... 
+ def asend(self, __val: _T_contra) -> Coroutine[Any, Any, _T_co]: ... @overload - def athrow( - self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... - ) -> Awaitable[_T_co]: ... + async def athrow( + self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... @overload - def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> Awaitable[_T_co]: ... - def aclose(self) -> Awaitable[None]: ... + async def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def aclose(self) -> Coroutine[Any, Any, None]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... @final -class CoroutineType: +class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): + __name__: str + __qualname__: str cr_await: Any | None cr_code: CodeType cr_frame: FrameType cr_running: bool def close(self) -> None: ... - def send(self, __arg: Any) -> Any: ... + def __await__(self) -> Generator[Any, None, _V_co]: ... + def send(self, __arg: _T_contra) -> _T_co: ... @overload - def throw(self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ...) -> Any: ... + def throw( + self, __typ: type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... @overload - def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> Any: ... + def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... class _StaticFunctionType: - """Fictional type to correct the type of MethodType.__func__. - - FunctionType is a descriptor, so mypy follows the descriptor protocol and - converts MethodType.__func__ back to MethodType (the return type of - FunctionType.__get__). 
But this is actually a special case; MethodType is - implemented in C and its attribute access doesn't go through - __getattribute__. - - By wrapping FunctionType in _StaticFunctionType, we get the right result; - similar to wrapping a function in staticmethod() at runtime to prevent it - being bound as a method. - """ - + # Fictional type to correct the type of MethodType.__func__. + # FunctionType is a descriptor, so mypy follows the descriptor protocol and + # converts MethodType.__func__ back to MethodType (the return type of + # FunctionType.__get__). But this is actually a special case; MethodType is + # implemented in C and its attribute access doesn't go through + # __getattribute__. + # By wrapping FunctionType in _StaticFunctionType, we get the right result; + # similar to wrapping a function in staticmethod() at runtime to prevent it + # being bound as a method. def __get__(self, obj: object | None, type: type | None) -> FunctionType: ... @final class MethodType: - __closure__: Tuple[_Cell, ...] | None # inherited from the added function - __defaults__: Tuple[Any, ...] | None # inherited from the added function + __closure__: tuple[_Cell, ...] | None # inherited from the added function + __defaults__: tuple[Any, ...] | None # inherited from the added function __func__: _StaticFunctionType __self__: object __name__: str # inherited from the added function @@ -266,7 +449,8 @@ if sys.version_info >= (3, 7): __qualname__: str __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def __get__(self, obj: Any, type: type = ...) -> Any: ... + def __get__(self, __obj: Any, __type: type = ...) -> Any: ... + @final class MethodWrapperType: __self__: object @@ -274,8 +458,9 @@ if sys.version_info >= (3, 7): __qualname__: str __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def __eq__(self, other: Any) -> bool: ... - def __ne__(self, other: Any) -> bool: ... + def __eq__(self, __other: object) -> bool: ... 
+ def __ne__(self, __other: object) -> bool: ... + @final class MethodDescriptorType: __name__: str @@ -283,6 +468,7 @@ if sys.version_info >= (3, 7): __objclass__: type def __call__(self, *args: Any, **kwargs: Any) -> Any: ... def __get__(self, obj: Any, type: type = ...) -> Any: ... + @final class ClassMethodDescriptorType: __name__: str @@ -314,7 +500,10 @@ class FrameType: f_code: CodeType f_globals: dict[str, Any] f_lasti: int - f_lineno: int + # see discussion in #6769: f_lineno *can* sometimes be None, + # but you should probably file a bug report with CPython if you encounter it being None in the wild. + # An `int | None` annotation here causes too many false-positive errors. + f_lineno: int | Any f_locals: dict[str, Any] f_trace: Callable[[FrameType, str, Any], Any] | None if sys.version_info >= (3, 7): @@ -325,18 +514,20 @@ class FrameType: @final class GetSetDescriptorType: __name__: str + __qualname__: str __objclass__: type def __get__(self, __obj: Any, __type: type = ...) -> Any: ... def __set__(self, __instance: Any, __value: Any) -> None: ... - def __delete__(self, obj: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... @final class MemberDescriptorType: __name__: str + __qualname__: str __objclass__: type def __get__(self, __obj: Any, __type: type = ...) -> Any: ... def __set__(self, __instance: Any, __value: Any) -> None: ... - def __delete__(self, obj: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... if sys.version_info >= (3, 7): def new_class( @@ -345,24 +536,33 @@ if sys.version_info >= (3, 7): kwds: dict[str, Any] | None = ..., exec_body: Callable[[dict[str, Any]], None] | None = ..., ) -> type: ... - def resolve_bases(bases: Iterable[object]) -> Tuple[Any, ...]: ... + def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... else: def new_class( name: str, - bases: Tuple[type, ...] = ..., + bases: tuple[type, ...] 
= ..., kwds: dict[str, Any] | None = ..., exec_body: Callable[[dict[str, Any]], None] | None = ..., ) -> type: ... def prepare_class( - name: str, bases: Tuple[type, ...] = ..., kwds: dict[str, Any] | None = ... -) -> Tuple[type, dict[str, Any], dict[str, Any]]: ... + name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = ... +) -> tuple[type, dict[str, Any], dict[str, Any]]: ... # Actually a different type, but `property` is special and we want that too. DynamicClassAttribute = property -def coroutine(func: Callable[..., Any]) -> CoroutineType: ... +_Fn = TypeVar("_Fn", bound=Callable[..., object]) +_R = TypeVar("_R") +_P = ParamSpec("_P") + +# it's not really an Awaitable, but can be used in an await expression. Real type: Generator & Awaitable +# The type: ignore is due to overlapping overloads, not the use of ParamSpec +@overload +def coroutine(func: Callable[_P, Generator[_R, Any, Any]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] +@overload +def coroutine(func: _Fn) -> _Fn: ... if sys.version_info >= (3, 8): CellType = _Cell @@ -370,8 +570,8 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 9): class GenericAlias: __origin__: type - __args__: Tuple[Any, ...] - __parameters__: Tuple[Any, ...] + __args__: tuple[Any, ...] + __parameters__: tuple[Any, ...] def __init__(self, origin: type, args: Any) -> None: ... def __getattr__(self, name: str) -> Any: ... # incomplete @@ -385,6 +585,6 @@ if sys.version_info >= (3, 10): NotImplementedType = _NotImplementedType # noqa F811 from builtins @final class UnionType: - __args__: Tuple[Any, ...] - def __or__(self, obj: Any) -> UnionType: ... - def __ror__(self, obj: Any) -> UnionType: ... + __args__: tuple[Any, ...] + def __or__(self, __obj: Any) -> UnionType: ... + def __ror__(self, __obj: Any) -> UnionType: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi index b87788667c37..f941a55042b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/typing.pyi @@ -1,8 +1,9 @@ import collections # Needed by aliases like DefaultDict, see mypy issue 2986 import sys +from _typeshed import Self as TypeshedSelf, SupportsKeysAndGetItem from abc import ABCMeta, abstractmethod from types import BuiltinFunctionType, CodeType, FrameType, FunctionType, MethodType, ModuleType, TracebackType -from typing_extensions import Literal as _Literal, ParamSpec as _ParamSpec +from typing_extensions import Literal as _Literal, ParamSpec as _ParamSpec, final as _final if sys.version_info >= (3, 7): from types import MethodDescriptorType, MethodWrapperType, WrapperDescriptorType @@ -10,30 +11,485 @@ if sys.version_info >= (3, 7): if sys.version_info >= (3, 9): from types import GenericAlias -# Definitions of special type checking related constructs. Their definitions -# are not used, so their value does not matter. 
+if sys.version_info >= (3, 11): + __all__ = [ + "Annotated", + "Any", + "Callable", + "ClassVar", + "Concatenate", + "Final", + "ForwardRef", + "Generic", + "Literal", + "Optional", + "ParamSpec", + "Protocol", + "Tuple", + "Type", + "TypeVar", + "Union", + "AbstractSet", + "ByteString", + "Container", + "ContextManager", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Sequence", + "Sized", + "ValuesView", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "Collection", + "AsyncGenerator", + "AsyncContextManager", + "Reversible", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + "ChainMap", + "Counter", + "Deque", + "Dict", + "DefaultDict", + "List", + "OrderedDict", + "Set", + "FrozenSet", + "NamedTuple", + "TypedDict", + "Generator", + "BinaryIO", + "IO", + "Match", + "Pattern", + "TextIO", + "AnyStr", + "assert_never", + "cast", + "final", + "get_args", + "get_origin", + "get_type_hints", + "is_typeddict", + "Never", + "NewType", + "no_type_check", + "no_type_check_decorator", + "NoReturn", + "overload", + "ParamSpecArgs", + "ParamSpecKwargs", + "reveal_type", + "runtime_checkable", + "Self", + "Text", + "TYPE_CHECKING", + "TypeAlias", + "TypeGuard", + ] +elif sys.version_info >= (3, 10): + __all__ = [ + "Annotated", + "Any", + "Callable", + "ClassVar", + "Concatenate", + "Final", + "ForwardRef", + "Generic", + "Literal", + "Optional", + "ParamSpec", + "Protocol", + "Tuple", + "Type", + "TypeVar", + "Union", + "AbstractSet", + "ByteString", + "Container", + "ContextManager", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Sequence", + "Sized", + "ValuesView", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + 
"Collection", + "AsyncGenerator", + "AsyncContextManager", + "Reversible", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + "ChainMap", + "Counter", + "Deque", + "Dict", + "DefaultDict", + "List", + "OrderedDict", + "Set", + "FrozenSet", + "NamedTuple", + "TypedDict", + "Generator", + "BinaryIO", + "IO", + "Match", + "Pattern", + "TextIO", + "AnyStr", + "cast", + "final", + "get_args", + "get_origin", + "get_type_hints", + "is_typeddict", + "NewType", + "no_type_check", + "no_type_check_decorator", + "NoReturn", + "overload", + "ParamSpecArgs", + "ParamSpecKwargs", + "runtime_checkable", + "Text", + "TYPE_CHECKING", + "TypeAlias", + "TypeGuard", + ] +elif sys.version_info >= (3, 9): + __all__ = [ + "Annotated", + "Any", + "Callable", + "ClassVar", + "Final", + "ForwardRef", + "Generic", + "Literal", + "Optional", + "Protocol", + "Tuple", + "Type", + "TypeVar", + "Union", + "AbstractSet", + "ByteString", + "Container", + "ContextManager", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Sequence", + "Sized", + "ValuesView", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "Collection", + "AsyncGenerator", + "AsyncContextManager", + "Reversible", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + "ChainMap", + "Counter", + "Deque", + "Dict", + "DefaultDict", + "List", + "OrderedDict", + "Set", + "FrozenSet", + "NamedTuple", + "TypedDict", + "Generator", + "BinaryIO", + "IO", + "Match", + "Pattern", + "TextIO", + "AnyStr", + "cast", + "final", + "get_args", + "get_origin", + "get_type_hints", + "NewType", + "no_type_check", + "no_type_check_decorator", + "NoReturn", + "overload", + "runtime_checkable", + "Text", + "TYPE_CHECKING", + ] +elif sys.version_info >= (3, 8): + 
__all__ = [ + "Any", + "Callable", + "ClassVar", + "Final", + "ForwardRef", + "Generic", + "Literal", + "Optional", + "Protocol", + "Tuple", + "Type", + "TypeVar", + "Union", + "AbstractSet", + "ByteString", + "Container", + "ContextManager", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Sequence", + "Sized", + "ValuesView", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "Collection", + "AsyncGenerator", + "AsyncContextManager", + "Reversible", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + "ChainMap", + "Counter", + "Deque", + "Dict", + "DefaultDict", + "List", + "OrderedDict", + "Set", + "FrozenSet", + "NamedTuple", + "TypedDict", + "Generator", + "AnyStr", + "cast", + "final", + "get_args", + "get_origin", + "get_type_hints", + "NewType", + "no_type_check", + "no_type_check_decorator", + "NoReturn", + "overload", + "runtime_checkable", + "Text", + "TYPE_CHECKING", + ] +elif sys.version_info >= (3, 7): + __all__ = [ + "Any", + "Callable", + "ClassVar", + "ForwardRef", + "Generic", + "Optional", + "Tuple", + "Type", + "TypeVar", + "Union", + "AbstractSet", + "ByteString", + "Container", + "ContextManager", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Sequence", + "Sized", + "ValuesView", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "Collection", + "AsyncGenerator", + "AsyncContextManager", + "Reversible", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsInt", + "SupportsRound", + "ChainMap", + "Counter", + "Deque", + "Dict", + "DefaultDict", + "List", + "OrderedDict", + "Set", + "FrozenSet", + "NamedTuple", + "Generator", + "AnyStr", + "cast", + "get_type_hints", + 
"NewType", + "no_type_check", + "no_type_check_decorator", + "NoReturn", + "overload", + "Text", + "TYPE_CHECKING", + ] +else: + __all__ = [ + "Any", + "Callable", + "ClassVar", + "Generic", + "Optional", + "Tuple", + "Type", + "TypeVar", + "Union", + "AbstractSet", + "GenericMeta", + "ByteString", + "Container", + "ContextManager", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Sequence", + "Sized", + "ValuesView", + "Reversible", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsInt", + "SupportsRound", + "Counter", + "Deque", + "Dict", + "DefaultDict", + "List", + "Set", + "FrozenSet", + "NamedTuple", + "Generator", + "AnyStr", + "cast", + "get_type_hints", + "NewType", + "no_type_check", + "no_type_check_decorator", + "overload", + "Text", + "TYPE_CHECKING", + ] Any = object() +@_final class TypeVar: __name__: str - __bound__: Type[Any] | None - __constraints__: Tuple[Type[Any], ...] + __bound__: Any | None + __constraints__: tuple[Any, ...] __covariant__: bool __contravariant__: bool def __init__( - self, - name: str, - *constraints: Type[Any], - bound: None | Type[Any] | str = ..., - covariant: bool = ..., - contravariant: bool = ..., + self, name: str, *constraints: Any, bound: Any | None = ..., covariant: bool = ..., contravariant: bool = ... ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... +# Used for an undocumented mypy feature. Does not exist at runtime. _promote = object() +# N.B. Keep this definition in sync with typing_extensions._SpecialForm +@_final class _SpecialForm: def __getitem__(self, typeargs: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... 
_F = TypeVar("_F", bound=Callable[..., Any]) _P = _ParamSpec("_P") @@ -41,22 +497,34 @@ _T = TypeVar("_T") def overload(func: _F) -> _F: ... +# Unlike the vast majority module-level objects in stub files, +# these `_SpecialForm` objects in typing need the default value `= ...`, +# due to the fact that they are used elswhere in the same file. +# Otherwise, flake8 erroneously flags them as undefined. +# `_SpecialForm` objects in typing.py that are not used elswhere in the same file +# do not need the default value assignment. Union: _SpecialForm = ... -Optional: _SpecialForm = ... -Tuple: _SpecialForm = ... Generic: _SpecialForm = ... # Protocol is only present in 3.8 and later, but mypy needs it unconditionally Protocol: _SpecialForm = ... Callable: _SpecialForm = ... Type: _SpecialForm = ... -ClassVar: _SpecialForm = ... +NoReturn: _SpecialForm = ... + +Optional: _SpecialForm +Tuple: _SpecialForm +ClassVar: _SpecialForm if sys.version_info >= (3, 8): - Final: _SpecialForm = ... + Final: _SpecialForm def final(f: _T) -> _T: ... - Literal: _SpecialForm = ... + Literal: _SpecialForm # TypedDict is a (non-subscriptable) special form. TypedDict: object +if sys.version_info >= (3, 11): + Self: _SpecialForm + Never: _SpecialForm = ... + if sys.version_info < (3, 7): class GenericMeta(type): ... @@ -64,29 +532,36 @@ if sys.version_info >= (3, 10): class ParamSpecArgs: __origin__: ParamSpec def __init__(self, origin: ParamSpec) -> None: ... + class ParamSpecKwargs: __origin__: ParamSpec def __init__(self, origin: ParamSpec) -> None: ... + class ParamSpec: __name__: str - __bound__: Type[Any] | None + __bound__: Any | None __covariant__: bool __contravariant__: bool - def __init__( - self, name: str, *, bound: None | Type[Any] | str = ..., contravariant: bool = ..., covariant: bool = ... - ) -> None: ... + def __init__(self, name: str, *, bound: Any | None = ..., contravariant: bool = ..., covariant: bool = ...) -> None: ... @property def args(self) -> ParamSpecArgs: ... 
@property def kwargs(self) -> ParamSpecKwargs: ... - Concatenate: _SpecialForm = ... - TypeAlias: _SpecialForm = ... - TypeGuard: _SpecialForm = ... + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + + class NewType: + def __init__(self, name: str, tp: type) -> None: ... + def __call__(self, x: _T) -> _T: ... + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + __supertype__: type -# Return type that indicates a function does not return. -# This type is equivalent to the None type, but the no-op Union is necessary to -# distinguish the None type from the None value. -NoReturn = Union[None] +else: + def NewType(name: str, tp: Type[_T]) -> Type[_T]: ... # These type variables are used by the container types. _S = TypeVar("_S") @@ -100,7 +575,7 @@ _T_contra = TypeVar("_T_contra", contravariant=True) # Ditto contravariant. _TC = TypeVar("_TC", bound=Type[object]) def no_type_check(arg: _F) -> _F: ... -def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... # type: ignore +def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... # type: ignore[misc] # Type aliases and type constructors @@ -121,10 +596,14 @@ if sys.version_info >= (3, 7): OrderedDict = _Alias() if sys.version_info >= (3, 9): - Annotated: _SpecialForm = ... + Annotated: _SpecialForm # Predefined type variables. -AnyStr = TypeVar("AnyStr", str, bytes) +AnyStr = TypeVar("AnyStr", str, bytes) # noqa: Y001 + +# Technically in 3.7 this inherited from GenericMeta. But let's not reflect that, since +# type checkers tend to assume that Protocols all have the ABCMeta metaclass. +class _ProtocolMeta(ABCMeta): ... # Abstract base classes. @@ -167,7 +646,7 @@ class SupportsRound(Protocol[_T_co]): def __round__(self) -> int: ... 
@overload @abstractmethod - def __round__(self, ndigits: int) -> _T_co: ... + def __round__(self, __ndigits: int) -> _T_co: ... @runtime_checkable class Sized(Protocol, metaclass=ABCMeta): @@ -250,7 +729,7 @@ class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): @abstractmethod def close(self) -> None: ... -# NOTE: This type does not exist in typing.py or PEP 484. +# NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. # The parameters correspond to Generator, but the 4th is the original type. class AwaitableGenerator( Awaitable[_V_co], Generator[_T_co, _T_contra, _V_co], Generic[_T_co, _T_contra, _V_co, _S], metaclass=ABCMeta @@ -264,24 +743,24 @@ class AsyncIterable(Protocol[_T_co]): @runtime_checkable class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): @abstractmethod - def __anext__(self) -> Awaitable[_T_co]: ... + async def __anext__(self) -> _T_co: ... def __aiter__(self) -> AsyncIterator[_T_co]: ... class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): @abstractmethod - def __anext__(self) -> Awaitable[_T_co]: ... + async def __anext__(self) -> _T_co: ... @abstractmethod - def asend(self, __value: _T_contra) -> Awaitable[_T_co]: ... + async def asend(self, __value: _T_contra) -> _T_co: ... @overload @abstractmethod - def athrow( + async def athrow( self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... - ) -> Awaitable[_T_co]: ... + ) -> _T_co: ... @overload @abstractmethod - def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> Awaitable[_T_co]: ... + async def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... @abstractmethod - def aclose(self) -> Awaitable[None]: ... + async def aclose(self) -> None: ... @abstractmethod def __aiter__(self) -> AsyncGenerator[_T_co, _T_contra]: ... 
@property @@ -304,9 +783,7 @@ class Collection(Iterable[_T_co], Container[_T_co], Protocol[_T_co]): @abstractmethod def __len__(self) -> int: ... -_Collection = Collection[_T_co] - -class Sequence(_Collection[_T_co], Reversible[_T_co], Generic[_T_co]): +class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): @overload @abstractmethod def __getitem__(self, i: int) -> _T_co: ... @@ -348,11 +825,12 @@ class MutableSequence(Sequence[_T], Generic[_T]): def reverse(self) -> None: ... def pop(self, index: int = ...) -> _T: ... def remove(self, value: _T) -> None: ... - def __iadd__(self, x: Iterable[_T]) -> MutableSequence[_T]: ... + def __iadd__(self: TypeshedSelf, x: Iterable[_T]) -> TypeshedSelf: ... -class AbstractSet(_Collection[_T_co], Generic[_T_co]): +class AbstractSet(Collection[_T_co], Generic[_T_co]): @abstractmethod def __contains__(self, x: object) -> bool: ... + def _hash(self) -> int: ... # Mixin methods def __le__(self, s: AbstractSet[Any]) -> bool: ... def __lt__(self, s: AbstractSet[Any]) -> bool: ... @@ -373,44 +851,46 @@ class MutableSet(AbstractSet[_T], Generic[_T]): def clear(self) -> None: ... def pop(self) -> _T: ... def remove(self, value: _T) -> None: ... - def __ior__(self, s: AbstractSet[_S]) -> MutableSet[_T | _S]: ... - def __iand__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... - def __ixor__(self, s: AbstractSet[_S]) -> MutableSet[_T | _S]: ... - def __isub__(self, s: AbstractSet[Any]) -> MutableSet[_T]: ... + def __ior__(self: TypeshedSelf, s: AbstractSet[_T]) -> TypeshedSelf: ... # type: ignore[override,misc] + def __iand__(self: TypeshedSelf, s: AbstractSet[Any]) -> TypeshedSelf: ... + def __ixor__(self: TypeshedSelf, s: AbstractSet[_T]) -> TypeshedSelf: ... # type: ignore[override,misc] + def __isub__(self: TypeshedSelf, s: AbstractSet[Any]) -> TypeshedSelf: ... class MappingView(Sized): def __init__(self, mapping: Mapping[Any, Any]) -> None: ... # undocumented def __len__(self) -> int: ... 
-class ItemsView(MappingView, AbstractSet[Tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): +class ItemsView(MappingView, AbstractSet[tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): def __init__(self, mapping: Mapping[_KT_co, _VT_co]) -> None: ... # undocumented - def __and__(self, o: Iterable[Any]) -> Set[Tuple[_KT_co, _VT_co]]: ... - def __rand__(self, o: Iterable[_T]) -> Set[_T]: ... + def __and__(self, o: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rand__(self, o: Iterable[_T]) -> set[_T]: ... def __contains__(self, o: object) -> bool: ... - def __iter__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... + def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... if sys.version_info >= (3, 8): - def __reversed__(self) -> Iterator[Tuple[_KT_co, _VT_co]]: ... - def __or__(self, o: Iterable[_T]) -> Set[Tuple[_KT_co, _VT_co] | _T]: ... - def __ror__(self, o: Iterable[_T]) -> Set[Tuple[_KT_co, _VT_co] | _T]: ... - def __sub__(self, o: Iterable[Any]) -> Set[Tuple[_KT_co, _VT_co]]: ... - def __rsub__(self, o: Iterable[_T]) -> Set[_T]: ... - def __xor__(self, o: Iterable[_T]) -> Set[Tuple[_KT_co, _VT_co] | _T]: ... - def __rxor__(self, o: Iterable[_T]) -> Set[Tuple[_KT_co, _VT_co] | _T]: ... + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + + def __or__(self, o: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __ror__(self, o: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __sub__(self, o: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rsub__(self, o: Iterable[_T]) -> set[_T]: ... + def __xor__(self, o: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __rxor__(self, o: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): def __init__(self, mapping: Mapping[_KT_co, Any]) -> None: ... # undocumented - def __and__(self, o: Iterable[Any]) -> Set[_KT_co]: ... - def __rand__(self, o: Iterable[_T]) -> Set[_T]: ... 
+ def __and__(self, o: Iterable[Any]) -> set[_KT_co]: ... + def __rand__(self, o: Iterable[_T]) -> set[_T]: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT_co]: ... if sys.version_info >= (3, 8): def __reversed__(self) -> Iterator[_KT_co]: ... - def __or__(self, o: Iterable[_T]) -> Set[_KT_co | _T]: ... - def __ror__(self, o: Iterable[_T]) -> Set[_KT_co | _T]: ... - def __sub__(self, o: Iterable[Any]) -> Set[_KT_co]: ... - def __rsub__(self, o: Iterable[_T]) -> Set[_T]: ... - def __xor__(self, o: Iterable[_T]) -> Set[_KT_co | _T]: ... - def __rxor__(self, o: Iterable[_T]) -> Set[_KT_co | _T]: ... + + def __or__(self, o: Iterable[_T]) -> set[_KT_co | _T]: ... + def __ror__(self, o: Iterable[_T]) -> set[_KT_co | _T]: ... + def __sub__(self, o: Iterable[Any]) -> set[_KT_co]: ... + def __rsub__(self, o: Iterable[_T]) -> set[_T]: ... + def __xor__(self, o: Iterable[_T]) -> set[_KT_co | _T]: ... + def __rxor__(self, o: Iterable[_T]) -> set[_KT_co | _T]: ... class ValuesView(MappingView, Iterable[_VT_co], Generic[_VT_co]): def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ... # undocumented @@ -428,59 +908,75 @@ class ContextManager(Protocol[_T_co]): @runtime_checkable class AsyncContextManager(Protocol[_T_co]): - def __aenter__(self) -> Awaitable[_T_co]: ... - def __aexit__( - self, exc_type: Type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None - ) -> Awaitable[bool | None]: ... + async def __aenter__(self) -> _T_co: ... + async def __aexit__( + self, __exc_type: Type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool | None: ... -class Mapping(_Collection[_KT], Generic[_KT, _VT_co]): +class Mapping(Collection[_KT], Generic[_KT, _VT_co]): # TODO: We wish the key type could also be covariant, but that doesn't work, # see discussion in https://github.com/python/typing/pull/273. 
@abstractmethod - def __getitem__(self, k: _KT) -> _VT_co: ... + def __getitem__(self, __k: _KT) -> _VT_co: ... # Mixin methods @overload - def get(self, key: _KT) -> _VT_co | None: ... + def get(self, __key: _KT) -> _VT_co | None: ... @overload - def get(self, key: _KT, default: _VT_co | _T) -> _VT_co | _T: ... + def get(self, __key: _KT, default: _VT_co | _T) -> _VT_co | _T: ... def items(self) -> ItemsView[_KT, _VT_co]: ... def keys(self) -> KeysView[_KT]: ... def values(self) -> ValuesView[_VT_co]: ... - def __contains__(self, o: object) -> bool: ... + def __contains__(self, __o: object) -> bool: ... class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): @abstractmethod - def __setitem__(self, k: _KT, v: _VT) -> None: ... + def __setitem__(self, __k: _KT, __v: _VT) -> None: ... @abstractmethod - def __delitem__(self, v: _KT) -> None: ... + def __delitem__(self, __v: _KT) -> None: ... def clear(self) -> None: ... @overload - def pop(self, key: _KT) -> _VT: ... + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ... + def popitem(self) -> tuple[_KT, _VT]: ... + # This overload should be allowed only if the value type is compatible with None. + # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. @overload - def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... - def popitem(self) -> Tuple[_KT, _VT]: ... - def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT) -> _T | None: ... + @overload + def setdefault(self, __key: _KT, __default: _VT) -> _VT: ... # 'update' used to take a Union, but using overloading is better. 
# The second overloaded type here is a bit too general, because - # Mapping[Tuple[_KT, _VT], W] is a subclass of Iterable[Tuple[_KT, _VT]], + # Mapping[tuple[_KT, _VT], W] is a subclass of Iterable[tuple[_KT, _VT]], # but will always have the behavior of the first overloaded type - # at runtime, leading to keys of a mix of types _KT and Tuple[_KT, _VT]. + # at runtime, leading to keys of a mix of types _KT and tuple[_KT, _VT]. # We don't currently have any way of forcing all Mappings to use # the first overload, but by using overloading rather than a Union, # mypy will commit to using the first overload when the argument is # known to be a Mapping with unknown type parameters, which is closer # to the behavior we want. See mypy issue #1430. + # + # Various mapping classes have __ior__ methods that should be kept roughly in line with .update(): + # -- dict.__ior__ + # -- os._Environ.__ior__ + # -- collections.UserDict.__ior__ + # -- collections.ChainMap.__ior__ + # -- weakref.WeakValueDictionary.__ior__ + # -- weakref.WeakKeyDictionary.__ior__ @overload - def update(self, __m: Mapping[_KT, _VT], **kwargs: _VT) -> None: ... + def update(self, __m: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... @overload - def update(self, __m: Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + def update(self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: ... @overload def update(self, **kwargs: _VT) -> None: ... Text = str -TYPE_CHECKING = True +TYPE_CHECKING: bool +# In stubs, the arguments of the IO class are marked as positional-only. +# This differs from runtime, but better reflects the fact that in reality +# classes deriving from IO use different names for the arguments. class IO(Iterator[AnyStr], Generic[AnyStr]): # TODO use abstract properties @property @@ -498,27 +994,27 @@ class IO(Iterator[AnyStr], Generic[AnyStr]): @abstractmethod def isatty(self) -> bool: ... @abstractmethod - def read(self, n: int = ...) -> AnyStr: ... 
+ def read(self, __n: int = ...) -> AnyStr: ... @abstractmethod def readable(self) -> bool: ... @abstractmethod - def readline(self, limit: int = ...) -> AnyStr: ... + def readline(self, __limit: int = ...) -> AnyStr: ... @abstractmethod - def readlines(self, hint: int = ...) -> list[AnyStr]: ... + def readlines(self, __hint: int = ...) -> list[AnyStr]: ... @abstractmethod - def seek(self, offset: int, whence: int = ...) -> int: ... + def seek(self, __offset: int, __whence: int = ...) -> int: ... @abstractmethod def seekable(self) -> bool: ... @abstractmethod def tell(self) -> int: ... @abstractmethod - def truncate(self, size: int | None = ...) -> int: ... + def truncate(self, __size: int | None = ...) -> int: ... @abstractmethod def writable(self) -> bool: ... @abstractmethod - def write(self, s: AnyStr) -> int: ... + def write(self, __s: AnyStr) -> int: ... @abstractmethod - def writelines(self, lines: Iterable[AnyStr]) -> None: ... + def writelines(self, __lines: Iterable[AnyStr]) -> None: ... @abstractmethod def __next__(self) -> AnyStr: ... @abstractmethod @@ -527,7 +1023,7 @@ class IO(Iterator[AnyStr], Generic[AnyStr]): def __enter__(self) -> IO[AnyStr]: ... @abstractmethod def __exit__( - self, t: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, __t: Type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None ) -> bool | None: ... class BinaryIO(IO[bytes]): @@ -551,11 +1047,12 @@ class TextIO(IO[str]): class ByteString(Sequence[int], metaclass=ABCMeta): ... +@_final class Match(Generic[AnyStr]): pos: int endpos: int lastindex: int | None - lastgroup: AnyStr | None + lastgroup: str | None string: AnyStr # The regular expression object whose match() or search() method produced @@ -568,13 +1065,13 @@ class Match(Generic[AnyStr]): @overload def group(self, __group: str | int) -> AnyStr | Any: ... 
@overload - def group(self, __group1: str | int, __group2: str | int, *groups: str | int) -> Tuple[AnyStr | Any, ...]: ... + def group(self, __group1: str | int, __group2: str | int, *groups: str | int) -> tuple[AnyStr | Any, ...]: ... # Each item of groups()'s return tuple is either "AnyStr" or # "AnyStr | None", depending on the pattern. @overload - def groups(self) -> Tuple[AnyStr | Any, ...]: ... + def groups(self) -> tuple[AnyStr | Any, ...]: ... @overload - def groups(self, default: _T) -> Tuple[AnyStr | _T, ...]: ... + def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... # Each value in groupdict()'s return dict is either "AnyStr" or # "AnyStr | None", depending on the pattern. @overload @@ -583,17 +1080,20 @@ class Match(Generic[AnyStr]): def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... def start(self, __group: int | str = ...) -> int: ... def end(self, __group: int | str = ...) -> int: ... - def span(self, __group: int | str = ...) -> Tuple[int, int]: ... + def span(self, __group: int | str = ...) -> tuple[int, int]: ... @property - def regs(self) -> Tuple[Tuple[int, int], ...]: ... # undocumented + def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. @overload def __getitem__(self, __key: _Literal[0]) -> AnyStr: ... @overload def __getitem__(self, __key: int | str) -> AnyStr | Any: ... + def __copy__(self) -> Match[AnyStr]: ... + def __deepcopy__(self, __memo: Any) -> Match[AnyStr]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... +@_final class Pattern(Generic[AnyStr]): flags: int groupindex: Mapping[str, int] @@ -602,7 +1102,7 @@ class Pattern(Generic[AnyStr]): def search(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Match[AnyStr] | None: ... def match(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Match[AnyStr] | None: ... 
def fullmatch(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Match[AnyStr] | None: ... - def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr]: ... + def split(self, string: AnyStr, maxsplit: int = ...) -> list[AnyStr | Any]: ... def findall(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> list[Any]: ... def finditer(self, string: AnyStr, pos: int = ..., endpos: int = ...) -> Iterator[Match[AnyStr]]: ... @overload @@ -610,9 +1110,11 @@ class Pattern(Generic[AnyStr]): @overload def sub(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> AnyStr: ... @overload - def subn(self, repl: AnyStr, string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... + def subn(self, repl: AnyStr, string: AnyStr, count: int = ...) -> tuple[AnyStr, int]: ... @overload - def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> Tuple[AnyStr, int]: ... + def subn(self, repl: Callable[[Match[AnyStr]], AnyStr], string: AnyStr, count: int = ...) -> tuple[AnyStr, int]: ... + def __copy__(self) -> Pattern[AnyStr]: ... + def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -650,7 +1152,7 @@ else: if sys.version_info >= (3, 8): def get_origin(tp: Any) -> Any | None: ... - def get_args(tp: Any) -> Tuple[Any, ...]: ... + def get_args(tp: Any) -> tuple[Any, ...]: ... @overload def cast(typ: Type[_T], val: Any) -> _T: ... @@ -659,26 +1161,36 @@ def cast(typ: str, val: Any) -> Any: ... @overload def cast(typ: object, val: Any) -> Any: ... +if sys.version_info >= (3, 11): + def reveal_type(__obj: _T) -> _T: ... + def assert_never(__arg: Never) -> Never: ... 
+ # Type constructors -# NamedTuple is special-cased in the type checker -class NamedTuple(Tuple[Any, ...]): - _field_types: collections.OrderedDict[str, Type[Any]] +class NamedTuple(tuple[Any, ...]): + if sys.version_info < (3, 8): + _field_types: collections.OrderedDict[str, type] + elif sys.version_info < (3, 9): + _field_types: dict[str, type] _field_defaults: dict[str, Any] - _fields: Tuple[str, ...] + _fields: tuple[str, ...] _source: str - def __init__(self, typename: str, fields: Iterable[Tuple[str, Any]] = ..., **kwargs: Any) -> None: ... + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... @classmethod def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... if sys.version_info >= (3, 8): def _asdict(self) -> dict[str, Any]: ... else: def _asdict(self) -> collections.OrderedDict[str, Any]: ... - def _replace(self: _T, **kwargs: Any) -> _T: ... + + def _replace(self: TypeshedSelf, **kwargs: Any) -> TypeshedSelf: ... # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=ABCMeta): - def copy(self: _T) -> _T: ... + def copy(self: TypeshedSelf) -> TypeshedSelf: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... @@ -689,15 +1201,14 @@ class _TypedDict(Mapping[str, object], metaclass=ABCMeta): def items(self) -> ItemsView[str, object]: ... def keys(self) -> KeysView[str]: ... def values(self) -> ValuesView[object]: ... - def __or__(self: _T, __value: _T) -> _T: ... - def __ior__(self: _T, __value: _T) -> _T: ... - -def NewType(name: str, tp: Type[_T]) -> Type[_T]: ... + def __or__(self: TypeshedSelf, __value: TypeshedSelf) -> TypeshedSelf: ... + def __ior__(self: TypeshedSelf, __value: TypeshedSelf) -> TypeshedSelf: ... 
# This itself is only available during type checking def type_check_only(func_or_cls: _F) -> _F: ... if sys.version_info >= (3, 7): + @_final class ForwardRef: __forward_arg__: str __forward_code__: CodeType @@ -705,14 +1216,17 @@ if sys.version_info >= (3, 7): __forward_value__: Any | None __forward_is_argument__: bool if sys.version_info >= (3, 9): - # The module argument was added in Python 3.9.7. - def __init__(self, arg: str, is_argument: bool = ..., module: Any | None = ...) -> None: ... + # The module and is_class arguments were added in later Python 3.9 versions. + def __init__(self, arg: str, is_argument: bool = ..., module: Any | None = ..., *, is_class: bool = ...) -> None: ... else: def __init__(self, arg: str, is_argument: bool = ...) -> None: ... + def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... - def __eq__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> bool: ... def __hash__(self) -> int: ... - def __repr__(self) -> str: ... + if sys.version_info >= (3, 11): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... if sys.version_info >= (3, 10): - def is_typeddict(tp: Any) -> bool: ... + def is_typeddict(tp: object) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi index 23829d54bfac..8d3852fce174 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/typing_extensions.pyi @@ -1,6 +1,7 @@ import abc import sys -from typing import ( +from _typeshed import Self as TypeshedSelf # see #6932 for why the alias cannot have a leading underscore +from typing import ( # noqa Y022 TYPE_CHECKING as TYPE_CHECKING, Any, AsyncContextManager as AsyncContextManager, @@ -22,7 +23,6 @@ from typing import ( NewType as NewType, NoReturn as NoReturn, Text as Text, - Tuple, Type as Type, TypeVar, ValuesView, @@ -34,25 +34,38 @@ _T = TypeVar("_T") _F = TypeVar("_F", bound=Callable[..., Any]) _TC = TypeVar("_TC", bound=Type[object]) +# unfortunately we have to duplicate this class definition from typing.pyi or we break pytype class _SpecialForm: - def __getitem__(self, typeargs: Any) -> Any: ... + def __getitem__(self, typeargs: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +# Do not import (and re-export) Protocol or runtime_checkable from +# typing module because type checkers need to be able to distinguish +# typing.Protocol and typing_extensions.Protocol so they can properly +# warn users about potential runtime exceptions when using typing.Protocol +# on older versions of Python. +Protocol: _SpecialForm = ... def runtime_checkable(cls: _TC) -> _TC: ... # This alias for above is kept here for backwards compatibility. runtime = runtime_checkable -Protocol: _SpecialForm = ... -Final: _SpecialForm = ... +Final: _SpecialForm def final(f: _F) -> _F: ... -Literal: _SpecialForm = ... +Literal: _SpecialForm def IntVar(name: str) -> Any: ... 
# returns a new TypeVar # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): - def copy(self: _T) -> _T: ... + __required_keys__: frozenset[str] + __optional_keys__: frozenset[str] + __total__: bool + def copy(self: TypeshedSelf) -> TypeshedSelf: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... @@ -65,62 +78,93 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def __delitem__(self, k: NoReturn) -> None: ... # TypedDict is a (non-subscriptable) special form. -TypedDict: object = ... +TypedDict: object OrderedDict = _Alias() -def get_type_hints( - obj: Callable[..., Any], - globalns: dict[str, Any] | None = ..., - localns: dict[str, Any] | None = ..., - include_extras: bool = ..., -) -> dict[str, Any]: ... - if sys.version_info >= (3, 7): - def get_args(tp: Any) -> Tuple[Any, ...]: ... + def get_type_hints( + obj: Callable[..., Any], + globalns: dict[str, Any] | None = ..., + localns: dict[str, Any] | None = ..., + include_extras: bool = ..., + ) -> dict[str, Any]: ... + def get_args(tp: Any) -> tuple[Any, ...]: ... def get_origin(tp: Any) -> Any | None: ... -Annotated: _SpecialForm = ... -_AnnotatedAlias: Any = ... # undocumented +Annotated: _SpecialForm +_AnnotatedAlias: Any # undocumented @runtime_checkable class SupportsIndex(Protocol, metaclass=abc.ABCMeta): @abc.abstractmethod def __index__(self) -> int: ... 
-# PEP 612 support for Python < 3.9 +# New things in 3.10 if sys.version_info >= (3, 10): - from typing import Concatenate as Concatenate, ParamSpec as ParamSpec, TypeAlias as TypeAlias, TypeGuard as TypeGuard + from typing import ( + Concatenate as Concatenate, + ParamSpec as ParamSpec, + TypeAlias as TypeAlias, + TypeGuard as TypeGuard, + is_typeddict as is_typeddict, + ) else: class ParamSpecArgs: __origin__: ParamSpec def __init__(self, origin: ParamSpec) -> None: ... + class ParamSpecKwargs: __origin__: ParamSpec def __init__(self, origin: ParamSpec) -> None: ... + class ParamSpec: __name__: str - __bound__: Type[Any] | None + __bound__: type[Any] | None __covariant__: bool __contravariant__: bool def __init__( - self, name: str, *, bound: None | Type[Any] | str = ..., contravariant: bool = ..., covariant: bool = ... + self, name: str, *, bound: None | type[Any] | str = ..., contravariant: bool = ..., covariant: bool = ... ) -> None: ... @property def args(self) -> ParamSpecArgs: ... @property def kwargs(self) -> ParamSpecKwargs: ... - Concatenate: _SpecialForm = ... - TypeAlias: _SpecialForm = ... - TypeGuard: _SpecialForm = ... - -# PEP 646 -Unpack: _SpecialForm = ... - + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + def is_typeddict(tp: object) -> bool: ... + +# New things in 3.11 +if sys.version_info >= (3, 11): + from typing import Never as Never, Self as Self, assert_never as assert_never, reveal_type as reveal_type +else: + Self: _SpecialForm + Never: _SpecialForm + def reveal_type(__obj: _T) -> _T: ... + def assert_never(__arg: NoReturn) -> NoReturn: ... + +# Experimental (hopefully these will be in 3.11) +Required: _SpecialForm +NotRequired: _SpecialForm +LiteralString: _SpecialForm +Unpack: _SpecialForm + +@final class TypeVarTuple: __name__: str def __init__(self, name: str) -> None: ... + def __iter__(self) -> Any: ... 
# Unpack[Self] + +def dataclass_transform( + *, + eq_default: bool = ..., + order_default: bool = ..., + kw_only_default: bool = ..., + field_descriptors: tuple[type[Any] | Callable[..., Any], ...] = ..., +) -> Callable[[_T], _T]: ... + +# Types not yet implemented in typing_extensions library -# PEP 655 -Required: _SpecialForm = ... -NotRequired: _SpecialForm = ... +# Proposed extension to PEP 647 +StrictTypeGuard: _SpecialForm = ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unicodedata.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unicodedata.pyi index 66c93f7439f7..aec8867df48d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unicodedata.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unicodedata.pyi @@ -1,10 +1,13 @@ import sys from typing import Any, TypeVar +from typing_extensions import final ucd_3_2_0: UCD -ucnhash_CAPI: Any unidata_version: str +if sys.version_info < (3, 10): + ucnhash_CAPI: Any + _T = TypeVar("_T") def bidirectional(__chr: str) -> str: ... @@ -23,8 +26,8 @@ def mirrored(__chr: str) -> int: ... def name(__chr: str, __default: _T = ...) -> str | _T: ... def normalize(__form: str, __unistr: str) -> str: ... def numeric(__chr: str, __default: _T = ...) -> float | _T: ... - -class UCD(object): +@final +class UCD: # The methods below are constructed from the same array in C # (unicodedata_functions) and hence identical to the methods above. 
unidata_version: str diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/__init__.pyi index 8f0ef896fa0c..4bbf98c992c1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/__init__.pyi @@ -1,5 +1,4 @@ import sys -from unittest.async_case import * from .case import ( FunctionTestCase as FunctionTestCase, @@ -10,15 +9,78 @@ from .case import ( skipIf as skipIf, skipUnless as skipUnless, ) +from .loader import ( + TestLoader as TestLoader, + defaultTestLoader as defaultTestLoader, + findTestCases as findTestCases, + getTestCaseNames as getTestCaseNames, + makeSuite as makeSuite, +) +from .main import TestProgram as TestProgram, main as main +from .result import TestResult as TestResult +from .runner import TextTestResult as TextTestResult, TextTestRunner as TextTestRunner +from .signals import ( + installHandler as installHandler, + registerResult as registerResult, + removeHandler as removeHandler, + removeResult as removeResult, +) +from .suite import BaseTestSuite as BaseTestSuite, TestSuite as TestSuite if sys.version_info >= (3, 8): + from unittest.async_case import * + from .case import addModuleCleanup as addModuleCleanup -from unittest.loader import * -from unittest.main import * -from unittest.result import TestResult as TestResult -from unittest.runner import * -from unittest.signals import * -from unittest.suite import * + __all__ = [ + "TestResult", + "TestCase", + "IsolatedAsyncioTestCase", + "TestSuite", + "TextTestRunner", + "TestLoader", + "FunctionTestCase", + "main", + "defaultTestLoader", + "SkipTest", + "skip", + "skipIf", + "skipUnless", + "expectedFailure", + "TextTestResult", + "installHandler", + "registerResult", + "removeResult", + "removeHandler", + "addModuleCleanup", + "getTestCaseNames", + "makeSuite", + "findTestCases", + ] + +else: + __all__ = [ 
+ "TestResult", + "TestCase", + "TestSuite", + "TextTestRunner", + "TestLoader", + "FunctionTestCase", + "main", + "defaultTestLoader", + "SkipTest", + "skip", + "skipIf", + "skipUnless", + "expectedFailure", + "TextTestResult", + "installHandler", + "registerResult", + "removeResult", + "removeHandler", + "getTestCaseNames", + "makeSuite", + "findTestCases", + ] def load_tests(loader: TestLoader, tests: TestSuite, pattern: str | None) -> TestSuite: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/_log.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/_log.pyi new file mode 100644 index 000000000000..c27de6386038 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/_log.pyi @@ -0,0 +1,28 @@ +import logging +import sys +from types import TracebackType +from typing import ClassVar, Generic, NamedTuple, TypeVar +from unittest.case import TestCase + +_L = TypeVar("_L", None, _LoggingWatcher) + +class _LoggingWatcher(NamedTuple): + records: list[logging.LogRecord] + output: list[str] + +class _AssertLogsContext(Generic[_L]): + LOGGING_FORMAT: ClassVar[str] + test_case: TestCase + logger_name: str + level: int + msg: None + if sys.version_info >= (3, 10): + def __init__(self, test_case: TestCase, logger_name: str, level: int, no_logs: bool) -> None: ... + no_logs: bool + else: + def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... + + def __enter__(self) -> _L: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/async_case.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/async_case.pyi index bdf534b37c9f..0442ec444559 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/async_case.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/async_case.pyi @@ -1,10 +1,11 @@ -import sys -from typing import Any, Awaitable, Callable +from typing import Awaitable, Callable +from typing_extensions import ParamSpec from .case import TestCase -if sys.version_info >= (3, 8): - class IsolatedAsyncioTestCase(TestCase): - async def asyncSetUp(self) -> None: ... - async def asyncTearDown(self) -> None: ... - def addAsyncCleanup(self, __func: Callable[..., Awaitable[Any]], *args: Any, **kwargs: Any) -> None: ... +_P = ParamSpec("_P") + +class IsolatedAsyncioTestCase(TestCase): + async def asyncSetUp(self) -> None: ... + async def asyncTearDown(self) -> None: ... + def addAsyncCleanup(self, __func: Callable[_P, Awaitable[object]], *args: _P.args, **kwargs: _P.kwargs) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/case.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/case.pyi index ebb1f2457e68..28b5a07aabf6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/case.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/case.pyi @@ -3,14 +3,15 @@ import logging import sys import unittest.result from _typeshed import Self -from collections.abc import Set +from collections.abc import Set as AbstractSet +from contextlib import AbstractContextManager from types import TracebackType from typing import ( Any, AnyStr, Callable, + ClassVar, Container, - ContextManager, Generic, Iterable, Mapping, @@ -18,11 +19,10 @@ from typing import ( NoReturn, Pattern, Sequence, - Tuple, - Type, TypeVar, overload, ) +from typing_extensions import ParamSpec from warnings import WarningMessage if sys.version_info >= (3, 9): @@ -30,9 +30,39 @@ if sys.version_info >= (3, 9): _E = TypeVar("_E", bound=BaseException) _FT = TypeVar("_FT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +DIFF_OMITTED: str + +class _BaseTestCaseContext: + def __init__(self, test_case: TestCase) -> None: ... + +if sys.version_info >= (3, 9): + from unittest._log import _AssertLogsContext, _LoggingWatcher +else: + # Unused dummy for _AssertLogsContext. Starting with Python 3.10, + # this is generic over the logging watcher, but in lower versions + # the watcher is hard-coded. + _L = TypeVar("_L") + + class _LoggingWatcher(NamedTuple): + records: list[logging.LogRecord] + output: list[str] + + class _AssertLogsContext(_BaseTestCaseContext, Generic[_L]): + LOGGING_FORMAT: ClassVar[str] + test_case: TestCase + logger_name: str + level: int + msg: None + def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... + def __enter__(self) -> _LoggingWatcher: ... 
+ def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... if sys.version_info >= (3, 8): - def addModuleCleanup(__function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... + def addModuleCleanup(__function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... def doModuleCleanups() -> None: ... def expectedFailure(test_item: _FT) -> _FT: ... @@ -44,7 +74,7 @@ class SkipTest(Exception): def __init__(self, reason: str) -> None: ... class TestCase: - failureException: Type[BaseException] + failureException: type[BaseException] longMessage: bool maxDiff: int | None # undocumented @@ -52,6 +82,7 @@ class TestCase: # undocumented _testMethodDoc: str def __init__(self, methodName: str = ...) -> None: ... + def __eq__(self, other: object) -> bool: ... def setUp(self) -> None: ... def tearDown(self) -> None: ... @classmethod @@ -61,7 +92,7 @@ class TestCase: def run(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... def __call__(self, result: unittest.result.TestResult | None = ...) -> unittest.result.TestResult | None: ... def skipTest(self, reason: Any) -> None: ... - def subTest(self, msg: Any = ..., **params: Any) -> ContextManager[None]: ... + def subTest(self, msg: Any = ..., **params: Any) -> AbstractContextManager[None]: ... def debug(self) -> None: ... def _addSkip(self, result: unittest.result.TestResult, test_case: TestCase, reason: str) -> None: ... def assertEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... @@ -74,61 +105,75 @@ class TestCase: def assertIsNotNone(self, obj: Any, msg: Any = ...) -> None: ... def assertIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = ...) -> None: ... def assertNotIn(self, member: Any, container: Iterable[Any] | Container[Any], msg: Any = ...) -> None: ... 
- def assertIsInstance(self, obj: Any, cls: type | Tuple[type, ...], msg: Any = ...) -> None: ... - def assertNotIsInstance(self, obj: Any, cls: type | Tuple[type, ...], msg: Any = ...) -> None: ... + def assertIsInstance(self, obj: Any, cls: type | tuple[type, ...], msg: Any = ...) -> None: ... + def assertNotIsInstance(self, obj: Any, cls: type | tuple[type, ...], msg: Any = ...) -> None: ... def assertGreater(self, a: Any, b: Any, msg: Any = ...) -> None: ... def assertGreaterEqual(self, a: Any, b: Any, msg: Any = ...) -> None: ... def assertLess(self, a: Any, b: Any, msg: Any = ...) -> None: ... def assertLessEqual(self, a: Any, b: Any, msg: Any = ...) -> None: ... + # `assertRaises`, `assertRaisesRegex`, and `assertRaisesRegexp` + # are not using `ParamSpec` intentionally, + # because they might be used with explicitly wrong arg types to raise some error in tests. @overload - def assertRaises( # type: ignore + def assertRaises( # type: ignore[misc] self, - expected_exception: Type[BaseException] | Tuple[Type[BaseException], ...], - callable: Callable[..., Any], + expected_exception: type[BaseException] | tuple[type[BaseException], ...], + callable: Callable[..., object], *args: Any, **kwargs: Any, ) -> None: ... @overload - def assertRaises(self, expected_exception: Type[_E] | Tuple[Type[_E], ...], msg: Any = ...) -> _AssertRaisesContext[_E]: ... + def assertRaises(self, expected_exception: type[_E] | tuple[type[_E], ...], msg: Any = ...) -> _AssertRaisesContext[_E]: ... @overload - def assertRaisesRegex( # type: ignore + def assertRaisesRegex( # type: ignore[misc] self, - expected_exception: Type[BaseException] | Tuple[Type[BaseException], ...], + expected_exception: type[BaseException] | tuple[type[BaseException], ...], expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - callable: Callable[..., Any], + callable: Callable[..., object], *args: Any, **kwargs: Any, ) -> None: ... 
@overload def assertRaisesRegex( self, - expected_exception: Type[_E] | Tuple[Type[_E], ...], + expected_exception: type[_E] | tuple[type[_E], ...], expected_regex: str | bytes | Pattern[str] | Pattern[bytes], msg: Any = ..., ) -> _AssertRaisesContext[_E]: ... @overload - def assertWarns( # type: ignore - self, expected_warning: Type[Warning] | Tuple[Type[Warning], ...], callable: Callable[..., Any], *args: Any, **kwargs: Any + def assertWarns( # type: ignore[misc] + self, + expected_warning: type[Warning] | tuple[type[Warning], ...], + callable: Callable[_P, object], + *args: _P.args, + **kwargs: _P.kwargs, ) -> None: ... @overload - def assertWarns(self, expected_warning: Type[Warning] | Tuple[Type[Warning], ...], msg: Any = ...) -> _AssertWarnsContext: ... + def assertWarns(self, expected_warning: type[Warning] | tuple[type[Warning], ...], msg: Any = ...) -> _AssertWarnsContext: ... @overload - def assertWarnsRegex( # type: ignore + def assertWarnsRegex( # type: ignore[misc] self, - expected_warning: Type[Warning] | Tuple[Type[Warning], ...], + expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - callable: Callable[..., Any], - *args: Any, - **kwargs: Any, + callable: Callable[_P, object], + *args: _P.args, + **kwargs: _P.kwargs, ) -> None: ... @overload def assertWarnsRegex( self, - expected_warning: Type[Warning] | Tuple[Type[Warning], ...], + expected_warning: type[Warning] | tuple[type[Warning], ...], expected_regex: str | bytes | Pattern[str] | Pattern[bytes], msg: Any = ..., ) -> _AssertWarnsContext: ... - def assertLogs(self, logger: str | logging.Logger | None = ..., level: int | str | None = ...) -> _AssertLogsContext: ... + def assertLogs( + self, logger: str | logging.Logger | None = ..., level: int | str | None = ... + ) -> _AssertLogsContext[_LoggingWatcher]: ... 
+ if sys.version_info >= (3, 10): + def assertNoLogs( + self, logger: str | logging.Logger | None = ..., level: int | str | None = ... + ) -> _AssertLogsContext[None]: ... + @overload def assertAlmostEqual( self, first: float, second: float, places: int | None = ..., msg: Any = ..., delta: float | None = ... @@ -160,14 +205,14 @@ class TestCase: def assertRegex(self, text: AnyStr, expected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... def assertNotRegex(self, text: AnyStr, unexpected_regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... def assertCountEqual(self, first: Iterable[Any], second: Iterable[Any], msg: Any = ...) -> None: ... - def addTypeEqualityFunc(self, typeobj: Type[Any], function: Callable[..., None]) -> None: ... + def addTypeEqualityFunc(self, typeobj: type[Any], function: Callable[..., None]) -> None: ... def assertMultiLineEqual(self, first: str, second: str, msg: Any = ...) -> None: ... def assertSequenceEqual( - self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = ..., seq_type: Type[Sequence[Any]] | None = ... + self, seq1: Sequence[Any], seq2: Sequence[Any], msg: Any = ..., seq_type: type[Sequence[Any]] | None = ... ) -> None: ... def assertListEqual(self, list1: list[Any], list2: list[Any], msg: Any = ...) -> None: ... - def assertTupleEqual(self, tuple1: Tuple[Any, ...], tuple2: Tuple[Any, ...], msg: Any = ...) -> None: ... - def assertSetEqual(self, set1: Set[object], set2: Set[object], msg: Any = ...) -> None: ... + def assertTupleEqual(self, tuple1: tuple[Any, ...], tuple2: tuple[Any, ...], msg: Any = ...) -> None: ... + def assertSetEqual(self, set1: AbstractSet[object], set2: AbstractSet[object], msg: Any = ...) -> None: ... def assertDictEqual(self, d1: Mapping[Any, object], d2: Mapping[Any, object], msg: Any = ...) -> None: ... def fail(self, msg: Any = ...) -> NoReturn: ... def countTestCases(self) -> int: ... @@ -175,60 +220,66 @@ class TestCase: def id(self) -> str: ... 
def shortDescription(self) -> str | None: ... if sys.version_info >= (3, 8): - def addCleanup(self, __function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... + def addCleanup(self, __function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... else: - def addCleanup(self, function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... + def addCleanup(self, function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def doCleanups(self) -> None: ... if sys.version_info >= (3, 8): @classmethod - def addClassCleanup(cls, __function: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... + def addClassCleanup(cls, __function: Callable[_P, object], *args: _P.args, **kwargs: _P.kwargs) -> None: ... @classmethod def doClassCleanups(cls) -> None: ... + def _formatMessage(self, msg: str | None, standardMsg: str) -> str: ... # undocumented def _getAssertEqualityFunc(self, first: Any, second: Any) -> Callable[..., None]: ... # undocumented - # below is deprecated - def failUnlessEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def assertEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def failIfEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def assertNotEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... - def failUnless(self, expr: bool, msg: Any = ...) -> None: ... - def assert_(self, expr: bool, msg: Any = ...) -> None: ... - def failIf(self, expr: bool, msg: Any = ...) -> None: ... - @overload - def failUnlessRaises( # type: ignore - self, - exception: Type[BaseException] | Tuple[Type[BaseException], ...], - callable: Callable[..., Any] = ..., - *args: Any, - **kwargs: Any, - ) -> None: ... - @overload - def failUnlessRaises(self, exception: Type[_E] | Tuple[Type[_E], ...], msg: Any = ...) -> _AssertRaisesContext[_E]: ... - def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) 
-> None: ... - def assertAlmostEquals(self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ...) -> None: ... - def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... - def assertNotAlmostEquals( - self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ... - ) -> None: ... - def assertRegexpMatches(self, text: AnyStr, regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... - def assertNotRegexpMatches(self, text: AnyStr, regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... - @overload - def assertRaisesRegexp( # type: ignore - self, - exception: Type[BaseException] | Tuple[Type[BaseException], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - callable: Callable[..., Any], - *args: Any, - **kwargs: Any, - ) -> None: ... - @overload - def assertRaisesRegexp( - self, - exception: Type[_E] | Tuple[Type[_E], ...], - expected_regex: str | bytes | Pattern[str] | Pattern[bytes], - msg: Any = ..., - ) -> _AssertRaisesContext[_E]: ... - def assertDictContainsSubset(self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = ...) -> None: ... + if sys.version_info < (3, 12): + def failUnlessEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... + def assertEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... + def failIfEqual(self, first: Any, second: Any, msg: Any = ...) -> None: ... + def assertNotEquals(self, first: Any, second: Any, msg: Any = ...) -> None: ... + def failUnless(self, expr: bool, msg: Any = ...) -> None: ... + def assert_(self, expr: bool, msg: Any = ...) -> None: ... + def failIf(self, expr: bool, msg: Any = ...) -> None: ... + @overload + def failUnlessRaises( # type: ignore[misc] + self, + exception: type[BaseException] | tuple[type[BaseException], ...], + callable: Callable[_P, object] = ..., + *args: _P.args, + **kwargs: _P.kwargs, + ) -> None: ... 
+ @overload + def failUnlessRaises(self, exception: type[_E] | tuple[type[_E], ...], msg: Any = ...) -> _AssertRaisesContext[_E]: ... + def failUnlessAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... + def assertAlmostEquals( + self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ... + ) -> None: ... + def failIfAlmostEqual(self, first: float, second: float, places: int = ..., msg: Any = ...) -> None: ... + def assertNotAlmostEquals( + self, first: float, second: float, places: int = ..., msg: Any = ..., delta: float = ... + ) -> None: ... + def assertRegexpMatches(self, text: AnyStr, regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... + def assertNotRegexpMatches(self, text: AnyStr, regex: AnyStr | Pattern[AnyStr], msg: Any = ...) -> None: ... + @overload + def assertRaisesRegexp( # type: ignore[misc] + self, + exception: type[BaseException] | tuple[type[BaseException], ...], + expected_regex: str | bytes | Pattern[str] | Pattern[bytes], + callable: Callable[..., object], + *args: Any, + **kwargs: Any, + ) -> None: ... + @overload + def assertRaisesRegexp( + self, + exception: type[_E] | tuple[type[_E], ...], + expected_regex: str | bytes | Pattern[str] | Pattern[bytes], + msg: Any = ..., + ) -> _AssertRaisesContext[_E]: ... + def assertDictContainsSubset( + self, subset: Mapping[Any, Any], dictionary: Mapping[Any, Any], msg: object = ... + ) -> None: ... class FunctionTestCase(TestCase): def __init__( @@ -240,15 +291,11 @@ class FunctionTestCase(TestCase): ) -> None: ... def runTest(self) -> None: ... -class _LoggingWatcher(NamedTuple): - records: list[logging.LogRecord] - output: list[str] - class _AssertRaisesContext(Generic[_E]): exception: _E def __enter__(self: Self) -> Self: ... 
def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> bool: ... if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... @@ -260,18 +307,5 @@ class _AssertWarnsContext: warnings: list[WarningMessage] def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... - -class _AssertLogsContext: - LOGGING_FORMAT: str - records: list[logging.LogRecord] - output: list[str] - def __init__(self, test_case: TestCase, logger_name: str, level: int) -> None: ... - if sys.version_info >= (3, 10): - def __enter__(self) -> _LoggingWatcher | None: ... - else: - def __enter__(self) -> _LoggingWatcher: ... - def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> bool | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/loader.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/loader.pyi index d3cb4cef733b..8b3c82233cec 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/loader.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/loader.pyi @@ -3,13 +3,15 @@ import unittest.case import unittest.result import unittest.suite from types import ModuleType -from typing import Any, Callable, List, Sequence, Type +from typing import Any, Callable, Pattern, Sequence _SortComparisonMethod = Callable[[str, str], int] -_SuiteClass = Callable[[List[unittest.case.TestCase]], unittest.suite.TestSuite] +_SuiteClass = Callable[[list[unittest.case.TestCase]], unittest.suite.TestSuite] + +VALID_MODULE_NAME: Pattern[str] class TestLoader: - errors: list[Type[BaseException]] + errors: list[type[BaseException]] testMethodPrefix: str sortTestMethodsUsing: _SortComparisonMethod @@ -17,18 +19,18 @@ class TestLoader: testNamePatterns: list[str] | None suiteClass: _SuiteClass - def loadTestsFromTestCase(self, testCaseClass: Type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... + def loadTestsFromTestCase(self, testCaseClass: type[unittest.case.TestCase]) -> unittest.suite.TestSuite: ... def loadTestsFromModule(self, module: ModuleType, *args: Any, pattern: Any = ...) -> unittest.suite.TestSuite: ... def loadTestsFromName(self, name: str, module: ModuleType | None = ...) -> unittest.suite.TestSuite: ... def loadTestsFromNames(self, names: Sequence[str], module: ModuleType | None = ...) -> unittest.suite.TestSuite: ... - def getTestCaseNames(self, testCaseClass: Type[unittest.case.TestCase]) -> Sequence[str]: ... + def getTestCaseNames(self, testCaseClass: type[unittest.case.TestCase]) -> Sequence[str]: ... def discover(self, start_dir: str, pattern: str = ..., top_level_dir: str | None = ...) -> unittest.suite.TestSuite: ... 
defaultTestLoader: TestLoader if sys.version_info >= (3, 7): def getTestCaseNames( - testCaseClass: Type[unittest.case.TestCase], + testCaseClass: type[unittest.case.TestCase], prefix: str, sortUsing: _SortComparisonMethod = ..., testNamePatterns: list[str] | None = ..., @@ -36,11 +38,11 @@ if sys.version_info >= (3, 7): else: def getTestCaseNames( - testCaseClass: Type[unittest.case.TestCase], prefix: str, sortUsing: _SortComparisonMethod = ... + testCaseClass: type[unittest.case.TestCase], prefix: str, sortUsing: _SortComparisonMethod = ... ) -> Sequence[str]: ... def makeSuite( - testCaseClass: Type[unittest.case.TestCase], + testCaseClass: type[unittest.case.TestCase], prefix: str = ..., sortUsing: _SortComparisonMethod = ..., suiteClass: _SuiteClass = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/main.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/main.pyi index cd887cec27d0..d3c0ca722f4d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/main.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/main.pyi @@ -4,7 +4,10 @@ import unittest.loader import unittest.result import unittest.suite from types import ModuleType -from typing import Any, Iterable, Protocol, Type +from typing import Any, Iterable, Protocol + +MAIN_EXAMPLES: str +MODULE_EXAMPLES: str class _TestRunner(Protocol): def run(self, test: unittest.suite.TestSuite | unittest.case.TestCase) -> unittest.result.TestResult: ... @@ -27,7 +30,7 @@ class TestProgram: module: None | str | ModuleType = ..., defaultTest: str | Iterable[str] | None = ..., argv: list[str] | None = ..., - testRunner: Type[_TestRunner] | _TestRunner | None = ..., + testRunner: type[_TestRunner] | _TestRunner | None = ..., testLoader: unittest.loader.TestLoader = ..., exit: bool = ..., verbosity: int = ..., @@ -44,6 +47,7 @@ class TestProgram: def createTests(self, from_discovery: bool = ..., Loader: unittest.loader.TestLoader | None = ...) 
-> None: ... else: def createTests(self) -> None: ... + def runTests(self) -> None: ... # undocumented main = TestProgram diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi index 7ccaf3acaeb5..7b14b0f78250 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/mock.pyi @@ -1,28 +1,63 @@ import sys -from typing import Any, Callable, Generic, Iterable, List, Mapping, Sequence, Tuple, Type, TypeVar, overload +from _typeshed import Self +from typing import Any, Awaitable, Callable, Generic, Iterable, Mapping, Sequence, TypeVar, overload +from typing_extensions import Literal -_F = TypeVar("_F", bound=Callable[..., Any]) _T = TypeVar("_T") -_TT = TypeVar("_TT", bound=Type[Any]) +_TT = TypeVar("_TT", bound=type[Any]) _R = TypeVar("_R") -__all__ = [ - "Mock", - "MagicMock", - "patch", - "sentinel", - "DEFAULT", - "ANY", - "call", - "create_autospec", - "AsyncMock", - "FILTER_DIR", - "NonCallableMock", - "NonCallableMagicMock", - "mock_open", - "PropertyMock", - "seal", -] +if sys.version_info >= (3, 8): + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "AsyncMock", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) +elif sys.version_info >= (3, 7): + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + "seal", + ) +else: + __all__ = ( + "Mock", + "MagicMock", + "patch", + "sentinel", + "DEFAULT", + "ANY", + "call", + "create_autospec", + "FILTER_DIR", + "NonCallableMock", + "NonCallableMagicMock", + "mock_open", + "PropertyMock", + ) __version__: str FILTER_DIR: Any @@ -40,46 +75,50 @@ class 
_Sentinel: sentinel: Any DEFAULT: Any -class _Call(Tuple[Any, ...]): +class _Call(tuple[Any, ...]): def __new__( - cls, value: Any = ..., name: Any | None = ..., parent: Any | None = ..., two: bool = ..., from_kall: bool = ... - ) -> Any: ... + cls: type[Self], + value: Any = ..., + name: Any | None = ..., + parent: Any | None = ..., + two: bool = ..., + from_kall: bool = ..., + ) -> Self: ... name: Any parent: Any from_kall: Any def __init__( self, value: Any = ..., name: Any | None = ..., parent: Any | None = ..., two: bool = ..., from_kall: bool = ... ) -> None: ... - def __eq__(self, other: Any) -> bool: ... - __ne__: Any + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... def __getattr__(self, attr: Any) -> Any: ... - def count(self, *args: Any, **kwargs: Any) -> Any: ... - def index(self, *args: Any, **kwargs: Any) -> Any: ... + def __getattribute__(self, attr: str) -> Any: ... + if sys.version_info >= (3, 8): + @property + def args(self): ... + @property + def kwargs(self): ... + def call_list(self) -> Any: ... call: _Call -class _CallList(List[_Call]): +class _CallList(list[_Call]): def __contains__(self, value: Any) -> bool: ... -class _MockIter: - obj: Any - def __init__(self, obj: Any) -> None: ... - def __iter__(self) -> Any: ... - def __next__(self) -> Any: ... - class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... -class NonCallableMock(Base, Any): # type: ignore - def __new__(__cls, *args: Any, **kw: Any) -> NonCallableMock: ... +class NonCallableMock(Base, Any): + def __new__(__cls: type[Self], *args: Any, **kw: Any) -> Self: ... 
def __init__( self, - spec: list[str] | object | Type[object] | None = ..., + spec: list[str] | object | type[object] | None = ..., wraps: Any | None = ..., name: str | None = ..., - spec_set: list[str] | object | Type[object] | None = ..., + spec_set: list[str] | object | type[object] | None = ..., parent: NonCallableMock | None = ..., _spec_state: Any | None = ..., _new_name: str = ..., @@ -90,6 +129,8 @@ class NonCallableMock(Base, Any): # type: ignore **kwargs: Any, ) -> None: ... def __getattr__(self, name: str) -> Any: ... + def __delattr__(self, name: str) -> None: ... + def __setattr__(self, name: str, value: Any) -> None: ... if sys.version_info >= (3, 8): def _calls_repr(self, prefix: str = ...) -> str: ... def assert_called_with(self, *args: Any, **kwargs: Any) -> None: ... @@ -107,10 +148,12 @@ class NonCallableMock(Base, Any): # type: ignore else: def assert_called(_mock_self) -> None: ... def assert_called_once(_mock_self) -> None: ... + def reset_mock(self, visited: Any = ..., *, return_value: bool = ..., side_effect: bool = ...) -> None: ... if sys.version_info >= (3, 7): def _extract_mock_name(self) -> str: ... def _get_call_signature_from_name(self, name: str) -> Any: ... + def assert_any_call(self, *args: Any, **kwargs: Any) -> None: ... def assert_has_calls(self, calls: Sequence[_Call], any_order: bool = ...) -> None: ... def mock_add_spec(self, spec: Any, spec_set: bool = ...) -> None: ... @@ -125,7 +168,7 @@ class NonCallableMock(Base, Any): # type: ignore call_args_list: _CallList mock_calls: _CallList def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... - def _call_matcher(self, _call: Tuple[_Call, ...]) -> _Call: ... + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... class CallableMixin(Base): @@ -144,7 +187,10 @@ class CallableMixin(Base): _new_parent: Any | None = ..., **kwargs: Any, ) -> None: ... 
- def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... + if sys.version_info >= (3, 8): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + else: + def __call__(_mock_self, *args: Any, **kwargs: Any) -> Any: ... class Mock(CallableMixin, NonCallableMock): ... @@ -163,23 +209,49 @@ class _patch(Generic[_T]): additional_patchers: Any # If new==DEFAULT, self is _patch[Any]. Ideally we'd be able to add an overload for it so that self is _patch[MagicMock], # but that's impossible with the current type system. - def __init__( - self: _patch[_T], - getter: Callable[[], Any], - attribute: str, - new: _T, - spec: Any | None, - create: bool, - spec_set: Any | None, - autospec: Any | None, - new_callable: Any | None, - kwargs: Mapping[str, Any], - ) -> None: ... + if sys.version_info >= (3, 10): + def __init__( + self: _patch[_T], + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Any | None, + create: bool, + spec_set: Any | None, + autospec: Any | None, + new_callable: Any | None, + kwargs: Mapping[str, Any], + *, + unsafe: bool = ..., + ) -> None: ... + else: + def __init__( + self: _patch[_T], + getter: Callable[[], Any], + attribute: str, + new: _T, + spec: Any | None, + create: bool, + spec_set: Any | None, + autospec: Any | None, + new_callable: Any | None, + kwargs: Mapping[str, Any], + ) -> None: ... + def copy(self) -> _patch[_T]: ... + @overload + def __call__(self, func: _TT) -> _TT: ... + @overload def __call__(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + if sys.version_info >= (3, 8): + def decoration_helper(self, patched, args, keywargs): ... + def decorate_class(self, klass: _TT) -> _TT: ... - def decorate_callable(self, func: _F) -> _F: ... - def get_original(self) -> Tuple[Any, bool]: ... + def decorate_callable(self, func: Callable[..., _R]) -> Callable[..., _R]: ... + if sys.version_info >= (3, 8): + def decorate_async_callable(self, func: Callable[..., Awaitable[_R]]) -> Callable[..., Awaitable[_R]]: ... 
+ + def get_original(self) -> tuple[Any, bool]: ... target: Any temp_original: Any is_local: bool @@ -202,13 +274,13 @@ class _patch_dict: class _patcher: TEST_PREFIX: str - dict: Type[_patch_dict] + dict: type[_patch_dict] if sys.version_info >= (3, 8): # This overload also covers the case, where new==DEFAULT. In this case, the return type is _patch[Any]. # Ideally we'd be able to add an overload for it so that the return type is _patch[MagicMock], # but that's impossible with the current type system. @overload - def __call__( # type: ignore + def __call__( # type: ignore[misc] self, target: Any, new: _T, @@ -220,7 +292,7 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... @overload - def __call__( # type: ignore + def __call__( self, target: Any, *, @@ -233,7 +305,7 @@ class _patcher: ) -> _patch[MagicMock | AsyncMock]: ... else: @overload - def __call__( # type: ignore + def __call__( # type: ignore[misc] self, target: Any, new: _T, @@ -245,7 +317,7 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... @overload - def __call__( # type: ignore + def __call__( self, target: Any, *, @@ -258,7 +330,7 @@ class _patcher: ) -> _patch[MagicMock]: ... if sys.version_info >= (3, 8): @overload - def object( # type: ignore + def object( # type: ignore[misc] self, target: Any, attribute: str, @@ -271,7 +343,7 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... @overload - def object( # type: ignore + def object( self, target: Any, attribute: str, @@ -285,7 +357,7 @@ class _patcher: ) -> _patch[MagicMock | AsyncMock]: ... else: @overload - def object( # type: ignore + def object( # type: ignore[misc] self, target: Any, attribute: str, @@ -298,7 +370,7 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... @overload - def object( # type: ignore + def object( self, target: Any, attribute: str, @@ -310,6 +382,7 @@ class _patcher: new_callable: Any | None = ..., **kwargs: Any, ) -> _patch[MagicMock]: ... 
+ def multiple( self, target: Any, @@ -318,8 +391,8 @@ class _patcher: spec_set: Any | None = ..., autospec: Any | None = ..., new_callable: Any | None = ..., - **kwargs: _T, - ) -> _patch[_T]: ... + **kwargs: Any, + ) -> _patch[Any]: ... def stopall(self) -> None: ... patch: _patcher @@ -348,27 +421,44 @@ if sys.version_info >= (3, 8): await_count: int await_args: _Call | None await_args_list: _CallList + class AsyncMagicMixin(MagicMixin): def __init__(self, *args: Any, **kw: Any) -> None: ... - class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... + + class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... # type: ignore # argument disparities between base classes class MagicProxy: name: Any parent: Any - def __init__(self, name: Any, parent: Any) -> None: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - def create_mock(self) -> Any: ... - def __get__(self, obj: Any, _type: Any | None = ...) -> Any: ... + def __init__(self, name, parent) -> None: ... + if sys.version_info < (3, 8): + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + + def create_mock(self): ... + def __get__(self, obj, _type: Any | None = ...): ... class _ANY: - def __eq__(self, other: Any) -> bool: ... - def __ne__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> Literal[True]: ... + def __ne__(self, other: object) -> Literal[False]: ... ANY: Any -def create_autospec( - spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Any | None = ..., _name: Any | None = ..., **kwargs: Any -) -> Any: ... +if sys.version_info >= (3, 10): + def create_autospec( + spec: Any, + spec_set: Any = ..., + instance: Any = ..., + _parent: Any | None = ..., + _name: Any | None = ..., + *, + unsafe: bool = ..., + **kwargs: Any, + ) -> Any: ... + +else: + def create_autospec( + spec: Any, spec_set: Any = ..., instance: Any = ..., _parent: Any | None = ..., _name: Any | None = ..., **kwargs: Any + ) -> Any: ... 
class _SpecState: spec: Any diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/result.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/result.pyi index 676c0cd4aeda..1c79f8ab648c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/result.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/result.pyi @@ -1,19 +1,22 @@ import unittest.case from types import TracebackType -from typing import Any, Callable, TextIO, Tuple, Type, TypeVar, Union +from typing import Any, Callable, TextIO, TypeVar, Union -_SysExcInfoType = Union[Tuple[Type[BaseException], BaseException, TracebackType], Tuple[None, None, None]] +_SysExcInfoType = Union[tuple[type[BaseException], BaseException, TracebackType], tuple[None, None, None]] _F = TypeVar("_F", bound=Callable[..., Any]) +STDOUT_LINE: str +STDERR_LINE: str + # undocumented def failfast(method: _F) -> _F: ... class TestResult: - errors: list[Tuple[unittest.case.TestCase, str]] - failures: list[Tuple[unittest.case.TestCase, str]] - skipped: list[Tuple[unittest.case.TestCase, str]] - expectedFailures: list[Tuple[unittest.case.TestCase, str]] + errors: list[tuple[unittest.case.TestCase, str]] + failures: list[tuple[unittest.case.TestCase, str]] + skipped: list[tuple[unittest.case.TestCase, str]] + expectedFailures: list[tuple[unittest.case.TestCase, str]] unexpectedSuccesses: list[unittest.case.TestCase] shouldStop: bool testsRun: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/runner.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/runner.pyi index 128909b2090b..479a9f2c304c 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/runner.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/runner.pyi @@ -1,7 +1,7 @@ import unittest.case import unittest.result import unittest.suite -from typing import Callable, TextIO, Tuple, Type +from typing import Callable, Iterable, TextIO 
_ResultClassType = Callable[[TextIO, bool, int], unittest.result.TestResult] @@ -10,14 +10,14 @@ class TextTestResult(unittest.result.TestResult): dots: bool # undocumented separator1: str separator2: str - showall: bool # undocumented + showAll: bool # undocumented stream: TextIO # undocumented def __init__(self, stream: TextIO, descriptions: bool, verbosity: int) -> None: ... def getDescription(self, test: unittest.case.TestCase) -> str: ... def printErrors(self) -> None: ... - def printErrorList(self, flavour: str, errors: Tuple[unittest.case.TestCase, str]) -> None: ... + def printErrorList(self, flavour: str, errors: Iterable[tuple[unittest.case.TestCase, str]]) -> None: ... -class TextTestRunner(object): +class TextTestRunner: resultclass: _ResultClassType def __init__( self, @@ -27,7 +27,7 @@ class TextTestRunner(object): failfast: bool = ..., buffer: bool = ..., resultclass: _ResultClassType | None = ..., - warnings: Type[Warning] | None = ..., + warnings: type[Warning] | None = ..., *, tb_locals: bool = ..., ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/signals.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/signals.pyi index 375b7d736a35..e6f5f95e1eb1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/signals.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/signals.pyi @@ -11,4 +11,4 @@ def removeResult(result: unittest.result.TestResult) -> bool: ... @overload def removeHandler(method: None = ...) -> None: ... @overload -def removeHandler(method: Callable[_P, _T]) -> Callable[_P, _T]: ... # type: ignore +def removeHandler(method: Callable[_P, _T]) -> Callable[_P, _T]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/suite.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/suite.pyi index 396b46eadf5a..f1e168a674a7 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/suite.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/suite.pyi @@ -15,6 +15,7 @@ class BaseTestSuite(Iterable[_TestType]): def debug(self) -> None: ... def countTestCases(self) -> int: ... def __iter__(self) -> Iterator[_TestType]: ... + def __eq__(self, other: object) -> bool: ... class TestSuite(BaseTestSuite): def run(self, result: unittest.result.TestResult, debug: bool = ...) -> unittest.result.TestResult: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi index ffce5d52677c..30ab6061b100 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/unittest/util.pyi @@ -1,7 +1,7 @@ -from typing import Any, Sequence, Tuple, TypeVar +from typing import Any, Sequence, TypeVar _T = TypeVar("_T") -_Mismatch = Tuple[_T, _T, int] +_Mismatch = tuple[_T, _T, int] _MAX_LENGTH: int _PLACEHOLDER_LEN: int @@ -11,11 +11,11 @@ _MIN_COMMON_LEN: int _MIN_DIFF_LEN: int def _shorten(s: str, prefixlen: int, suffixlen: int) -> str: ... -def _common_shorten_repr(*args: str) -> Tuple[str]: ... +def _common_shorten_repr(*args: str) -> tuple[str, ...]: ... def safe_repr(obj: object, short: bool = ...) -> str: ... def strclass(cls: type) -> str: ... -def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> Tuple[list[_T], list[_T]]: ... -def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> Tuple[list[_T], list[_T]]: ... +def sorted_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... 
+def unorderable_list_difference(expected: Sequence[_T], actual: Sequence[_T]) -> tuple[list[_T], list[_T]]: ... def three_way_cmp(x: Any, y: Any) -> int: ... def _count_diff_all_purpose(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... def _count_diff_hashable(actual: Sequence[_T], expected: Sequence[_T]) -> list[_Mismatch[_T]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/error.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/error.pyi index 557f84a40159..954dd4fa0599 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/error.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/error.pyi @@ -1,8 +1,8 @@ from email.message import Message -from typing import IO, Tuple +from typing import IO from urllib.response import addinfourl -# Stubs for urllib.error +__all__ = ["URLError", "HTTPError", "ContentTooShortError"] class URLError(IOError): reason: str | BaseException @@ -13,5 +13,5 @@ class HTTPError(URLError, addinfourl): def __init__(self, url: str, code: int, msg: str, hdrs: Message, fp: IO[bytes] | None) -> None: ... class ContentTooShortError(URLError): - content: Tuple[str, Message] - def __init__(self, message: str, content: Tuple[str, Message]) -> None: ... + content: tuple[str, Message] + def __init__(self, message: str, content: tuple[str, Message]) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/parse.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/parse.pyi index 49a3dd1cedf4..a044e60ee41a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/parse.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/parse.pyi @@ -1,9 +1,33 @@ import sys -from typing import Any, AnyStr, Callable, Generic, Mapping, NamedTuple, Sequence, Tuple, Union, overload +from typing import Any, AnyStr, Callable, Generic, Mapping, NamedTuple, Sequence, Union, overload if sys.version_info >= (3, 9): from types import GenericAlias +__all__ = [ + "urlparse", + "urlunparse", + "urljoin", + "urldefrag", + "urlsplit", + "urlunsplit", + "urlencode", + "parse_qs", + "parse_qsl", + "quote", + "quote_plus", + "quote_from_bytes", + "unquote", + "unquote_plus", + "unquote_to_bytes", + "DefragResult", + "ParseResult", + "SplitResult", + "DefragResultBytes", + "ParseResultBytes", + "SplitResultBytes", +] + _Str = Union[bytes, str] uses_relative: list[str] @@ -21,7 +45,7 @@ class _ResultMixinBase(Generic[AnyStr]): class _ResultMixinStr(_ResultMixinBase[str]): def encode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinBytes: ... -class _ResultMixinBytes(_ResultMixinBase[str]): +class _ResultMixinBytes(_ResultMixinBase[bytes]): def decode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinStr: ... class _NetlocResultMixinBase(Generic[AnyStr]): @@ -35,7 +59,7 @@ class _NetlocResultMixinBase(Generic[AnyStr]): class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ... class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ... -class _DefragResultBase(Tuple[Any, ...], Generic[AnyStr]): +class _DefragResultBase(tuple[Any, ...], Generic[AnyStr]): url: AnyStr fragment: AnyStr @@ -96,7 +120,7 @@ def parse_qsl( errors: str = ..., max_num_fields: int | None = ..., separator: str = ..., -) -> list[Tuple[AnyStr, AnyStr]]: ... 
+) -> list[tuple[AnyStr, AnyStr]]: ... @overload def quote(string: str, safe: _Str = ..., encoding: str | None = ..., errors: str | None = ...) -> str: ... @overload @@ -114,12 +138,12 @@ def urldefrag(url: str) -> DefragResult: ... @overload def urldefrag(url: bytes | None) -> DefragResultBytes: ... def urlencode( - query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[Tuple[Any, Any]] | Sequence[Tuple[Any, Sequence[Any]]], + query: Mapping[Any, Any] | Mapping[Any, Sequence[Any]] | Sequence[tuple[Any, Any]] | Sequence[tuple[Any, Sequence[Any]]], doseq: bool = ..., - safe: AnyStr = ..., + safe: _Str = ..., encoding: str = ..., errors: str = ..., - quote_via: Callable[[str, AnyStr, str, str], str] = ..., + quote_via: Callable[[AnyStr, _Str, str, str], str] = ..., ) -> str: ... def urljoin(base: AnyStr, url: AnyStr | None, allow_fragments: bool = ...) -> AnyStr: ... @overload @@ -132,11 +156,11 @@ def urlsplit(url: str, scheme: str | None = ..., allow_fragments: bool = ...) -> def urlsplit(url: bytes | None, scheme: bytes | None = ..., allow_fragments: bool = ...) -> SplitResultBytes: ... @overload def urlunparse( - components: Tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] + components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None] ) -> AnyStr: ... @overload def urlunparse(components: Sequence[AnyStr | None]) -> AnyStr: ... @overload -def urlunsplit(components: Tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None]) -> AnyStr: ... +def urlunsplit(components: tuple[AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None, AnyStr | None]) -> AnyStr: ... @overload def urlunsplit(components: Sequence[AnyStr | None]) -> AnyStr: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi index 9ac320c680db..265ef2196715 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/request.pyi @@ -1,19 +1,57 @@ import ssl import sys -from _typeshed import StrOrBytesPath +from _typeshed import StrOrBytesPath, SupportsRead from email.message import Message from http.client import HTTPMessage, HTTPResponse, _HTTPConnectionProtocol from http.cookiejar import CookieJar -from typing import IO, Any, Callable, ClassVar, Mapping, NoReturn, Pattern, Sequence, Tuple, TypeVar, overload +from typing import IO, Any, Callable, ClassVar, Iterable, Mapping, MutableMapping, NoReturn, Pattern, Sequence, TypeVar, overload from urllib.error import HTTPError from urllib.response import addclosehook, addinfourl +__all__ = [ + "Request", + "OpenerDirector", + "BaseHandler", + "HTTPDefaultErrorHandler", + "HTTPRedirectHandler", + "HTTPCookieProcessor", + "ProxyHandler", + "HTTPPasswordMgr", + "HTTPPasswordMgrWithDefaultRealm", + "HTTPPasswordMgrWithPriorAuth", + "AbstractBasicAuthHandler", + "HTTPBasicAuthHandler", + "ProxyBasicAuthHandler", + "AbstractDigestAuthHandler", + "HTTPDigestAuthHandler", + "ProxyDigestAuthHandler", + "HTTPHandler", + "FileHandler", + "FTPHandler", + "CacheFTPHandler", + "DataHandler", + "UnknownHandler", + "HTTPErrorProcessor", + "urlopen", + "install_opener", + "build_opener", + "pathname2url", + "url2pathname", + "getproxies", + "urlretrieve", + "urlcleanup", + "URLopener", + "FancyURLopener", + "HTTPSHandler", +] + _T = TypeVar("_T") _UrlopenRet = Any +_DataType = bytes | SupportsRead[bytes] | Iterable[bytes] | None def urlopen( url: str | Request, - data: bytes | None = ..., + data: _DataType | None = ..., timeout: float | None = ..., *, cafile: str | None = ..., @@ -51,8 +89,8 @@ class Request: host: str 
origin_req_host: str selector: str - data: bytes | None - headers: dict[str, str] + data: _DataType + headers: MutableMapping[str, str] unredirected_hdrs: dict[str, str] unverifiable: bool method: str | None @@ -60,8 +98,8 @@ class Request: def __init__( self, url: str, - data: bytes | None = ..., - headers: dict[str, str] = ..., + data: _DataType = ..., + headers: MutableMapping[str, str] = ..., origin_req_host: str | None = ..., unverifiable: bool = ..., method: str | None = ..., @@ -77,13 +115,13 @@ class Request: def get_header(self, header_name: str) -> str | None: ... @overload def get_header(self, header_name: str, default: _T) -> str | _T: ... - def header_items(self) -> list[Tuple[str, str]]: ... + def header_items(self) -> list[tuple[str, str]]: ... def has_proxy(self) -> bool: ... class OpenerDirector: - addheaders: list[Tuple[str, str]] + addheaders: list[tuple[str, str]] def add_handler(self, handler: BaseHandler) -> None: ... - def open(self, fullurl: str | Request, data: bytes | None = ..., timeout: float | None = ...) -> _UrlopenRet: ... + def open(self, fullurl: str | Request, data: _DataType = ..., timeout: float | None = ...) -> _UrlopenRet: ... def error(self, proto: str, *args: Any) -> _UrlopenRet: ... def close(self) -> None: ... @@ -92,6 +130,7 @@ class BaseHandler: parent: OpenerDirector def add_parent(self, parent: OpenerDirector) -> None: ... def close(self) -> None: ... + def __lt__(self, other: object) -> bool: ... class HTTPDefaultErrorHandler(BaseHandler): def http_error_default( @@ -125,13 +164,13 @@ class ProxyHandler(BaseHandler): class HTTPPasswordMgr: def add_password(self, realm: str, uri: str | Sequence[str], user: str, passwd: str) -> None: ... - def find_user_password(self, realm: str, authuri: str) -> Tuple[str | None, str | None]: ... + def find_user_password(self, realm: str, authuri: str) -> tuple[str | None, str | None]: ... def is_suburi(self, base: str, test: str) -> bool: ... 
# undocumented def reduce_uri(self, uri: str, default_port: bool = ...) -> str: ... # undocumented class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr): def add_password(self, realm: str | None, uri: str | Sequence[str], user: str, passwd: str) -> None: ... - def find_user_password(self, realm: str | None, authuri: str) -> Tuple[str | None, str | None]: ... + def find_user_password(self, realm: str | None, authuri: str) -> tuple[str | None, str | None]: ... class HTTPPasswordMgrWithPriorAuth(HTTPPasswordMgrWithDefaultRealm): def add_password( @@ -167,7 +206,7 @@ class AbstractDigestAuthHandler: def retry_http_digest_auth(self, req: Request, auth: str) -> _UrlopenRet | None: ... def get_cnonce(self, nonce: str) -> str: ... def get_authorization(self, req: Request, chal: Mapping[str, str]) -> str: ... - def get_algorithm_impls(self, algorithm: str) -> Tuple[Callable[[str], str], Callable[[str, str], str]]: ... + def get_algorithm_impls(self, algorithm: str) -> tuple[Callable[[str], str], Callable[[str, str], str]]: ... def get_entity_digest(self, data: bytes | None, chal: Mapping[str, str]) -> str | None: ... class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler): @@ -196,9 +235,9 @@ class HTTPSHandler(AbstractHTTPHandler): def https_request(self, request: Request) -> Request: ... # undocumented class FileHandler(BaseHandler): - names: ClassVar[Tuple[str, ...] | None] # undocumented + names: ClassVar[tuple[str, ...] | None] # undocumented def file_open(self, req: Request) -> addinfourl: ... - def get_names(self) -> Tuple[str, ...]: ... # undocumented + def get_names(self) -> tuple[str, ...]: ... # undocumented def open_local_file(self, req: Request) -> addinfourl: ... # undocumented class DataHandler(BaseHandler): @@ -213,7 +252,7 @@ class ftpwrapper: # undocumented def file_close(self) -> None: ... def init(self) -> None: ... def real_close(self) -> None: ... - def retrfile(self, file: str, type: str) -> Tuple[addclosehook, int]: ... 
+ def retrfile(self, file: str, type: str) -> tuple[addclosehook, int]: ... class FTPHandler(BaseHandler): def ftp_open(self, req: Request) -> addinfourl: ... @@ -241,8 +280,8 @@ def urlretrieve( url: str, filename: StrOrBytesPath | None = ..., reporthook: Callable[[int, int, int], None] | None = ..., - data: bytes | None = ..., -) -> Tuple[str, HTTPMessage]: ... + data: _DataType = ..., +) -> tuple[str, HTTPMessage]: ... def urlcleanup() -> None: ... class URLopener: @@ -256,8 +295,8 @@ class URLopener: filename: str | None = ..., reporthook: Callable[[int, int, int], None] | None = ..., data: bytes | None = ..., - ) -> Tuple[str, Message | None]: ... - def addheader(self, *args: Tuple[str, str]) -> None: ... # undocumented + ) -> tuple[str, Message | None]: ... + def addheader(self, *args: tuple[str, str]) -> None: ... # undocumented def cleanup(self) -> None: ... # undocumented def close(self) -> None: ... # undocumented def http_error( @@ -275,8 +314,8 @@ class URLopener: def open_unknown_proxy(self, proxy: str, fullurl: str, data: bytes | None = ...) -> None: ... # undocumented class FancyURLopener(URLopener): - def prompt_user_passwd(self, host: str, realm: str) -> Tuple[str, str]: ... - def get_user_passwd(self, host: str, realm: str, clear_cache: int = ...) -> Tuple[str, str]: ... # undocumented + def prompt_user_passwd(self, host: str, realm: str) -> tuple[str, str]: ... + def get_user_passwd(self, host: str, realm: str, clear_cache: int = ...) -> tuple[str, str]: ... # undocumented def http_error_301( self, url: str, fp: IO[bytes], errcode: int, errmsg: str, headers: HTTPMessage, data: bytes | None = ... ) -> _UrlopenRet | addinfourl | None: ... 
# undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/response.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/response.pyi index dd8a80833ba3..2efec0d47d44 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/response.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/response.pyi @@ -1,18 +1,19 @@ +import sys from _typeshed import Self from email.message import Message from types import TracebackType -from typing import IO, Any, BinaryIO, Callable, Iterable, Tuple, Type, TypeVar +from typing import IO, Any, BinaryIO, Callable, Iterable -_AIUT = TypeVar("_AIUT", bound=addbase) +__all__ = ["addbase", "addclosehook", "addinfo", "addinfourl"] class addbase(BinaryIO): fp: IO[bytes] def __init__(self, fp: IO[bytes]) -> None: ... def __enter__(self: Self) -> Self: ... def __exit__( - self, type: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... - def __iter__(self: _AIUT) -> _AIUT: ... + def __iter__(self: Self) -> Self: ... def __next__(self) -> bytes: ... def close(self) -> None: ... # These methods don't actually exist, but the class inherits at runtime from @@ -36,7 +37,7 @@ class addbase(BinaryIO): class addclosehook(addbase): closehook: Callable[..., object] - hookargs: Tuple[Any, ...] + hookargs: tuple[Any, ...] def __init__(self, fp: IO[bytes], closehook: Callable[..., object], *hookargs: Any) -> None: ... class addinfo(addbase): @@ -46,7 +47,11 @@ class addinfo(addbase): class addinfourl(addinfo): url: str - code: int + code: int | None + if sys.version_info >= (3, 9): + @property + def status(self) -> int | None: ... + def __init__(self, fp: IO[bytes], headers: Message, url: str, code: int | None = ...) -> None: ... def geturl(self) -> str: ... - def getcode(self) -> int: ... + def getcode(self) -> int | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/robotparser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/robotparser.pyi index 361126327993..d1d69546db42 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/urllib/robotparser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/urllib/robotparser.pyi @@ -1,7 +1,9 @@ import sys from typing import Iterable, NamedTuple -class _RequestRate(NamedTuple): +__all__ = ["RobotFileParser"] + +class RequestRate(NamedTuple): requests: int seconds: int @@ -14,6 +16,6 @@ class RobotFileParser: def mtime(self) -> int: ... def modified(self) -> None: ... def crawl_delay(self, useragent: str) -> str | None: ... - def request_rate(self, useragent: str) -> _RequestRate | None: ... + def request_rate(self, useragent: str) -> RequestRate | None: ... if sys.version_info >= (3, 8): def site_maps(self) -> list[str] | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/uu.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/uu.pyi index aacd458c02c7..05aa4afef2a7 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/uu.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/uu.pyi @@ -1,6 +1,8 @@ import sys from typing import BinaryIO, Union +__all__ = ["Error", "encode", "decode"] + _File = Union[str, BinaryIO] class Error(Exception): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi index da13d819fbdf..4d46e89beddd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/uuid.pyi @@ -1,13 +1,13 @@ import sys -from typing import Any, Tuple # Because UUID has properties called int and bytes we need to rename these temporarily. 
_Int = int _Bytes = bytes -_FieldsType = Tuple[int, int, int, int, int, int] +_FieldsType = tuple[int, int, int, int, int, int] if sys.version_info >= (3, 7): from enum import Enum + class SafeUUID(Enum): safe: int unsafe: int @@ -38,6 +38,7 @@ class UUID: int: _Int | None = ..., version: _Int | None = ..., ) -> None: ... + @property def bytes(self) -> _Bytes: ... @property @@ -71,11 +72,11 @@ class UUID: @property def version(self) -> _Int | None: ... def __int__(self) -> _Int: ... - def __eq__(self, other: Any) -> bool: ... - def __lt__(self, other: Any) -> bool: ... - def __le__(self, other: Any) -> bool: ... - def __gt__(self, other: Any) -> bool: ... - def __ge__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: UUID) -> bool: ... + def __le__(self, other: UUID) -> bool: ... + def __gt__(self, other: UUID) -> bool: ... + def __ge__(self, other: UUID) -> bool: ... def getnode() -> int: ... def uuid1(node: _Int | None = ..., clock_seq: _Int | None = ...) -> UUID: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/venv/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/venv/__init__.pyi index 25cf615a3243..815490a205ab 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/venv/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/venv/__init__.pyi @@ -3,6 +3,9 @@ from _typeshed import StrOrBytesPath from types import SimpleNamespace from typing import Sequence +if sys.version_info >= (3, 9): + CORE_VENV_DEPS: tuple[str, ...] + class EnvBuilder: system_site_packages: bool clear: bool @@ -32,6 +35,7 @@ class EnvBuilder: with_pip: bool = ..., prompt: str | None = ..., ) -> None: ... + def create(self, env_dir: StrOrBytesPath) -> None: ... def clear_directory(self, path: StrOrBytesPath) -> None: ... # undocumented def ensure_directories(self, env_dir: StrOrBytesPath) -> SimpleNamespace: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/warnings.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/warnings.pyi index 62c41c871853..1799d69f5ba6 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/warnings.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/warnings.pyi @@ -1,26 +1,43 @@ +from _warnings import warn as warn, warn_explicit as warn_explicit from types import ModuleType, TracebackType -from typing import Any, Sequence, TextIO, Type, overload +from typing import Any, Sequence, TextIO, overload from typing_extensions import Literal -from _warnings import warn as warn, warn_explicit as warn_explicit +__all__ = [ + "warn", + "warn_explicit", + "showwarning", + "formatwarning", + "filterwarnings", + "simplefilter", + "resetwarnings", + "catch_warnings", +] + +_ActionKind = Literal["default", "error", "ignore", "always", "module", "once"] -filters: Sequence[tuple[str, str | None, Type[Warning], str | None, int]] # undocumented, do not mutate +filters: Sequence[tuple[str, str | None, type[Warning], str | None, int]] # undocumented, do not mutate def showwarning( - message: Warning | str, category: Type[Warning], filename: str, lineno: int, file: TextIO | None = ..., line: str | None = ... + message: Warning | str, category: type[Warning], filename: str, lineno: int, file: TextIO | None = ..., line: str | None = ... ) -> None: ... -def formatwarning(message: Warning | str, category: Type[Warning], filename: str, lineno: int, line: str | None = ...) -> str: ... +def formatwarning(message: Warning | str, category: type[Warning], filename: str, lineno: int, line: str | None = ...) -> str: ... def filterwarnings( - action: str, message: str = ..., category: Type[Warning] = ..., module: str = ..., lineno: int = ..., append: bool = ... + action: _ActionKind, + message: str = ..., + category: type[Warning] = ..., + module: str = ..., + lineno: int = ..., + append: bool = ..., ) -> None: ... 
-def simplefilter(action: str, category: Type[Warning] = ..., lineno: int = ..., append: bool = ...) -> None: ... +def simplefilter(action: _ActionKind, category: type[Warning] = ..., lineno: int = ..., append: bool = ...) -> None: ... def resetwarnings() -> None: ... class _OptionError(Exception): ... class WarningMessage: message: Warning | str - category: Type[Warning] + category: type[Warning] filename: str lineno: int file: TextIO | None @@ -29,7 +46,7 @@ class WarningMessage: def __init__( self, message: Warning | str, - category: Type[Warning], + category: type[Warning], filename: str, lineno: int, file: TextIO | None = ..., @@ -46,7 +63,7 @@ class catch_warnings: def __new__(cls, *, record: bool, module: ModuleType | None = ...) -> catch_warnings: ... def __enter__(self) -> list[WarningMessage] | None: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... class _catch_warnings_without_records(catch_warnings): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/wave.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/wave.pyi index 3ce1b88a6835..13fa12348cb3 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/wave.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/wave.pyi @@ -1,12 +1,18 @@ import sys -from _typeshed import Self -from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, Union +from _typeshed import ReadableBuffer, Self +from typing import IO, Any, BinaryIO, NamedTuple, NoReturn, Union, overload +from typing_extensions import Literal + +if sys.version_info >= (3, 9): + __all__ = ["open", "Error", "Wave_read", "Wave_write"] +else: + __all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"] _File = Union[str, IO[bytes]] class Error(Exception): ... 
-WAVE_FORMAT_PCM: int +WAVE_FORMAT_PCM: Literal[1] class _wave_params(NamedTuple): nchannels: int @@ -57,12 +63,15 @@ class Wave_write: def getmark(self, id: Any) -> NoReturn: ... def getmarkers(self) -> None: ... def tell(self) -> int: ... - # should be any bytes-like object after 3.4, but we don't have a type for that - def writeframesraw(self, data: bytes) -> None: ... - def writeframes(self, data: bytes) -> None: ... + def writeframesraw(self, data: ReadableBuffer) -> None: ... + def writeframes(self, data: ReadableBuffer) -> None: ... def close(self) -> None: ... -# Returns a Wave_read if mode is rb and Wave_write if mode is wb +@overload +def open(f: _File, mode: Literal["r", "rb"]) -> Wave_read: ... +@overload +def open(f: _File, mode: Literal["w", "wb"]) -> Wave_write: ... +@overload def open(f: _File, mode: str | None = ...) -> Any: ... if sys.version_info < (3, 9): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/weakref.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/weakref.pyi index 12158ee6c8f4..de771114aa8a 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/weakref.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/weakref.pyi @@ -1,6 +1,8 @@ -import types +import sys +from _typeshed import Self, SupportsKeysAndGetItem from _weakrefset import WeakSet as WeakSet -from typing import Any, Callable, Generic, Iterable, Iterator, Mapping, MutableMapping, Tuple, Type, TypeVar, overload +from typing import Any, Callable, Generic, Iterable, Iterator, Mapping, MutableMapping, TypeVar, overload +from typing_extensions import ParamSpec from _weakref import ( CallableProxyType as CallableProxyType, @@ -12,66 +14,119 @@ from _weakref import ( ref as ref, ) -_S = TypeVar("_S") +__all__ = [ + "ref", + "proxy", + "getweakrefcount", + "getweakrefs", + "WeakKeyDictionary", + "ReferenceType", + "ProxyType", + "CallableProxyType", + "ProxyTypes", + "WeakValueDictionary", + "WeakSet", + "WeakMethod", + "finalize", +] + _T = 
TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") _KT = TypeVar("_KT") _VT = TypeVar("_VT") +_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) +_P = ParamSpec("_P") -ProxyTypes: Tuple[Type[Any], ...] +ProxyTypes: tuple[type[Any], ...] -class WeakMethod(ref[types.MethodType]): - def __new__(cls, meth: types.MethodType, callback: Callable[[types.MethodType], Any] | None = ...) -> WeakMethod: ... - def __call__(self) -> types.MethodType | None: ... +class WeakMethod(ref[_CallableT], Generic[_CallableT]): + def __new__(cls: type[Self], meth: _CallableT, callback: Callable[[_CallableT], object] | None = ...) -> Self: ... + def __call__(self) -> _CallableT | None: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... class WeakValueDictionary(MutableMapping[_KT, _VT]): @overload def __init__(self) -> None: ... @overload - def __init__(self, __other: Mapping[_KT, _VT] | Iterable[Tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + def __init__(self: WeakValueDictionary[_KT, _VT], __other: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__( + self: WeakValueDictionary[str, _VT], __other: Mapping[str, _VT] | Iterable[tuple[str, _VT]] = ..., **kwargs: _VT + ) -> None: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT]: ... - def __str__(self) -> str: ... def copy(self) -> WeakValueDictionary[_KT, _VT]: ... + __copy__ = copy + def __deepcopy__(self: Self, memo: Any) -> Self: ... # These are incompatible with Mapping - def keys(self) -> Iterator[_KT]: ... # type: ignore - def values(self) -> Iterator[_VT]: ... # type: ignore - def items(self) -> Iterator[Tuple[_KT, _VT]]: ... # type: ignore + def keys(self) -> Iterator[_KT]: ... 
# type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] + def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] def itervaluerefs(self) -> Iterator[KeyedRef[_KT, _VT]]: ... def valuerefs(self) -> list[KeyedRef[_KT, _VT]]: ... + def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakValueDictionary[_KT | _T1, _VT | _T2]: ... + # WeakValueDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, value: Iterable[tuple[_KT, _VT]]) -> Self: ... class KeyedRef(ref[_T], Generic[_KT, _T]): key: _KT # This __new__ method uses a non-standard name for the "cls" parameter - def __new__(type, ob: _T, callback: Callable[[_T], Any], key: _KT) -> KeyedRef[_KT, _T]: ... # type: ignore + def __new__(type: type[Self], ob: _T, callback: Callable[[_T], Any], key: _KT) -> Self: ... # type: ignore def __init__(self, ob: _T, callback: Callable[[_T], Any], key: _KT) -> None: ... class WeakKeyDictionary(MutableMapping[_KT, _VT]): @overload def __init__(self, dict: None = ...) -> None: ... @overload - def __init__(self, dict: Mapping[_KT, _VT] | Iterable[Tuple[_KT, _VT]]) -> None: ... + def __init__(self, dict: Mapping[_KT, _VT] | Iterable[tuple[_KT, _VT]]) -> None: ... def __len__(self) -> int: ... def __getitem__(self, k: _KT) -> _VT: ... def __setitem__(self, k: _KT, v: _VT) -> None: ... def __delitem__(self, v: _KT) -> None: ... def __contains__(self, o: object) -> bool: ... def __iter__(self) -> Iterator[_KT]: ... - def __str__(self) -> str: ... 
def copy(self) -> WeakKeyDictionary[_KT, _VT]: ... + __copy__ = copy + def __deepcopy__(self: Self, memo: Any) -> Self: ... # These are incompatible with Mapping - def keys(self) -> Iterator[_KT]: ... # type: ignore - def values(self) -> Iterator[_VT]: ... # type: ignore - def items(self) -> Iterator[Tuple[_KT, _VT]]: ... # type: ignore + def keys(self) -> Iterator[_KT]: ... # type: ignore[override] + def values(self) -> Iterator[_VT]: ... # type: ignore[override] + def items(self) -> Iterator[tuple[_KT, _VT]]: ... # type: ignore[override] def keyrefs(self) -> list[ref[_KT]]: ... + def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> WeakKeyDictionary[_KT | _T1, _VT | _T2]: ... + # WeakKeyDictionary.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, value: Iterable[tuple[_KT, _VT]]) -> Self: ... -class finalize: - def __init__(self, __obj: object, __func: Callable[..., Any], *args: Any, **kwargs: Any) -> None: ... +class finalize: # TODO: This is a good candidate for to be a `Generic[_P, _T]` class + def __init__(self, __obj: object, __func: Callable[_P, Any], *args: _P.args, **kwargs: _P.kwargs) -> None: ... def __call__(self, _: Any = ...) -> Any | None: ... - def detach(self) -> Tuple[Any, Any, Tuple[Any, ...], dict[str, Any]] | None: ... - def peek(self) -> Tuple[Any, Any, Tuple[Any, ...], dict[str, Any]] | None: ... + def detach(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... + def peek(self) -> tuple[Any, Any, tuple[Any, ...], dict[str, Any]] | None: ... 
alive: bool atexit: bool diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/webbrowser.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/webbrowser.pyi index c85288cc562f..ce8fca262d2d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/webbrowser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/webbrowser.pyi @@ -1,5 +1,9 @@ import sys +from abc import abstractmethod from typing import Callable, Sequence +from typing_extensions import Literal + +__all__ = ["Error", "open", "open_new", "open_new_tab", "get", "register"] class Error(Exception): ... @@ -23,21 +27,19 @@ class BaseBrowser: name: str basename: str def __init__(self, name: str = ...) -> None: ... + @abstractmethod def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... def open_new(self, url: str) -> bool: ... def open_new_tab(self, url: str) -> bool: ... class GenericBrowser(BaseBrowser): - args: list[str] - name: str - basename: str def __init__(self, name: str | Sequence[str]) -> None: ... def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... -class BackgroundBrowser(GenericBrowser): - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... +class BackgroundBrowser(GenericBrowser): ... class UnixBrowser(BaseBrowser): + def open(self, url: str, new: Literal[0, 1, 2] = ..., autoraise: bool = ...) -> bool: ... # type: ignore[override] raise_opts: list[str] | None background: bool redirect_stdout: bool @@ -45,43 +47,15 @@ class UnixBrowser(BaseBrowser): remote_action: str remote_action_newwin: str remote_action_newtab: str - def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... -class Mozilla(UnixBrowser): - remote_args: list[str] - remote_action: str - remote_action_newwin: str - remote_action_newtab: str - background: bool +class Mozilla(UnixBrowser): ... 
class Galeon(UnixBrowser): raise_opts: list[str] - remote_args: list[str] - remote_action: str - remote_action_newwin: str - background: bool - -class Chrome(UnixBrowser): - remote_args: list[str] - remote_action: str - remote_action_newwin: str - remote_action_newtab: str - background: bool -class Opera(UnixBrowser): - remote_args: list[str] - remote_action: str - remote_action_newwin: str - remote_action_newtab: str - background: bool - -class Elinks(UnixBrowser): - remote_args: list[str] - remote_action: str - remote_action_newwin: str - remote_action_newtab: str - background: bool - redirect_stdout: bool +class Chrome(UnixBrowser): ... +class Opera(UnixBrowser): ... +class Elinks(UnixBrowser): ... class Konqueror(BaseBrowser): def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... @@ -95,9 +69,7 @@ if sys.platform == "win32": if sys.platform == "darwin": class MacOSX(BaseBrowser): - name: str - def __init__(self, name: str) -> None: ... def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... - class MacOSXOSAScript(BaseBrowser): - def __init__(self, name: str) -> None: ... + + class MacOSXOSAScript(BaseBrowser): # In runtime this class does not have `name` and `basename` def open(self, url: str, new: int = ..., autoraise: bool = ...) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi index 3c7ab0113e04..1730c651c6b8 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/winreg.pyi @@ -1,99 +1,101 @@ +import sys from _typeshed import Self from types import TracebackType -from typing import Any, Tuple, Type, Union +from typing import Any, Union +from typing_extensions import Literal, final -_KeyType = Union[HKEYType, int] +if sys.platform == "win32": + _KeyType = Union[HKEYType, int] + def CloseKey(__hkey: _KeyType) -> None: ... 
+ def ConnectRegistry(__computer_name: str | None, __key: _KeyType) -> HKEYType: ... + def CreateKey(__key: _KeyType, __sub_key: str | None) -> HKEYType: ... + def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = ..., access: int = ...) -> HKEYType: ... + def DeleteKey(__key: _KeyType, __sub_key: str) -> None: ... + def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = ..., reserved: int = ...) -> None: ... + def DeleteValue(__key: _KeyType, __value: str) -> None: ... + def EnumKey(__key: _KeyType, __index: int) -> str: ... + def EnumValue(__key: _KeyType, __index: int) -> tuple[str, Any, int]: ... + def ExpandEnvironmentStrings(__str: str) -> str: ... + def FlushKey(__key: _KeyType) -> None: ... + def LoadKey(__key: _KeyType, __sub_key: str, __file_name: str) -> None: ... + def OpenKey(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... + def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... + def QueryInfoKey(__key: _KeyType) -> tuple[int, int, int]: ... + def QueryValue(__key: _KeyType, __sub_key: str | None) -> str: ... + def QueryValueEx(__key: _KeyType, __name: str) -> tuple[Any, int]: ... + def SaveKey(__key: _KeyType, __file_name: str) -> None: ... + def SetValue(__key: _KeyType, __sub_key: str, __type: int, __value: str) -> None: ... + def SetValueEx( + __key: _KeyType, __value_name: str | None, __reserved: Any, __type: int, __value: str | int + ) -> None: ... # reserved is ignored + def DisableReflectionKey(__key: _KeyType) -> None: ... + def EnableReflectionKey(__key: _KeyType) -> None: ... + def QueryReflectionKey(__key: _KeyType) -> bool: ... + HKEY_CLASSES_ROOT: int + HKEY_CURRENT_USER: int + HKEY_LOCAL_MACHINE: int + HKEY_USERS: int + HKEY_PERFORMANCE_DATA: int + HKEY_CURRENT_CONFIG: int + HKEY_DYN_DATA: int -def CloseKey(__hkey: _KeyType) -> None: ... -def ConnectRegistry(__computer_name: str | None, __key: _KeyType) -> HKEYType: ... 
-def CreateKey(__key: _KeyType, __sub_key: str | None) -> HKEYType: ... -def CreateKeyEx(key: _KeyType, sub_key: str | None, reserved: int = ..., access: int = ...) -> HKEYType: ... -def DeleteKey(__key: _KeyType, __sub_key: str) -> None: ... -def DeleteKeyEx(key: _KeyType, sub_key: str, access: int = ..., reserved: int = ...) -> None: ... -def DeleteValue(__key: _KeyType, __value: str) -> None: ... -def EnumKey(__key: _KeyType, __index: int) -> str: ... -def EnumValue(__key: _KeyType, __index: int) -> Tuple[str, Any, int]: ... -def ExpandEnvironmentStrings(__str: str) -> str: ... -def FlushKey(__key: _KeyType) -> None: ... -def LoadKey(__key: _KeyType, __sub_key: str, __file_name: str) -> None: ... -def OpenKey(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... -def OpenKeyEx(key: _KeyType, sub_key: str, reserved: int = ..., access: int = ...) -> HKEYType: ... -def QueryInfoKey(__key: _KeyType) -> Tuple[int, int, int]: ... -def QueryValue(__key: _KeyType, __sub_key: str | None) -> str: ... -def QueryValueEx(__key: _KeyType, __name: str) -> Tuple[Any, int]: ... -def SaveKey(__key: _KeyType, __file_name: str) -> None: ... -def SetValue(__key: _KeyType, __sub_key: str, __type: int, __value: str) -> None: ... -def SetValueEx( - __key: _KeyType, __value_name: str | None, __reserved: Any, __type: int, __value: str | int -) -> None: ... # reserved is ignored -def DisableReflectionKey(__key: _KeyType) -> None: ... -def EnableReflectionKey(__key: _KeyType) -> None: ... -def QueryReflectionKey(__key: _KeyType) -> bool: ... 
+ KEY_ALL_ACCESS: Literal[983103] + KEY_WRITE: Literal[131078] + KEY_READ: Literal[131097] + KEY_EXECUTE: Literal[131097] + KEY_QUERY_VALUE: Literal[1] + KEY_SET_VALUE: Literal[2] + KEY_CREATE_SUB_KEY: Literal[4] + KEY_ENUMERATE_SUB_KEYS: Literal[8] + KEY_NOTIFY: Literal[16] + KEY_CREATE_LINK: Literal[32] -HKEY_CLASSES_ROOT: int -HKEY_CURRENT_USER: int -HKEY_LOCAL_MACHINE: int -HKEY_USERS: int -HKEY_PERFORMANCE_DATA: int -HKEY_CURRENT_CONFIG: int -HKEY_DYN_DATA: int + KEY_WOW64_64KEY: Literal[256] + KEY_WOW64_32KEY: Literal[512] -KEY_ALL_ACCESS: int -KEY_WRITE: int -KEY_READ: int -KEY_EXECUTE: int -KEY_QUERY_VALUE: int -KEY_SET_VALUE: int -KEY_CREATE_SUB_KEY: int -KEY_ENUMERATE_SUB_KEYS: int -KEY_NOTIFY: int -KEY_CREATE_LINK: int + REG_BINARY: Literal[3] + REG_DWORD: Literal[4] + REG_DWORD_LITTLE_ENDIAN: Literal[4] + REG_DWORD_BIG_ENDIAN: Literal[5] + REG_EXPAND_SZ: Literal[2] + REG_LINK: Literal[6] + REG_MULTI_SZ: Literal[7] + REG_NONE: Literal[0] + REG_QWORD: Literal[11] + REG_QWORD_LITTLE_ENDIAN: Literal[11] + REG_RESOURCE_LIST: Literal[8] + REG_FULL_RESOURCE_DESCRIPTOR: Literal[9] + REG_RESOURCE_REQUIREMENTS_LIST: Literal[10] + REG_SZ: Literal[1] -KEY_WOW64_64KEY: int -KEY_WOW64_32KEY: int + REG_CREATED_NEW_KEY: int # undocumented + REG_LEGAL_CHANGE_FILTER: int # undocumented + REG_LEGAL_OPTION: int # undocumented + REG_NOTIFY_CHANGE_ATTRIBUTES: int # undocumented + REG_NOTIFY_CHANGE_LAST_SET: int # undocumented + REG_NOTIFY_CHANGE_NAME: int # undocumented + REG_NOTIFY_CHANGE_SECURITY: int # undocumented + REG_NO_LAZY_FLUSH: int # undocumented + REG_OPENED_EXISTING_KEY: int # undocumented + REG_OPTION_BACKUP_RESTORE: int # undocumented + REG_OPTION_CREATE_LINK: int # undocumented + REG_OPTION_NON_VOLATILE: int # undocumented + REG_OPTION_OPEN_LINK: int # undocumented + REG_OPTION_RESERVED: int # undocumented + REG_OPTION_VOLATILE: int # undocumented + REG_REFRESH_HIVE: int # undocumented + REG_WHOLE_HIVE_VOLATILE: int # undocumented -REG_BINARY: int -REG_DWORD: 
int -REG_DWORD_LITTLE_ENDIAN: int -REG_DWORD_BIG_ENDIAN: int -REG_EXPAND_SZ: int -REG_LINK: int -REG_MULTI_SZ: int -REG_NONE: int -REG_QWORD: int -REG_QWORD_LITTLE_ENDIAN: int -REG_RESOURCE_LIST: int -REG_FULL_RESOURCE_DESCRIPTOR: int -REG_RESOURCE_REQUIREMENTS_LIST: int -REG_SZ: int + error = OSError -REG_CREATED_NEW_KEY: int # undocumented -REG_LEGAL_CHANGE_FILTER: int # undocumented -REG_LEGAL_OPTION: int # undocumented -REG_NOTIFY_CHANGE_ATTRIBUTES: int # undocumented -REG_NOTIFY_CHANGE_LAST_SET: int # undocumented -REG_NOTIFY_CHANGE_NAME: int # undocumented -REG_NOTIFY_CHANGE_SECURITY: int # undocumented -REG_NO_LAZY_FLUSH: int # undocumented -REG_OPENED_EXISTING_KEY: int # undocumented -REG_OPTION_BACKUP_RESTORE: int # undocumented -REG_OPTION_CREATE_LINK: int # undocumented -REG_OPTION_NON_VOLATILE: int # undocumented -REG_OPTION_OPEN_LINK: int # undocumented -REG_OPTION_RESERVED: int # undocumented -REG_OPTION_VOLATILE: int # undocumented -REG_REFRESH_HIVE: int # undocumented -REG_WHOLE_HIVE_VOLATILE: int # undocumented - -error = OSError - -# Though this class has a __name__ of PyHKEY, it's exposed as HKEYType for some reason -class HKEYType: - def __bool__(self) -> bool: ... - def __int__(self) -> int: ... - def __enter__(self: Self) -> Self: ... - def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None - ) -> bool | None: ... - def Close(self) -> None: ... - def Detach(self) -> int: ... + # Though this class has a __name__ of PyHKEY, it's exposed as HKEYType for some reason + @final + class HKEYType: + def __bool__(self) -> bool: ... + def __int__(self) -> int: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> bool | None: ... + def Close(self) -> None: ... + def Detach(self) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/winsound.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/winsound.pyi index 3d79f3b043f2..588bd5969e98 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/winsound.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/winsound.pyi @@ -3,21 +3,21 @@ from typing import overload from typing_extensions import Literal if sys.platform == "win32": - SND_FILENAME: int - SND_ALIAS: int - SND_LOOP: int - SND_MEMORY: int - SND_PURGE: int - SND_ASYNC: int - SND_NODEFAULT: int - SND_NOSTOP: int - SND_NOWAIT: int + SND_FILENAME: Literal[131072] + SND_ALIAS: Literal[65536] + SND_LOOP: Literal[8] + SND_MEMORY: Literal[4] + SND_PURGE: Literal[64] + SND_ASYNC: Literal[1] + SND_NODEFAULT: Literal[2] + SND_NOSTOP: Literal[16] + SND_NOWAIT: Literal[8192] - MB_ICONASTERISK: int - MB_ICONEXCLAMATION: int - MB_ICONHAND: int - MB_ICONQUESTION: int - MB_OK: int + MB_ICONASTERISK: Literal[64] + MB_ICONEXCLAMATION: Literal[48] + MB_ICONHAND: Literal[16] + MB_ICONQUESTION: Literal[32] + MB_OK: Literal[0] def Beep(frequency: int, duration: int) -> None: ... 
# Can actually accept anything ORed with 4, and if not it's definitely str, but that's inexpressible @overload diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/handlers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/handlers.pyi index b9899389cd3b..731cb52ea9db 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/handlers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/handlers.pyi @@ -1,18 +1,20 @@ from abc import abstractmethod from types import TracebackType -from typing import IO, Callable, MutableMapping, Optional, Tuple, Type +from typing import IO, Callable, MutableMapping, Optional from .headers import Headers from .types import ErrorStream, InputStream, StartResponse, WSGIApplication, WSGIEnvironment from .util import FileWrapper -_exc_info = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] +__all__ = ["BaseHandler", "SimpleHandler", "BaseCGIHandler", "CGIHandler", "IISCGIHandler", "read_environ"] + +_exc_info = tuple[Optional[type[BaseException]], Optional[BaseException], Optional[TracebackType]] def format_date_time(timestamp: float | None) -> str: ... # undocumented def read_environ() -> dict[str, str]: ... class BaseHandler: - wsgi_version: Tuple[int, int] # undocumented + wsgi_version: tuple[int, int] # undocumented wsgi_multithread: bool wsgi_multiprocess: bool wsgi_run_once: bool @@ -23,12 +25,12 @@ class BaseHandler: os_environ: MutableMapping[str, str] - wsgi_file_wrapper: Type[FileWrapper] | None - headers_class: Type[Headers] # undocumented + wsgi_file_wrapper: type[FileWrapper] | None + headers_class: type[Headers] # undocumented traceback_limit: int | None error_status: str - error_headers: list[Tuple[str, str]] + error_headers: list[tuple[str, str]] error_body: bytes def run(self, application: WSGIApplication) -> None: ... def setup_environ(self) -> None: ... 
@@ -37,7 +39,7 @@ class BaseHandler: def set_content_length(self) -> None: ... def cleanup_headers(self) -> None: ... def start_response( - self, status: str, headers: list[Tuple[str, str]], exc_info: _exc_info | None = ... + self, status: str, headers: list[tuple[str, str]], exc_info: _exc_info | None = ... ) -> Callable[[bytes], None]: ... def send_preamble(self) -> None: ... def write(self, data: bytes) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/headers.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/headers.pyi index 531a521d3824..b62124a2a936 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/headers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/headers.pyi @@ -1,6 +1,6 @@ -from typing import List, Pattern, Tuple, overload +from typing import Pattern, overload -_HeaderList = List[Tuple[str, str]] +_HeaderList = list[tuple[str, str]] tspecials: Pattern[str] # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/simple_server.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/simple_server.pyi index 76d0b269793d..389d30c22db2 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/simple_server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/simple_server.pyi @@ -1,9 +1,11 @@ from http.server import BaseHTTPRequestHandler, HTTPServer -from typing import Type, TypeVar, overload +from typing import TypeVar, overload from .handlers import SimpleHandler from .types import ErrorStream, StartResponse, WSGIApplication, WSGIEnvironment +__all__ = ["WSGIServer", "WSGIRequestHandler", "demo_app", "make_server"] + server_version: str # undocumented sys_version: str # undocumented software_version: str # undocumented @@ -30,8 +32,8 @@ def demo_app(environ: WSGIEnvironment, start_response: StartResponse) -> list[by _S = TypeVar("_S", bound=WSGIServer) @overload -def make_server(host: str, 
port: int, app: WSGIApplication, *, handler_class: Type[WSGIRequestHandler] = ...) -> WSGIServer: ... +def make_server(host: str, port: int, app: WSGIApplication, *, handler_class: type[WSGIRequestHandler] = ...) -> WSGIServer: ... @overload def make_server( - host: str, port: int, app: WSGIApplication, server_class: Type[_S], handler_class: Type[WSGIRequestHandler] = ... + host: str, port: int, app: WSGIApplication, server_class: type[_S], handler_class: type[WSGIRequestHandler] = ... ) -> _S: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/util.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/util.pyi index a7f710e8012c..f2c3135df786 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/util.pyi @@ -1,13 +1,18 @@ +import sys from typing import IO, Any, Callable from .types import WSGIEnvironment +__all__ = ["FileWrapper", "guess_scheme", "application_uri", "request_uri", "shift_path_info", "setup_testing_defaults"] + class FileWrapper: filelike: IO[bytes] blksize: int close: Callable[[], None] # only exists if filelike.close exists def __init__(self, filelike: IO[bytes], blksize: int = ...) -> None: ... - def __getitem__(self, key: Any) -> bytes: ... + if sys.version_info < (3, 11): + def __getitem__(self, key: Any) -> bytes: ... + def __iter__(self) -> FileWrapper: ... def __next__(self) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/validate.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/validate.pyi index 68e22727bc73..35491756c288 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/validate.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/wsgiref/validate.pyi @@ -1,6 +1,8 @@ from _typeshed.wsgi import ErrorStream, InputStream, WSGIApplication from typing import Any, Callable, Iterable, Iterator, NoReturn +__all__ = ["validator"] + class WSGIWarning(Warning): ... def validator(application: WSGIApplication) -> WSGIApplication: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xdrlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xdrlib.pyi index f59843f8ee9d..e9716e29014d 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xdrlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xdrlib.pyi @@ -1,5 +1,7 @@ from typing import Callable, Sequence, TypeVar +__all__ = ["Error", "Packer", "Unpacker", "ConversionError"] + _T = TypeVar("_T") class Error(Exception): diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/__init__.pyi index c5766c326c3e..e5b91bf2a795 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/__init__.pyi @@ -43,6 +43,7 @@ class IndexSizeErr(DOMException): ... class DomstringSizeErr(DOMException): ... class HierarchyRequestErr(DOMException): ... class WrongDocumentErr(DOMException): ... +class InvalidCharacterErr(DOMException): ... class NoDataAllowedErr(DOMException): ... class NoModificationAllowedErr(DOMException): ... class NotFoundErr(DOMException): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/domreg.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/domreg.pyi index 64c18ae80f0c..b9e2dd9eb263 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/domreg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/domreg.pyi @@ -1,8 +1,8 @@ from _typeshed.xml import DOMImplementation -from typing import Callable, Iterable, Tuple +from typing import Callable, Iterable well_known_implementations: dict[str, str] registered: dict[str, Callable[[], DOMImplementation]] def registerDOMImplementation(name: str, factory: Callable[[], DOMImplementation]) -> None: ... -def getDOMImplementation(name: str | None = ..., features: str | Iterable[Tuple[str, str | None]] = ...) -> DOMImplementation: ... +def getDOMImplementation(name: str | None = ..., features: str | Iterable[tuple[str, str | None]] = ...) -> DOMImplementation: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/expatbuilder.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/expatbuilder.pyi index 964e6fa3f426..58914e8fabf1 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/expatbuilder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/expatbuilder.pyi @@ -1,3 +1,99 @@ -from typing import Any +from typing import Any, NoReturn +from xml.dom.minidom import Document, DOMImplementation, Node, TypeInfo +from xml.dom.xmlbuilder import DOMBuilderFilter, Options -def __getattr__(name: str) -> Any: ... 
# incomplete +TEXT_NODE = Node.TEXT_NODE +CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE +DOCUMENT_NODE = Node.DOCUMENT_NODE +FILTER_ACCEPT = DOMBuilderFilter.FILTER_ACCEPT +FILTER_REJECT = DOMBuilderFilter.FILTER_REJECT +FILTER_SKIP = DOMBuilderFilter.FILTER_SKIP +FILTER_INTERRUPT = DOMBuilderFilter.FILTER_INTERRUPT +theDOMImplementation: DOMImplementation | None + +class ElementInfo: + tagName: Any + def __init__(self, tagName, model: Any | None = ...) -> None: ... + def getAttributeType(self, aname) -> TypeInfo: ... + def getAttributeTypeNS(self, namespaceURI, localName) -> TypeInfo: ... + def isElementContent(self) -> bool: ... + def isEmpty(self) -> bool: ... + def isId(self, aname) -> bool: ... + def isIdNS(self, euri, ename, auri, aname) -> bool: ... + +class ExpatBuilder: + document: Document # Created in self.reset() + curNode: Any # Created in self.reset() + def __init__(self, options: Options | None = ...) -> None: ... + def createParser(self): ... + def getParser(self): ... + def reset(self) -> None: ... + def install(self, parser) -> None: ... + def parseFile(self, file) -> Document: ... + def parseString(self, string: str) -> Document: ... + def start_doctype_decl_handler(self, doctypeName, systemId, publicId, has_internal_subset) -> None: ... + def end_doctype_decl_handler(self) -> None: ... + def pi_handler(self, target, data) -> None: ... + def character_data_handler_cdata(self, data) -> None: ... + def character_data_handler(self, data) -> None: ... + def start_cdata_section_handler(self) -> None: ... + def end_cdata_section_handler(self) -> None: ... + def entity_decl_handler(self, entityName, is_parameter_entity, value, base, systemId, publicId, notationName) -> None: ... + def notation_decl_handler(self, notationName, base, systemId, publicId) -> None: ... + def comment_handler(self, data) -> None: ... + def external_entity_ref_handler(self, context, base, systemId, publicId) -> int: ... 
+ def first_element_handler(self, name, attributes) -> None: ... + def start_element_handler(self, name, attributes) -> None: ... + def end_element_handler(self, name) -> None: ... + def element_decl_handler(self, name, model) -> None: ... + def attlist_decl_handler(self, elem, name, type, default, required) -> None: ... + def xml_decl_handler(self, version, encoding, standalone) -> None: ... + +class FilterVisibilityController: + filter: DOMBuilderFilter + def __init__(self, filter: DOMBuilderFilter) -> None: ... + def startContainer(self, node: Node) -> int: ... + def acceptNode(self, node: Node) -> int: ... + +class FilterCrutch: + def __init__(self, builder) -> None: ... + +class Rejecter(FilterCrutch): + def start_element_handler(self, *args: Any) -> None: ... + def end_element_handler(self, *args: Any) -> None: ... + +class Skipper(FilterCrutch): + def start_element_handler(self, *args: Any) -> None: ... + def end_element_handler(self, *args: Any) -> None: ... + +class FragmentBuilder(ExpatBuilder): + fragment: Any | None + originalDocument: Any + context: Any + def __init__(self, context, options: Options | None = ...) -> None: ... + +class Namespaces: + def createParser(self): ... + def install(self, parser) -> None: ... + def start_namespace_decl_handler(self, prefix, uri) -> None: ... + def start_element_handler(self, name, attributes) -> None: ... + def end_element_handler(self, name) -> None: ... + +class ExpatBuilderNS(Namespaces, ExpatBuilder): ... +class FragmentBuilderNS(Namespaces, FragmentBuilder): ... +class ParseEscape(Exception): ... + +class InternalSubsetExtractor(ExpatBuilder): + subset: Any | None + def getSubset(self) -> Any | None: ... + def parseFile(self, file) -> None: ... # type: ignore[override] + def parseString(self, string: str) -> None: ... # type: ignore[override] + def start_doctype_decl_handler(self, name, publicId, systemId, has_internal_subset) -> None: ... 
# type: ignore[override] + def end_doctype_decl_handler(self) -> NoReturn: ... + def start_element_handler(self, name, attrs) -> NoReturn: ... + +def parse(file, namespaces: bool = ...): ... +def parseString(string: str, namespaces: bool = ...): ... +def parseFragment(file, context, namespaces: bool = ...): ... +def parseFragmentString(string: str, context, namespaces: bool = ...): ... +def makeBuilder(options: Options) -> ExpatBuilderNS | ExpatBuilder: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minicompat.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minicompat.pyi index e9b0395ab50d..411401d11ccd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minicompat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minicompat.pyi @@ -1,17 +1,19 @@ -from typing import Any, Iterable, List, Tuple, Type, TypeVar +from typing import Any, Iterable, TypeVar + +__all__ = ["NodeList", "EmptyNodeList", "StringTypes", "defproperty"] _T = TypeVar("_T") -StringTypes: Tuple[Type[str]] +StringTypes: tuple[type[str]] -class NodeList(List[_T]): +class NodeList(list[_T]): length: int def item(self, index: int) -> _T | None: ... -class EmptyNodeList(Tuple[Any, ...]): +class EmptyNodeList(tuple[Any, ...]): length: int def item(self, index: int) -> None: ... - def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore + def __add__(self, other: Iterable[_T]) -> NodeList[_T]: ... # type: ignore[override] def __radd__(self, other: Iterable[_T]) -> NodeList[_T]: ... -def defproperty(klass: Type[Any], name: str, doc: str) -> None: ... +def defproperty(klass: type[Any], name: str, doc: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minidom.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minidom.pyi index 4d1d7a9d0faf..d8bcc299b991 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minidom.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/minidom.pyi @@ -1,11 +1,11 @@ import sys import xml.dom -from _typeshed import Self -from typing import IO, Any +from _typeshed import Self, SupportsRead +from typing import Any from xml.dom.xmlbuilder import DocumentLS, DOMImplementationLS from xml.sax.xmlreader import XMLReader -def parse(file: str | IO[Any], parser: XMLReader | None = ..., bufsize: int | None = ...): ... +def parse(file: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ...): ... def parseString(string: str | bytes, parser: XMLReader | None = ...): ... def getDOMImplementation(features=...) -> DOMImplementation | None: ... @@ -22,12 +22,14 @@ class Node(xml.dom.Node): def lastChild(self) -> Node | None: ... @property def localName(self) -> str | None: ... + def __bool__(self) -> bool: ... if sys.version_info >= (3, 9): def toxml(self, encoding: Any | None = ..., standalone: Any | None = ...): ... def toprettyxml(self, indent: str = ..., newl: str = ..., encoding: Any | None = ..., standalone: Any | None = ...): ... else: def toxml(self, encoding: Any | None = ...): ... def toprettyxml(self, indent: str = ..., newl: str = ..., encoding: Any | None = ...): ... + def hasChildNodes(self) -> bool: ... def insertBefore(self, newChild, refChild): ... def appendChild(self, node): ... @@ -70,6 +72,10 @@ class Attr(Node): self, qName: str, namespaceURI: str | None = ..., localName: Any | None = ..., prefix: Any | None = ... ) -> None: ... def unlink(self) -> None: ... + @property + def isId(self) -> bool: ... + @property + def schemaType(self) -> Any: ... 
class NamedNodeMap: def __init__(self, attrs, attrsNS, ownerElement) -> None: ... @@ -82,7 +88,7 @@ class NamedNodeMap: def values(self): ... def get(self, name, value: Any | None = ...): ... def __len__(self) -> int: ... - def __eq__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> bool: ... def __ge__(self, other: Any) -> bool: ... def __gt__(self, other: Any) -> bool: ... def __le__(self, other: Any) -> bool: ... @@ -96,6 +102,8 @@ class NamedNodeMap: def setNamedItem(self, node): ... def setNamedItemNS(self, node): ... def __delitem__(self, attname_or_tuple) -> None: ... + @property + def length(self) -> int: ... AttributeList = NamedNodeMap @@ -110,6 +118,7 @@ class Element(Node): schemaType: Any parentNode: Any tagName: str + nodeName: str prefix: Any namespaceURI: str | None childNodes: Any @@ -139,6 +148,8 @@ class Element(Node): def setIdAttribute(self, name) -> None: ... def setIdAttributeNS(self, namespaceURI: str, localName) -> None: ... def setIdAttributeNode(self, idAttr) -> None: ... + @property + def attributes(self) -> NamedNodeMap: ... class Childless: attributes: Any @@ -173,6 +184,8 @@ class CharacterData(Childless, Node): def insertData(self, offset: int, arg: str) -> None: ... def deleteData(self, offset: int, count: int) -> None: ... def replaceData(self, offset: int, count: int, arg: str) -> None: ... + @property + def length(self) -> int: ... class Text(CharacterData): nodeType: Any @@ -182,6 +195,10 @@ class Text(CharacterData): def splitText(self, offset): ... def writexml(self, writer, indent: str = ..., addindent: str = ..., newl: str = ...) -> None: ... def replaceWholeText(self, content): ... + @property + def isWhitespaceInElementContent(self) -> bool: ... + @property + def wholeText(self) -> str: ... class Comment(CharacterData): nodeType: Any @@ -205,15 +222,17 @@ class ReadOnlySequentialNamedNodeMap: def removeNamedItemNS(self, namespaceURI: str, localName) -> None: ... def setNamedItem(self, node) -> None: ... 
def setNamedItemNS(self, node) -> None: ... + @property + def length(self) -> int: ... -class Identified: ... +class Identified: + publicId: Any + systemId: Any class DocumentType(Identified, Childless, Node): nodeType: Any nodeValue: Any name: Any - publicId: Any - systemId: Any internalSubset: Any entities: Any notations: Any @@ -312,4 +331,5 @@ class Document(Node, DocumentLS): def writexml( self, writer, indent: str = ..., addindent: str = ..., newl: str = ..., encoding: Any | None = ... ) -> None: ... + def renameNode(self, n, namespaceURI: str, name): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/pulldom.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/pulldom.pyi index ce8816b4a98a..7ae2dcf3eadd 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/pulldom.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/pulldom.pyi @@ -1,4 +1,6 @@ -from typing import IO, Any, Sequence, Tuple, Union +import sys +from _typeshed import SupportsRead +from typing import Any, Sequence, Union from typing_extensions import Literal from xml.dom.minidom import Document, DOMImplementation, Element, Text from xml.sax.handler import ContentHandler @@ -16,7 +18,7 @@ CHARACTERS: Literal["CHARACTERS"] _DocumentFactory = Union[DOMImplementation, None] _Node = Union[Document, Element, Text] -_Event = Tuple[ +_Event = tuple[ Literal[ Literal["START_ELEMENT"], Literal["END_ELEMENT"], @@ -61,12 +63,14 @@ class ErrorHandler: def fatalError(self, exception) -> None: ... class DOMEventStream: - stream: IO[bytes] + stream: SupportsRead[bytes] | SupportsRead[str] parser: XMLReader bufsize: int - def __init__(self, stream: IO[bytes], parser: XMLReader, bufsize: int) -> None: ... + def __init__(self, stream: SupportsRead[bytes] | SupportsRead[str], parser: XMLReader, bufsize: int) -> None: ... pulldom: Any - def __getitem__(self, pos): ... + if sys.version_info < (3, 11): + def __getitem__(self, pos): ... 
+ def __next__(self): ... def __iter__(self): ... def getEvent(self) -> _Event: ... @@ -83,5 +87,7 @@ class SAX2DOM(PullDOM): default_bufsize: int -def parse(stream_or_string: str | IO[bytes], parser: XMLReader | None = ..., bufsize: int | None = ...) -> DOMEventStream: ... +def parse( + stream_or_string: str | SupportsRead[bytes] | SupportsRead[str], parser: XMLReader | None = ..., bufsize: int | None = ... +) -> DOMEventStream: ... def parseString(string: str, parser: XMLReader | None = ...) -> DOMEventStream: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/xmlbuilder.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/xmlbuilder.pyi index d8936bdc2ab4..0615edb8073e 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/xmlbuilder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/dom/xmlbuilder.pyi @@ -1,6 +1,109 @@ -from typing import Any +from typing import Any, NoReturn, Optional +from typing_extensions import Literal +from urllib.request import OpenerDirector +from xml.dom.expatbuilder import ExpatBuilder, ExpatBuilderNS +from xml.dom.minidom import Node -def __getattr__(name: str) -> Any: ... # incomplete +__all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"] -class DocumentLS(Any): ... # type: ignore -class DOMImplementationLS(Any): ... # type: ignore +# UNKNOWN TYPES: +# - `Options.errorHandler`. +# The same as `_DOMBuilderErrorHandlerType`? +# Maybe `xml.sax.handler.ErrorHandler`? +# - Return type of DOMBuilder.getFeature(). +# We could get rid of the `Any` if we knew more +# about `Options.errorHandler`. + +# ALIASES REPRESENTING MORE UNKNOWN TYPES: + +# probably the same as `Options.errorHandler`? +# Maybe `xml.sax.handler.ErrorHandler`? +_DOMBuilderErrorHandlerType = Optional[Any] +# probably some kind of IO... +_DOMInputSourceCharacterStreamType = Optional[Any] +# probably a string?? +_DOMInputSourceStringDataType = Optional[Any] +# probably a string?? 
+_DOMInputSourceEncodingType = Optional[Any] + +class Options: + namespaces: int + namespace_declarations: bool + validation: bool + external_parameter_entities: bool + external_general_entities: bool + external_dtd_subset: bool + validate_if_schema: bool + validate: bool + datatype_normalization: bool + create_entity_ref_nodes: bool + entities: bool + whitespace_in_element_content: bool + cdata_sections: bool + comments: bool + charset_overrides_xml_encoding: bool + infoset: bool + supported_mediatypes_only: bool + errorHandler: Any | None + filter: DOMBuilderFilter | None # a guess, but seems likely + +class DOMBuilder: + entityResolver: DOMEntityResolver | None # a guess, but seems likely + errorHandler: _DOMBuilderErrorHandlerType + filter: DOMBuilderFilter | None # a guess, but seems likely + ACTION_REPLACE: Literal[1] + ACTION_APPEND_AS_CHILDREN: Literal[2] + ACTION_INSERT_AFTER: Literal[3] + ACTION_INSERT_BEFORE: Literal[4] + def __init__(self) -> None: ... + def setFeature(self, name: str, state: int) -> None: ... + def supportsFeature(self, name: str) -> bool: ... + def canSetFeature(self, name: str, state: int) -> bool: ... + # getFeature could return any attribute from an instance of `Options` + def getFeature(self, name: str) -> Any: ... + def parseURI(self, uri: str) -> ExpatBuilder | ExpatBuilderNS: ... + def parse(self, input: DOMInputSource) -> ExpatBuilder | ExpatBuilderNS: ... + # `input` and `cnode` argtypes for `parseWithContext` are unknowable + # as the function does nothing with them, and always raises an exception. + # But `input` is *probably* `DOMInputSource`? + def parseWithContext(self, input: object, cnode: object, action: Literal[1, 2, 3, 4]) -> NoReturn: ... + +class DOMEntityResolver: + def resolveEntity(self, publicId: str | None, systemId: str) -> DOMInputSource: ... 
+ +class DOMInputSource: + byteStream: OpenerDirector | None + characterStream: _DOMInputSourceCharacterStreamType + stringData: _DOMInputSourceStringDataType + encoding: _DOMInputSourceEncodingType + publicId: str | None + systemId: str | None + baseURI: str | None + +class DOMBuilderFilter: + FILTER_ACCEPT: Literal[1] + FILTER_REJECT: Literal[2] + FILTER_SKIP: Literal[3] + FILTER_INTERRUPT: Literal[4] + whatToShow: int + # The argtypes for acceptNode and startContainer appear to be irrelevant. + def acceptNode(self, element: object) -> Literal[1]: ... + def startContainer(self, element: object) -> Literal[1]: ... + +class DocumentLS: + async_: bool + def abort(self) -> NoReturn: ... + # `load()` and `loadXML()` always raise exceptions + # so the argtypes of `uri` and `source` are unknowable. + # `source` is *probably* `DOMInputSource`? + # `uri` is *probably* a str? (see DOMBuilder.parseURI()) + def load(self, uri: object) -> NoReturn: ... + def loadXML(self, source: object) -> NoReturn: ... + def saveXML(self, snode: Node | None) -> str: ... + +class DOMImplementationLS: + MODE_SYNCHRONOUS: Literal[1] + MODE_ASYNCHRONOUS: Literal[2] + def createDOMBuilder(self, mode: Literal[1], schemaType: None) -> DOMBuilder: ... + def createDOMWriter(self) -> NoReturn: ... + def createDOMInputSource(self) -> DOMInputSource: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi index 0ccccce4f3d0..5cd85cc21753 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementInclude.pyi @@ -6,6 +6,9 @@ XINCLUDE: str XINCLUDE_INCLUDE: str XINCLUDE_FALLBACK: str +if sys.version_info >= (3, 9): + DEFAULT_MAX_INCLUSION_DEPTH: int + class FatalIncludeError(SyntaxError): ... def default_loader(href: str | bytes | int, parse: str, encoding: str | None = ...) 
-> str | Element: ... @@ -18,5 +21,7 @@ if sys.version_info >= (3, 9): elem: Element, loader: Callable[..., str | Element] | None = ..., base_url: str | None = ..., max_depth: int | None = ... ) -> None: ... + class LimitedRecursiveIncludeError(FatalIncludeError): ... + else: def include(elem: Element, loader: Callable[..., str | Element] | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementPath.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementPath.pyi index db4bd6a4e958..5a2dd69c1bee 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementPath.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementPath.pyi @@ -1,11 +1,11 @@ -from typing import Callable, Generator, List, Pattern, Tuple, TypeVar +from typing import Callable, Generator, Pattern, TypeVar from xml.etree.ElementTree import Element xpath_tokenizer_re: Pattern[str] -_token = Tuple[str, str] +_token = tuple[str, str] _next = Callable[[], _token] -_callback = Callable[[_SelectorContext, List[Element]], Generator[Element, None, None]] +_callback = Callable[[_SelectorContext, list[Element]], Generator[Element, None, None]] def xpath_tokenizer(pattern: str, namespaces: dict[str, str] | None = ...) -> Generator[_token, None, None]: ... def get_parent_map(context: _SelectorContext) -> dict[Element, Element]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi index b9ecf52edf58..aef8285eec5b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/ElementTree.pyi @@ -1,34 +1,119 @@ import sys -from _typeshed import FileDescriptor, StrOrBytesPath, SupportsWrite +from _typeshed import FileDescriptor, StrOrBytesPath, SupportsRead, SupportsWrite from typing import ( - IO, Any, Callable, - Dict, Generator, ItemsView, Iterable, Iterator, KeysView, + Mapping, MutableSequence, Sequence, - Tuple, TypeVar, Union, overload, ) -from typing_extensions import Literal +from typing_extensions import Literal, SupportsIndex, TypeGuard + +if sys.version_info >= (3, 9): + __all__ = [ + "Comment", + "dump", + "Element", + "ElementTree", + "fromstring", + "fromstringlist", + "indent", + "iselement", + "iterparse", + "parse", + "ParseError", + "PI", + "ProcessingInstruction", + "QName", + "SubElement", + "tostring", + "tostringlist", + "TreeBuilder", + "VERSION", + "XML", + "XMLID", + "XMLParser", + "XMLPullParser", + "register_namespace", + "canonicalize", + "C14NWriterTarget", + ] +elif sys.version_info >= (3, 8): + __all__ = [ + "Comment", + "dump", + "Element", + "ElementTree", + "fromstring", + "fromstringlist", + "iselement", + "iterparse", + "parse", + "ParseError", + "PI", + "ProcessingInstruction", + "QName", + "SubElement", + "tostring", + "tostringlist", + "TreeBuilder", + "VERSION", + "XML", + "XMLID", + "XMLParser", + "XMLPullParser", + "register_namespace", + "canonicalize", + "C14NWriterTarget", + ] +else: + __all__ = [ + "Comment", + "dump", + "Element", + "ElementTree", + "fromstring", + "fromstringlist", + "iselement", + "iterparse", + "parse", + "ParseError", + "PI", + "ProcessingInstruction", + "QName", + "SubElement", + "tostring", + "tostringlist", + 
"TreeBuilder", + "VERSION", + "XML", + "XMLID", + "XMLParser", + "XMLPullParser", + "register_namespace", + ] _T = TypeVar("_T") -_File = Union[StrOrBytesPath, FileDescriptor, IO[Any]] +_FileRead = Union[StrOrBytesPath, FileDescriptor, SupportsRead[bytes], SupportsRead[str]] +_FileWriteC14N = Union[StrOrBytesPath, FileDescriptor, SupportsWrite[bytes]] +_FileWrite = Union[_FileWriteC14N, SupportsWrite[str]] VERSION: str class ParseError(SyntaxError): code: int - position: Tuple[int, int] + position: tuple[int, int] -def iselement(element: object) -> bool: ... +# In reality it works based on `.tag` attribute duck typing. +def iselement(element: object) -> TypeGuard[Element]: ... if sys.version_info >= (3, 8): @overload @@ -36,7 +121,7 @@ if sys.version_info >= (3, 8): xml_data: str | bytes | None = ..., *, out: None = ..., - from_file: _File | None = ..., + from_file: _FileRead | None = ..., with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., @@ -50,7 +135,7 @@ if sys.version_info >= (3, 8): xml_data: str | bytes | None = ..., *, out: SupportsWrite[str], - from_file: _File | None = ..., + from_file: _FileRead | None = ..., with_comments: bool = ..., strip_text: bool = ..., rewrite_prefixes: bool = ..., @@ -85,19 +170,22 @@ class Element(MutableSequence[Element]): def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... def itertext(self) -> Generator[str, None, None]: ... def keys(self) -> KeysView[str]: ... + # makeelement returns the type of self in Python impl, but not in C impl def makeelement(self, __tag: str, __attrib: dict[str, str]) -> Element: ... def remove(self, __subelement: Element) -> None: ... def set(self, __key: str, __value: str) -> None: ... - def __delitem__(self, i: int | slice) -> None: ... + def __copy__(self) -> Element: ... # returns the type of self in Python impl, but not in C impl + def __deepcopy__(self, __memo: Any) -> Element: ... 
# Only exists in C impl + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... @overload - def __getitem__(self, i: int) -> Element: ... + def __getitem__(self, __i: SupportsIndex) -> Element: ... @overload - def __getitem__(self, s: slice) -> MutableSequence[Element]: ... + def __getitem__(self, __s: slice) -> MutableSequence[Element]: ... def __len__(self) -> int: ... @overload - def __setitem__(self, i: int, o: Element) -> None: ... + def __setitem__(self, __i: SupportsIndex, __o: Element) -> None: ... @overload - def __setitem__(self, s: slice, o: Iterable[Element]) -> None: ... + def __setitem__(self, __s: slice, __o: Iterable[Element]) -> None: ... if sys.version_info < (3, 9): def getchildren(self) -> list[Element]: ... def getiterator(self, tag: str | None = ...) -> list[Element]: ... @@ -111,14 +199,20 @@ PI: Callable[..., Element] class QName: text: str def __init__(self, text_or_uri: str, tag: str | None = ...) -> None: ... + def __lt__(self, other: QName | str) -> bool: ... + def __le__(self, other: QName | str) -> bool: ... + def __gt__(self, other: QName | str) -> bool: ... + def __ge__(self, other: QName | str) -> bool: ... + def __eq__(self, other: object) -> bool: ... class ElementTree: - def __init__(self, element: Element | None = ..., file: _File | None = ...) -> None: ... + def __init__(self, element: Element | None = ..., file: _FileRead | None = ...) -> None: ... def getroot(self) -> Element: ... - def parse(self, source: _File, parser: XMLParser | None = ...) -> Element: ... + def parse(self, source: _FileRead, parser: XMLParser | None = ...) -> Element: ... def iter(self, tag: str | None = ...) -> Generator[Element, None, None]: ... if sys.version_info < (3, 9): def getiterator(self, tag: str | None = ...) -> list[Element]: ... + def find(self, path: str, namespaces: dict[str, str] | None = ...) -> Element | None: ... @overload def findtext(self, path: str, default: None = ..., namespaces: dict[str, str] | None = ...) 
-> str | None: ... @@ -128,7 +222,7 @@ class ElementTree: def iterfind(self, path: str, namespaces: dict[str, str] | None = ...) -> Generator[Element, None, None]: ... def write( self, - file_or_filename: _File, + file_or_filename: _FileWrite, encoding: str | None = ..., xml_declaration: bool | None = ..., default_namespace: str | None = ..., @@ -136,7 +230,7 @@ class ElementTree: *, short_empty_elements: bool = ..., ) -> None: ... - def write_c14n(self, file: _File) -> None: ... + def write_c14n(self, file: _FileWriteC14N) -> None: ... def register_namespace(prefix: str, uri: str) -> None: ... @@ -231,17 +325,19 @@ def dump(elem: Element) -> None: ... if sys.version_info >= (3, 9): def indent(tree: Element | ElementTree, space: str = ..., level: int = ...) -> None: ... -def parse(source: _File, parser: XMLParser | None = ...) -> ElementTree: ... -def iterparse(source: _File, events: Sequence[str] | None = ..., parser: XMLParser | None = ...) -> Iterator[Tuple[str, Any]]: ... +def parse(source: _FileRead, parser: XMLParser | None = ...) -> ElementTree: ... +def iterparse( + source: _FileRead, events: Sequence[str] | None = ..., parser: XMLParser | None = ... +) -> Iterator[tuple[str, Any]]: ... class XMLPullParser: def __init__(self, events: Sequence[str] | None = ..., *, _parser: XMLParser | None = ...) -> None: ... def feed(self, data: bytes) -> None: ... def close(self) -> None: ... - def read_events(self) -> Iterator[Tuple[str, Element]]: ... + def read_events(self) -> Iterator[tuple[str, Element]]: ... def XML(text: str | bytes, parser: XMLParser | None = ...) -> Element: ... -def XMLID(text: str | bytes, parser: XMLParser | None = ...) -> Tuple[Element, dict[str, Element]]: ... +def XMLID(text: str | bytes, parser: XMLParser | None = ...) -> tuple[Element, dict[str, Element]]: ... # This is aliased to XML in the source. fromstring = XML @@ -257,14 +353,33 @@ def fromstringlist(sequence: Sequence[str | bytes], parser: XMLParser | None = . 
# TreeBuilder is called by client code (they could pass strs, bytes or whatever); # but we don't want to use a too-broad type, or it would be too hard to write # elementfactories. -_ElementFactory = Callable[[Any, Dict[Any, Any]], Element] +_ElementFactory = Callable[[Any, dict[Any, Any]], Element] class TreeBuilder: - def __init__(self, element_factory: _ElementFactory | None = ...) -> None: ... + if sys.version_info >= (3, 8): + # comment_factory can take None because passing None to Comment is not an error + def __init__( + self, + element_factory: _ElementFactory | None = ..., + *, + comment_factory: Callable[[str | None], Element] | None = ..., + pi_factory: Callable[[str, str | None], Element] | None = ..., + insert_comments: bool = ..., + insert_pis: bool = ..., + ) -> None: ... + insert_comments: bool + insert_pis: bool + else: + def __init__(self, element_factory: _ElementFactory | None = ...) -> None: ... + def close(self) -> Element: ... def data(self, __data: str | bytes) -> None: ... def start(self, __tag: str | bytes, __attrs: dict[str | bytes, str | bytes]) -> Element: ... def end(self, __tag: str | bytes) -> Element: ... + if sys.version_info >= (3, 8): + # These two methods have pos-only parameters in the C implementation + def comment(self, __text: str | None) -> Element: ... + def pi(self, __target: str, __text: str | None = ...) -> Element: ... if sys.version_info >= (3, 8): class C14NWriterTarget: @@ -280,6 +395,12 @@ if sys.version_info >= (3, 8): exclude_attrs: Iterable[str] | None = ..., exclude_tags: Iterable[str] | None = ..., ) -> None: ... + def data(self, data: str) -> None: ... + def start_ns(self, prefix: str, uri: str) -> None: ... + def start(self, tag: str, attrs: Mapping[str, str]) -> None: ... + def end(self, tag: str) -> None: ... + def comment(self, text: str) -> None: ... + def pi(self, target: str, data: str) -> None: ... 
class XMLParser: parser: Any @@ -292,5 +413,6 @@ class XMLParser: else: def __init__(self, html: int = ..., target: Any = ..., encoding: str | None = ...) -> None: ... def doctype(self, __name: str, __pubid: str, __system: str) -> None: ... + def close(self) -> Any: ... def feed(self, __data: str | bytes) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/cElementTree.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/cElementTree.pyi index c41e2bee0eb1..02272d803c18 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/cElementTree.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/etree/cElementTree.pyi @@ -1 +1 @@ -from xml.etree.ElementTree import * # noqa: F403 +from xml.etree.ElementTree import * diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/__init__.pyi index a123e7e894e2..418164aa887f 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/__init__.pyi @@ -1,8 +1,12 @@ import sys -from typing import IO, Any, Iterable, NoReturn +from _typeshed import SupportsRead, _T_co +from typing import Any, Iterable, NoReturn, Protocol from xml.sax.handler import ContentHandler, ErrorHandler from xml.sax.xmlreader import Locator, XMLReader +class _SupportsReadClose(SupportsRead[_T_co], Protocol[_T_co]): + def close(self) -> None: ... + class SAXException(Exception): def __init__(self, msg: str, exception: Exception | None = ...) -> None: ... def getMessage(self) -> str: ... @@ -28,6 +32,8 @@ if sys.version_info >= (3, 8): else: def make_parser(parser_list: list[str] = ...) -> XMLReader: ... -def parse(source: str | IO[str] | IO[bytes], handler: ContentHandler, errorHandler: ErrorHandler = ...) -> None: ... 
+def parse( + source: str | _SupportsReadClose[bytes] | _SupportsReadClose[str], handler: ContentHandler, errorHandler: ErrorHandler = ... +) -> None: ... def parseString(string: bytes | str, handler: ContentHandler, errorHandler: ErrorHandler | None = ...) -> None: ... def _create_parser(parser_name: str) -> XMLReader: ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/xmlreader.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/xmlreader.pyi index 8afc566b16a1..684e9cef1f42 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/xmlreader.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xml/sax/xmlreader.pyi @@ -1,4 +1,4 @@ -from typing import Mapping, Tuple +from typing import Mapping class XMLReader: def __init__(self) -> None: ... @@ -64,7 +64,7 @@ class AttributesImpl: def values(self): ... class AttributesNSImpl(AttributesImpl): - def __init__(self, attrs: Mapping[Tuple[str, str], str], qnames: Mapping[Tuple[str, str], str]) -> None: ... + def __init__(self, attrs: Mapping[tuple[str, str], str], qnames: Mapping[tuple[str, str], str]) -> None: ... def getValueByQName(self, name): ... def getNameByQName(self, name): ... def getQNameByName(self, name): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/client.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/client.pyi index d0144e732bb7..0f9c21b75836 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/client.pyi @@ -6,16 +6,16 @@ from _typeshed import Self, SupportsRead, SupportsWrite from datetime import datetime from io import BytesIO from types import TracebackType -from typing import Any, Callable, Dict, Iterable, List, Mapping, Protocol, Tuple, Type, Union, overload +from typing import Any, Callable, Iterable, Mapping, Protocol, Union, overload from typing_extensions import Literal class _SupportsTimeTuple(Protocol): def timetuple(self) -> time.struct_time: ... _DateTimeComparable = Union[DateTime, datetime, str, _SupportsTimeTuple] -_Marshallable = Union[None, bool, int, float, str, bytes, Tuple[Any, ...], List[Any], Dict[Any, Any], datetime, DateTime, Binary] -_XMLDate = Union[int, datetime, Tuple[int, ...], time.struct_time] -_HostType = Union[Tuple[str, Dict[str, str]], str] +_Marshallable = Union[None, bool, int, float, str, bytes, tuple[Any, ...], list[Any], dict[Any, Any], datetime, DateTime, Binary] +_XMLDate = Union[int, datetime, tuple[int, ...], time.struct_time] +_HostType = Union[tuple[str, dict[str, str]], str] def escape(s: str) -> str: ... # undocumented @@ -63,13 +63,13 @@ def _strftime(value: _XMLDate) -> str: ... # undocumented class DateTime: value: str # undocumented - def __init__(self, value: int | str | datetime | time.struct_time | Tuple[int, ...] = ...) -> None: ... + def __init__(self, value: int | str | datetime | time.struct_time | tuple[int, ...] = ...) -> None: ... def __lt__(self, other: _DateTimeComparable) -> bool: ... def __le__(self, other: _DateTimeComparable) -> bool: ... def __gt__(self, other: _DateTimeComparable) -> bool: ... def __ge__(self, other: _DateTimeComparable) -> bool: ... 
- def __eq__(self, other: _DateTimeComparable) -> bool: ... # type: ignore - def make_comparable(self, other: _DateTimeComparable) -> Tuple[str, str]: ... # undocumented + def __eq__(self, other: _DateTimeComparable) -> bool: ... # type: ignore[override] + def make_comparable(self, other: _DateTimeComparable) -> tuple[str, str]: ... # undocumented def timetuple(self) -> time.struct_time: ... # undocumented def decode(self, data: Any) -> None: ... def encode(self, out: SupportsWrite[str]) -> None: ... @@ -83,10 +83,11 @@ class Binary: def __init__(self, data: bytes | None = ...) -> None: ... def decode(self, data: bytes) -> None: ... def encode(self, out: SupportsWrite[str]) -> None: ... + def __eq__(self, other: object) -> bool: ... def _binary(data: bytes) -> Binary: ... # undocumented -WRAPPERS: Tuple[Type[DateTime], Type[Binary]] # undocumented +WRAPPERS: tuple[type[DateTime], type[Binary]] # undocumented class ExpatParser: # undocumented def __init__(self, target: Unmarshaller) -> None: ... @@ -96,7 +97,7 @@ class ExpatParser: # undocumented class Marshaller: dispatch: dict[ - Type[Any], Callable[[Marshaller, Any, Callable[[str], Any]], None] + type[Any], Callable[[Marshaller, Any, Callable[[str], Any]], None] ] # TODO: Replace 'Any' with some kind of binding memo: dict[Any, None] @@ -135,7 +136,7 @@ class Unmarshaller: _use_datetime: bool _use_builtin_types: bool def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... - def close(self) -> Tuple[_Marshallable, ...]: ... + def close(self) -> tuple[_Marshallable, ...]: ... def getmethodname(self) -> str | None: ... def xml(self, encoding: str, standalone: Any) -> None: ... # Standalone is ignored def start(self, tag: str, attrs: dict[str, str]) -> None: ... 
@@ -159,9 +160,9 @@ class Unmarshaller: class _MultiCallMethod: # undocumented - __call_list: list[Tuple[str, Tuple[_Marshallable, ...]]] + __call_list: list[tuple[str, tuple[_Marshallable, ...]]] __name: str - def __init__(self, call_list: list[Tuple[str, _Marshallable]], name: str) -> None: ... + def __init__(self, call_list: list[tuple[str, _Marshallable]], name: str) -> None: ... def __getattr__(self, name: str) -> _MultiCallMethod: ... def __call__(self, *args: _Marshallable) -> None: ... @@ -174,7 +175,7 @@ class MultiCallIterator: # undocumented class MultiCall: __server: ServerProxy - __call_list: list[Tuple[str, Tuple[_Marshallable, ...]]] + __call_list: list[tuple[str, tuple[_Marshallable, ...]]] def __init__(self, server: ServerProxy) -> None: ... def __getattr__(self, item: str) -> _MultiCallMethod: ... def __call__(self) -> MultiCallIterator: ... @@ -184,15 +185,15 @@ FastMarshaller: Marshaller | None FastParser: ExpatParser | None FastUnmarshaller: Unmarshaller | None -def getparser(use_datetime: bool = ..., use_builtin_types: bool = ...) -> Tuple[ExpatParser, Unmarshaller]: ... +def getparser(use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[ExpatParser, Unmarshaller]: ... def dumps( - params: Fault | Tuple[_Marshallable, ...], + params: Fault | tuple[_Marshallable, ...], methodname: str | None = ..., methodresponse: bool | None = ..., encoding: str | None = ..., allow_none: bool = ..., ) -> str: ... -def loads(data: str, use_datetime: bool = ..., use_builtin_types: bool = ...) -> Tuple[Tuple[_Marshallable, ...], str | None]: ... +def loads(data: str, use_datetime: bool = ..., use_builtin_types: bool = ...) -> tuple[tuple[_Marshallable, ...], str | None]: ... def gzip_encode(data: bytes) -> bytes: ... # undocumented def gzip_decode(data: bytes, max_decode: int = ...) -> bytes: ... 
# undocumented @@ -204,9 +205,9 @@ class GzipDecodedResponse(gzip.GzipFile): # undocumented class _Method: # undocumented - __send: Callable[[str, Tuple[_Marshallable, ...]], _Marshallable] + __send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable] __name: str - def __init__(self, send: Callable[[str, Tuple[_Marshallable, ...]], _Marshallable], name: str) -> None: ... + def __init__(self, send: Callable[[str, tuple[_Marshallable, ...]], _Marshallable], name: str) -> None: ... def __getattr__(self, name: str) -> _Method: ... def __call__(self, *args: _Marshallable) -> _Marshallable: ... @@ -218,28 +219,29 @@ class Transport: _use_datetime: bool _use_builtin_types: bool - _connection: Tuple[_HostType | None, http.client.HTTPConnection | None] - _headers: list[Tuple[str, str]] - _extra_headers: list[Tuple[str, str]] + _connection: tuple[_HostType | None, http.client.HTTPConnection | None] + _headers: list[tuple[str, str]] + _extra_headers: list[tuple[str, str]] if sys.version_info >= (3, 8): def __init__( - self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, headers: Iterable[Tuple[str, str]] = ... + self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, headers: Iterable[tuple[str, str]] = ... ) -> None: ... else: def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ...) -> None: ... - def request(self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ...) -> Tuple[_Marshallable, ...]: ... + + def request(self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ...) -> tuple[_Marshallable, ...]: ... def single_request( self, host: _HostType, handler: str, request_body: bytes, verbose: bool = ... - ) -> Tuple[_Marshallable, ...]: ... - def getparser(self) -> Tuple[ExpatParser, Unmarshaller]: ... - def get_host_info(self, host: _HostType) -> Tuple[str, list[Tuple[str, str]], dict[str, str]]: ... + ) -> tuple[_Marshallable, ...]: ... 
+ def getparser(self) -> tuple[ExpatParser, Unmarshaller]: ... + def get_host_info(self, host: _HostType) -> tuple[str, list[tuple[str, str]], dict[str, str]]: ... def make_connection(self, host: _HostType) -> http.client.HTTPConnection: ... def close(self) -> None: ... def send_request(self, host: _HostType, handler: str, request_body: bytes, debug: bool) -> http.client.HTTPConnection: ... - def send_headers(self, connection: http.client.HTTPConnection, headers: list[Tuple[str, str]]) -> None: ... + def send_headers(self, connection: http.client.HTTPConnection, headers: list[tuple[str, str]]) -> None: ... def send_content(self, connection: http.client.HTTPConnection, request_body: bytes) -> None: ... - def parse_response(self, response: http.client.HTTPResponse) -> Tuple[_Marshallable, ...]: ... + def parse_response(self, response: http.client.HTTPResponse) -> tuple[_Marshallable, ...]: ... class SafeTransport(Transport): @@ -249,11 +251,12 @@ class SafeTransport(Transport): use_datetime: bool = ..., use_builtin_types: bool = ..., *, - headers: Iterable[Tuple[str, str]] = ..., + headers: Iterable[tuple[str, str]] = ..., context: Any | None = ..., ) -> None: ... else: def __init__(self, use_datetime: bool = ..., use_builtin_types: bool = ..., *, context: Any | None = ...) -> None: ... + def make_connection(self, host: _HostType) -> http.client.HTTPSConnection: ... class ServerProxy: @@ -276,7 +279,7 @@ class ServerProxy: use_datetime: bool = ..., use_builtin_types: bool = ..., *, - headers: Iterable[Tuple[str, str]] = ..., + headers: Iterable[tuple[str, str]] = ..., context: Any | None = ..., ) -> None: ... else: @@ -292,6 +295,7 @@ class ServerProxy: *, context: Any | None = ..., ) -> None: ... + def __getattr__(self, name: str) -> _Method: ... @overload def __call__(self, attr: Literal["close"]) -> Callable[[], None]: ... @@ -301,9 +305,9 @@ class ServerProxy: def __call__(self, attr: str) -> Callable[[], None] | Transport: ... 
def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def __close(self) -> None: ... # undocumented - def __request(self, methodname: str, params: Tuple[_Marshallable, ...]) -> Tuple[_Marshallable, ...]: ... # undocumented + def __request(self, methodname: str, params: tuple[_Marshallable, ...]) -> tuple[_Marshallable, ...]: ... # undocumented Server = ServerProxy diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/server.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/server.pyi index 3240181a0467..0a07424949df 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xmlrpc/server.pyi @@ -3,11 +3,11 @@ import pydoc import socketserver import sys from datetime import datetime -from typing import Any, Callable, Dict, Iterable, List, Mapping, Pattern, Protocol, Tuple, Type, Union +from typing import Any, Callable, Iterable, Mapping, Pattern, Protocol, Union from xmlrpc.client import Fault # TODO: Recursive type on tuple, list, dict -_Marshallable = Union[None, bool, int, float, str, bytes, Tuple[Any, ...], List[Any], Dict[Any, Any], datetime] +_Marshallable = Union[None, bool, int, float, str, bytes, tuple[Any, ...], list[Any], dict[Any, Any], datetime] # The dispatch accepts anywhere from 0 to N arguments, no easy way to allow this in mypy class _DispatchArity0(Protocol): @@ -48,12 +48,13 @@ class SimpleXMLRPCDispatcher: # undocumented def register_function(self, function: _DispatchProtocol | None = ..., name: str | None = ...) -> Callable[..., Any]: ... else: def register_function(self, function: _DispatchProtocol, name: str | None = ...) -> Callable[..., Any]: ... + def register_introspection_functions(self) -> None: ... 
def register_multicall_functions(self) -> None: ... def _marshaled_dispatch( self, data: str, - dispatch_method: Callable[[str | None, Tuple[_Marshallable, ...]], Fault | Tuple[_Marshallable, ...]] | None = ..., + dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = ..., path: Any | None = ..., ) -> str: ... # undocumented def system_listMethods(self) -> list[str]: ... # undocumented @@ -64,7 +65,7 @@ class SimpleXMLRPCDispatcher: # undocumented class SimpleXMLRPCRequestHandler(http.server.BaseHTTPRequestHandler): - rpc_paths: Tuple[str, str] + rpc_paths: tuple[str, str] encode_threshold: int # undocumented aepattern: Pattern[str] # undocumented def accept_encodings(self) -> dict[str, float]: ... @@ -80,8 +81,8 @@ class SimpleXMLRPCServer(socketserver.TCPServer, SimpleXMLRPCDispatcher): _send_traceback_handler: bool def __init__( self, - addr: Tuple[str, int], - requestHandler: Type[SimpleXMLRPCRequestHandler] = ..., + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., logRequests: bool = ..., allow_none: bool = ..., encoding: str | None = ..., @@ -96,8 +97,8 @@ class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented encoding: str def __init__( self, - addr: Tuple[str, int], - requestHandler: Type[SimpleXMLRPCRequestHandler] = ..., + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., logRequests: bool = ..., allow_none: bool = ..., encoding: str | None = ..., @@ -109,7 +110,7 @@ class MultiPathXMLRPCServer(SimpleXMLRPCServer): # undocumented def _marshaled_dispatch( self, data: str, - dispatch_method: Callable[[str | None, Tuple[_Marshallable, ...]], Fault | Tuple[_Marshallable, ...]] | None = ..., + dispatch_method: Callable[[str | None, tuple[_Marshallable, ...]], Fault | tuple[_Marshallable, ...]] | None = ..., path: Any | None = ..., ) -> str: ... 
@@ -120,7 +121,16 @@ class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher): def handle_request(self, request_text: str | None = ...) -> None: ... class ServerHTMLDoc(pydoc.HTMLDoc): # undocumented - def docroutine(self, object: object, name: str, mod: str | None = ..., funcs: Mapping[str, str] = ..., classes: Mapping[str, str] = ..., methods: Mapping[str, str] = ..., cl: type | None = ...) -> str: ... # type: ignore + def docroutine( # type: ignore[override] + self, + object: object, + name: str, + mod: str | None = ..., + funcs: Mapping[str, str] = ..., + classes: Mapping[str, str] = ..., + methods: Mapping[str, str] = ..., + cl: type | None = ..., + ) -> str: ... def docserver(self, server_name: str, package_documentation: str, methods: dict[str, str]) -> str: ... class XMLRPCDocGenerator: # undocumented @@ -140,8 +150,8 @@ class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler): class DocXMLRPCServer(SimpleXMLRPCServer, XMLRPCDocGenerator): def __init__( self, - addr: Tuple[str, int], - requestHandler: Type[SimpleXMLRPCRequestHandler] = ..., + addr: tuple[str, int], + requestHandler: type[SimpleXMLRPCRequestHandler] = ..., logRequests: bool = ..., allow_none: bool = ..., encoding: str | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/xxlimited.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/xxlimited.pyi index 0dddbb876638..b2fb72ad2c0b 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/xxlimited.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/xxlimited.pyi @@ -1,8 +1,10 @@ import sys from typing import Any +from typing_extensions import final class Str: ... +@final class Xxo: def demo(self) -> None: ... @@ -15,4 +17,5 @@ if sys.version_info >= (3, 10): else: class error: ... class Null: ... + def roj(__b: Any) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zipapp.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zipapp.pyi index 581d2b72a664..17c3503ad6eb 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zipapp.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zipapp.pyi @@ -2,6 +2,8 @@ import sys from pathlib import Path from typing import BinaryIO, Callable, Union +__all__ = ["ZipAppError", "create_archive", "get_interpreter"] + _Path = Union[str, Path, BinaryIO] class ZipAppError(ValueError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zipfile.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zipfile.pyi index 2335428549dc..3db178dceca5 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zipfile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zipfile.pyi @@ -1,12 +1,44 @@ import io import sys -from _typeshed import Self, StrPath +from _typeshed import Self, StrOrBytesPath, StrPath from os import PathLike from types import TracebackType -from typing import IO, Any, Callable, Iterable, Iterator, Protocol, Sequence, Tuple, Type, overload +from typing import IO, Any, Callable, Iterable, Iterator, Protocol, Sequence, overload from typing_extensions import Literal -_DateTuple = Tuple[int, int, int, int, int, int] +if sys.version_info >= (3, 8): + __all__ = [ + "BadZipFile", + "BadZipfile", + "error", + "ZIP_STORED", + "ZIP_DEFLATED", + "ZIP_BZIP2", + "ZIP_LZMA", + "is_zipfile", + "ZipInfo", + "ZipFile", + "PyZipFile", + "LargeZipFile", + "Path", + ] +else: + __all__ = [ + "BadZipFile", + "BadZipfile", + "error", + "ZIP_STORED", + "ZIP_DEFLATED", + "ZIP_BZIP2", + "ZIP_LZMA", + "is_zipfile", + "ZipInfo", + "ZipFile", + "PyZipFile", + "LargeZipFile", + ] + +_DateTuple = tuple[int, int, int, int, int, int] _ReadWriteMode = Literal["r", "w"] _ReadWriteBinaryMode = Literal["r", "w", "rb", "wb"] _ZipFileMode = Literal["r", "w", "x", "a"] @@ -25,6 +57,12 @@ class 
_ZipStream(Protocol): # def tell(self) -> int: ... # def seek(self, __n: int) -> object: ... +# Stream shape as required by _EndRecData() and _EndRecData64(). +class _SupportsReadSeekTell(Protocol): + def read(self, __n: int = ...) -> bytes: ... + def seek(self, __cookie: int, __whence: int) -> object: ... + def tell(self) -> int: ... + class _ClosableZipStream(_ZipStream, Protocol): def close(self) -> object: ... @@ -96,11 +134,11 @@ class ZipExtFile(io.BufferedIOBase): decrypter: Callable[[Sequence[int]], bytes] | None = ..., close_fileobj: Literal[False] = ..., ) -> None: ... + def read(self, n: int | None = ...) -> bytes: ... - def readline(self, limit: int = ...) -> bytes: ... # type: ignore - def __repr__(self) -> str: ... + def readline(self, limit: int = ...) -> bytes: ... # type: ignore[override] def peek(self, n: int = ...) -> bytes: ... - def read1(self, n: int | None) -> bytes: ... # type: ignore + def read1(self, n: int | None) -> bytes: ... # type: ignore[override] if sys.version_info >= (3, 7): def seek(self, offset: int, whence: int = ...) -> int: ... @@ -143,9 +181,10 @@ class ZipFile: def __init__( self, file: StrPath | IO[bytes], mode: _ZipFileMode = ..., compression: int = ..., allowZip64: bool = ... ) -> None: ... + def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... def close(self) -> None: ... def getinfo(self, name: str) -> ZipInfo: ... @@ -211,10 +250,13 @@ class ZipInfo: def __init__(self, filename: str = ..., date_time: _DateTuple = ...) -> None: ... if sys.version_info >= (3, 8): @classmethod - def from_file(cls, filename: StrPath, arcname: StrPath | None = ..., *, strict_timestamps: bool = ...) -> ZipInfo: ... 
+ def from_file( + cls: type[Self], filename: StrPath, arcname: StrPath | None = ..., *, strict_timestamps: bool = ... + ) -> Self: ... else: @classmethod - def from_file(cls, filename: StrPath, arcname: StrPath | None = ...) -> ZipInfo: ... + def from_file(cls: type[Self], filename: StrPath, arcname: StrPath | None = ...) -> Self: ... + def is_dir(self) -> bool: ... def FileHeader(self, zip64: bool | None = ...) -> bytes: ... @@ -230,12 +272,14 @@ if sys.version_info >= (3, 8): if sys.version_info >= (3, 10): @property def filename(self) -> PathLike[str]: ... # undocumented + def __init__(self, root: ZipFile | StrPath | IO[bytes], at: str = ...) -> None: ... if sys.version_info >= (3, 9): def open(self, mode: _ReadWriteBinaryMode = ..., *args: Any, pwd: bytes | None = ..., **kwargs: Any) -> IO[bytes]: ... else: @property def open(self) -> _PathOpenProtocol: ... + def iterdir(self) -> Iterator[Path]: ... def is_dir(self) -> bool: ... def is_file(self) -> bool: ... @@ -253,9 +297,10 @@ if sys.version_info >= (3, 8): def joinpath(self, *other: StrPath) -> Path: ... else: def joinpath(self, add: StrPath) -> Path: ... # undocumented + def __truediv__(self, add: StrPath) -> Path: ... -def is_zipfile(filename: StrPath | IO[bytes]) -> bool: ... +def is_zipfile(filename: StrOrBytesPath | _SupportsReadSeekTell) -> bool: ... 
ZIP_STORED: int ZIP_DEFLATED: int diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zipimport.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zipimport.pyi index 8ca97bf69845..a0e6d9e258dc 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zipimport.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zipimport.pyi @@ -1,24 +1,32 @@ import os import sys +from importlib.machinery import ModuleSpec from types import CodeType, ModuleType -from typing import Any, Tuple +from typing import Any if sys.version_info >= (3, 7): from importlib.abc import ResourceReader +if sys.version_info >= (3, 8): + __all__ = ["ZipImportError", "zipimporter"] + class ZipImportError(ImportError): ... -class zipimporter(object): +class zipimporter: archive: str prefix: str def __init__(self, path: str | bytes | os.PathLike[Any]) -> None: ... - def find_loader(self, fullname: str, path: str | None = ...) -> Tuple[zipimporter | None, list[str]]: ... # undocumented + def find_loader(self, fullname: str, path: str | None = ...) -> tuple[zipimporter | None, list[str]]: ... # undocumented def find_module(self, fullname: str, path: str | None = ...) -> zipimporter | None: ... def get_code(self, fullname: str) -> CodeType: ... def get_data(self, pathname: str) -> str: ... def get_filename(self, fullname: str) -> str: ... if sys.version_info >= (3, 7): def get_resource_reader(self, fullname: str) -> ResourceReader | None: ... # undocumented + def get_source(self, fullname: str) -> str | None: ... def is_package(self, fullname: str) -> bool: ... def load_module(self, fullname: str) -> ModuleType: ... + if sys.version_info >= (3, 10): + def find_spec(self, fullname: str, target: ModuleType | None = ...) -> ModuleSpec | None: ... + def invalidate_caches(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi index 5acc4190f1fe..cfd6784bb771 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zlib.pyi @@ -1,24 +1,30 @@ +import sys from array import array from typing import Any +from typing_extensions import Literal -DEFLATED: int -DEF_MEM_LEVEL: int +DEFLATED: Literal[8] +DEF_MEM_LEVEL: int # can change +DEF_BUF_SIZE: Literal[16384] MAX_WBITS: int -ZLIB_VERSION: str -Z_BEST_COMPRESSION: int -Z_BEST_SPEED: int -Z_DEFAULT_COMPRESSION: int -Z_DEFAULT_STRATEGY: int -Z_FILTERED: int -Z_FINISH: int -Z_FIXED: int -Z_FULL_FLUSH: int -Z_HUFFMAN_ONLY: int -Z_NO_FLUSH: int -Z_RLE: int -Z_SYNC_FLUSH: int -DEF_BUF_SIZE: int -ZLIB_RUNTIME_VERSION: str +ZLIB_VERSION: str # can change +ZLIB_RUNTIME_VERSION: str # can change +Z_NO_COMPRESSION: Literal[0] +Z_PARTIAL_FLUSH: Literal[1] +Z_BEST_COMPRESSION: Literal[9] +Z_BEST_SPEED: Literal[1] +Z_BLOCK: Literal[5] +Z_DEFAULT_COMPRESSION: Literal[-1] +Z_DEFAULT_STRATEGY: Literal[0] +Z_FILTERED: Literal[1] +Z_FINISH: Literal[4] +Z_FIXED: Literal[4] +Z_FULL_FLUSH: Literal[3] +Z_HUFFMAN_ONLY: Literal[2] +Z_NO_FLUSH: Literal[0] +Z_RLE: Literal[3] +Z_SYNC_FLUSH: Literal[2] +Z_TREES: Literal[6] class error(Exception): ... @@ -36,7 +42,13 @@ class _Decompress: def copy(self) -> _Decompress: ... def adler32(__data: bytes, __value: int = ...) -> int: ... -def compress(__data: bytes, level: int = ...) -> bytes: ... + +if sys.version_info >= (3, 11): + def compress(__data: bytes, level: int = ..., wbits: int = ...) -> bytes: ... + +else: + def compress(__data: bytes, level: int = ...) -> bytes: ... + def compressobj( level: int = ..., method: int = ..., wbits: int = ..., memLevel: int = ..., strategy: int = ..., zdict: bytes | None = ... ) -> _Compress: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stdlib/zoneinfo/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stdlib/zoneinfo/__init__.pyi index 2a188c7d0477..d766eab6b7ef 100644 --- a/packages/pyright-internal/typeshed-fallback/stdlib/zoneinfo/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stdlib/zoneinfo/__init__.pyi @@ -1,9 +1,8 @@ -import typing -from _typeshed import StrPath +from _typeshed import Self, StrPath from datetime import tzinfo -from typing import Any, Iterable, Protocol, Sequence, Set, Type +from typing import Any, Iterable, Protocol, Sequence -_T = typing.TypeVar("_T", bound="ZoneInfo") +__all__ = ["ZoneInfo", "reset_tzpath", "available_timezones", "TZPATH", "ZoneInfoNotFoundError", "InvalidTZPathWarning"] class _IOBytes(Protocol): def read(self, __size: int) -> bytes: ... @@ -14,9 +13,9 @@ class ZoneInfo(tzinfo): def key(self) -> str: ... def __init__(self, key: str) -> None: ... @classmethod - def no_cache(cls: Type[_T], key: str) -> _T: ... + def no_cache(cls: type[Self], key: str) -> Self: ... @classmethod - def from_file(cls: Type[_T], __fobj: _IOBytes, key: str | None = ...) -> _T: ... + def from_file(cls: type[Self], __fobj: _IOBytes, key: str | None = ...) -> Self: ... @classmethod def clear_cache(cls, *, only_keys: Iterable[str] = ...) -> None: ... @@ -24,7 +23,7 @@ class ZoneInfo(tzinfo): # a sequence of strings is required. This should be remedied if a solution # to this typing bug is found: https://github.com/python/typing/issues/256 def reset_tzpath(to: Sequence[StrPath] | None = ...) -> None: ... -def available_timezones() -> Set[str]: ... +def available_timezones() -> set[str]: ... 
TZPATH: Sequence[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/METADATA.toml index 1fbfc0a8e92d..f522021080f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/METADATA.toml @@ -1,3 +1,2 @@ -version = "0.1" -python2 = true +version = "1.2.*" requires = ["types-python-dateutil"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/datetimerange/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/datetimerange/__init__.pyi index b38cad3bf897..94e3179cc967 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/datetimerange/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/DateTimeRange/datetimerange/__init__.pyi @@ -1,9 +1,10 @@ import datetime +from _typeshed import Self from typing import Iterable from dateutil.relativedelta import relativedelta -class DateTimeRange(object): +class DateTimeRange: NOT_A_TIME_STR: str start_time_format: str end_time_format: str @@ -19,9 +20,9 @@ class DateTimeRange(object): def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... def __add__(self, other: datetime.timedelta) -> DateTimeRange: ... - def __iadd__(self, other: datetime.timedelta) -> DateTimeRange: ... + def __iadd__(self: Self, other: datetime.timedelta) -> Self: ... def __sub__(self, other: datetime.timedelta) -> DateTimeRange: ... - def __isub__(self, other: datetime.timedelta) -> DateTimeRange: ... + def __isub__(self: Self, other: datetime.timedelta) -> Self: ... def __contains__(self, x: datetime.timedelta | DateTimeRange | str) -> bool: ... @property def start_datetime(self) -> datetime.datetime: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml index 9f4230ca4525..1f0b2e4bf4a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/METADATA.toml @@ -1,3 +1,3 @@ -version = "1.2" +version = "1.2.*" python2 = true requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/classic.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/classic.pyi index b6f546235047..160968742162 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/classic.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/classic.pyi @@ -1,4 +1,4 @@ -from typing import Any, Callable, Type, TypeVar, overload +from typing import Any, Callable, TypeVar, overload from typing_extensions import Literal _F = TypeVar("_F", bound=Callable[..., Any]) @@ -8,9 +8,9 @@ class ClassicAdapter: reason: str version: str action: _Actions | None - category: Type[Warning] + category: type[Warning] def __init__( - self, reason: str = ..., version: str = ..., action: _Actions | None = ..., category: Type[Warning] = ... + self, reason: str = ..., version: str = ..., action: _Actions | None = ..., category: type[Warning] = ... ) -> None: ... def get_deprecated_msg(self, wrapped: Callable[..., Any], instance: object) -> str: ... def __call__(self, wrapped: _F) -> Callable[[_F], _F]: ... @@ -19,5 +19,5 @@ class ClassicAdapter: def deprecated(__wrapped: _F) -> _F: ... @overload def deprecated( - reason: str = ..., *, version: str = ..., action: _Actions | None = ..., category: Type[Warning] | None = ... + reason: str = ..., *, version: str = ..., action: _Actions | None = ..., category: type[Warning] | None = ... ) -> Callable[[_F], _F]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/sphinx.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/sphinx.pyi index e5acd9406e68..7e619964542c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/sphinx.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Deprecated/deprecated/sphinx.pyi @@ -1,4 +1,4 @@ -from typing import Any, Callable, Type, TypeVar +from typing import Any, Callable, TypeVar from typing_extensions import Literal from .classic import ClassicAdapter, _Actions @@ -10,14 +10,14 @@ class SphinxAdapter(ClassicAdapter): reason: str version: str action: _Actions | None - category: Type[Warning] + category: type[Warning] def __init__( self, directive: Literal["versionadded", "versionchanged", "deprecated"], reason: str = ..., version: str = ..., action: _Actions | None = ..., - category: Type[Warning] = ..., + category: type[Warning] = ..., ) -> None: ... def __call__(self, wrapped: _F) -> Callable[[_F], _F]: ... @@ -29,5 +29,5 @@ def deprecated( line_length: int = ..., *, action: _Actions | None = ..., - category: Type[Warning] | None = ..., + category: type[Warning] | None = ..., ) -> Callable[[_F], _F]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/METADATA.toml new file mode 100644 index 000000000000..4a8e90c0c89d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/METADATA.toml @@ -0,0 +1 @@ +version = "3.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/__init__.pyi new file mode 100644 index 000000000000..b0962a7393b2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/__init__.pyi @@ -0,0 +1,7 @@ +from logging import Logger + +from .decorator import cross_origin as cross_origin +from .extension import CORS as CORS +from .version import __version__ as __version__ + +rootlogger: Logger diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/core.pyi new file mode 100644 index 000000000000..b5d3c05691f3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/core.pyi @@ -0,0 +1,62 @@ +from datetime import timedelta +from logging import Logger +from typing import Any, Iterable, Pattern, TypeVar, overload +from typing_extensions import TypedDict + +_IterableT = TypeVar("_IterableT", bound=Iterable[Any]) +_T = TypeVar("_T") +_App = Any # flask is not part of typeshed +_Response = Any # flask is not part of typeshed +_MultiDict = Any # werkzeug is not part of typeshed + +class _Options(TypedDict, total=False): + resources: dict[str, dict[str, Any]] | list[str] | str | None + origins: str | list[str] | None + methods: str | list[str] | None + expose_headers: str | list[str] | None + allow_headers: str | list[str] | None + supports_credentials: bool | None + max_age: timedelta | int | str | None + send_wildcard: bool | None + 
vary_header: bool | None + automatic_options: bool | None + intercept_exceptions: bool | None + always_send: bool | None + +LOG: Logger +ACL_ORIGIN: str +ACL_METHODS: str +ACL_ALLOW_HEADERS: str +ACL_EXPOSE_HEADERS: str +ACL_CREDENTIALS: str +ACL_MAX_AGE: str +ACL_REQUEST_METHOD: str +ACL_REQUEST_HEADERS: str +ALL_METHODS: list[str] +CONFIG_OPTIONS: list[str] +FLASK_CORS_EVALUATED: str +RegexObject = Pattern[str] +DEFAULT_OPTIONS: _Options + +def parse_resources(resources: dict[str, _Options] | Iterable[str] | str | Pattern[str]) -> list[tuple[str, _Options]]: ... +def get_regexp_pattern(regexp: str | Pattern[str]) -> str: ... +def get_cors_origins(options: _Options, request_origin: str | None) -> list[str] | None: ... +def get_allow_headers(options: _Options, acl_request_headers: str | None) -> str | None: ... +def get_cors_headers(options: _Options, request_headers: dict[str, Any], request_method: str) -> _MultiDict: ... +def set_cors_headers(resp: _Response, options: _Options) -> _Response: ... +def probably_regex(maybe_regex: str | Pattern[str]) -> bool: ... +def re_fix(reg: str) -> str: ... +def try_match_any(inst: str, patterns: Iterable[str | Pattern[str]]) -> bool: ... +def try_match(request_origin: str, maybe_regex: str | Pattern[str]) -> bool: ... +def get_cors_options(appInstance: _App | None, *dicts: _Options) -> _Options: ... +def get_app_kwarg_dict(appInstance: _App | None = ...) -> _Options: ... +def flexible_str(obj: object) -> str | None: ... +def serialize_option(options_dict: _Options, key: str, upper: bool = ...) -> None: ... +@overload +def ensure_iterable(inst: str) -> list[str]: ... # type: ignore +@overload +def ensure_iterable(inst: _IterableT) -> _IterableT: ... # type: ignore +@overload +def ensure_iterable(inst: _T) -> list[_T]: ... +def sanitize_regex_param(param: str | list[str]) -> list[str]: ... +def serialize_options(opts: _Options) -> _Options: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/decorator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/decorator.pyi new file mode 100644 index 000000000000..86808c1fe6d7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/decorator.pyi @@ -0,0 +1,21 @@ +from datetime import timedelta +from logging import Logger +from typing import Any, Callable +from typing_extensions import ParamSpec + +_P = ParamSpec("_P") + +LOG: Logger + +def cross_origin( + *args: Any, + origins: str | list[str] | None = ..., + methods: str | list[str] | None = ..., + expose_headers: str | list[str] | None = ..., + allow_headers: str | list[str] | None = ..., + supports_credentials: bool | None = ..., + max_age: timedelta | int | str | None = ..., + send_wildcard: bool | None = ..., + vary_header: bool | None = ..., + automatic_options: bool | None = ..., +) -> Callable[[Callable[_P, Any]], Callable[_P, Any]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/extension.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/extension.pyi new file mode 100644 index 000000000000..4e2a630a79bd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/extension.pyi @@ -0,0 +1,41 @@ +from datetime import timedelta +from logging import Logger +from typing import Any, Callable, Iterable + +_App = Any # flask is not part of typeshed + +LOG: Logger + +class CORS: + def __init__( + self, + app: Any | None = ..., + *, + resources: dict[str, dict[str, Any]] | list[str] | str | None = ..., + origins: str | list[str] | None = ..., + methods: str | list[str] | None = ..., + expose_headers: str | list[str] | None = ..., + allow_headers: str | list[str] | None = ..., + supports_credentials: bool | None = ..., + max_age: timedelta | int | str | None = ..., + send_wildcard: bool | None = ..., + vary_header: bool | None = ..., + **kwargs: Any, + ) -> None: ... + def init_app( + self, + app: _App, + *, + resources: dict[str, dict[str, Any]] | list[str] | str = ..., + origins: str | list[str] = ..., + methods: str | list[str] = ..., + expose_headers: str | list[str] = ..., + allow_headers: str | list[str] = ..., + supports_credentials: bool = ..., + max_age: timedelta | int | str | None = ..., + send_wildcard: bool = ..., + vary_header: bool = ..., + **kwargs: Any, + ) -> None: ... + +def make_after_request_function(resources: Iterable[tuple[str, dict[str, Any]]]) -> Callable[..., Any]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/version.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/version.pyi new file mode 100644 index 000000000000..bda5b5a7f4cc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-Cors/flask_cors/version.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/METADATA.toml new file mode 100644 index 000000000000..43ea0392c659 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/METADATA.toml @@ -0,0 +1 @@ +version = "2.5.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/__init__.pyi new file mode 100644 index 000000000000..c8cbad0716d6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/__init__.pyi @@ -0,0 +1,89 @@ +from typing import Any + +from . import utils as utils +from .model import DefaultMeta as DefaultMeta, Model as Model + +# SQLAlchemy is not part of typeshed +_Query = Any +_SessionBase = Any + +models_committed: Any +before_models_committed: Any + +class SignallingSession(_SessionBase): + app: Any + def __init__(self, db, autocommit: bool = ..., autoflush: bool = ..., **options) -> None: ... + def get_bind(self, mapper: Any | None = ..., clause: Any | None = ...): ... + +def get_debug_queries(): ... + +class Pagination: + query: Any + page: Any + per_page: Any + total: Any + items: Any + def __init__(self, query, page, per_page, total, items) -> None: ... + @property + def pages(self): ... + def prev(self, error_out: bool = ...): ... + @property + def prev_num(self): ... + @property + def has_prev(self): ... 
+ def next(self, error_out: bool = ...): ... + @property + def has_next(self): ... + @property + def next_num(self): ... + def iter_pages( + self, left_edge: int = ..., left_current: int = ..., right_current: int = ..., right_edge: int = ... + ) -> None: ... + +class BaseQuery(_Query): + def get_or_404(self, ident, description: Any | None = ...): ... + def first_or_404(self, description: Any | None = ...): ... + def paginate( + self, page: Any | None = ..., per_page: Any | None = ..., error_out: bool = ..., max_per_page: Any | None = ... + ): ... + +def get_state(app): ... + +class SQLAlchemy: + Query: Any + use_native_unicode: Any + session: Any + Model: Any + app: Any + def __init__( + self, + app: Any | None = ..., + use_native_unicode: bool = ..., + session_options: Any | None = ..., + metadata: Any | None = ..., + query_class=..., + model_class=..., + engine_options: Any | None = ..., + ) -> None: ... + @property + def metadata(self): ... + def create_scoped_session(self, options: Any | None = ...): ... + def create_session(self, options): ... + def make_declarative_base(self, model, metadata: Any | None = ...): ... + def init_app(self, app): ... + def apply_pool_defaults(self, app, options): ... + def apply_driver_hacks(self, app, sa_url, options): ... + @property + def engine(self): ... + def make_connector(self, app: Any | None = ..., bind: Any | None = ...): ... + def get_engine(self, app: Any | None = ..., bind: Any | None = ...): ... + def create_engine(self, sa_url, engine_opts): ... + def get_app(self, reference_app: Any | None = ...): ... + def get_tables_for_bind(self, bind: Any | None = ...): ... + def get_binds(self, app: Any | None = ...): ... + def create_all(self, bind: str = ..., app: Any | None = ...) -> None: ... + def drop_all(self, bind: str = ..., app: Any | None = ...) -> None: ... + def reflect(self, bind: str = ..., app: Any | None = ...) -> None: ... + def __getattr__(self, name: str) -> Any: ... 
# exposes dynamically classes of SQLAlchemy + +class FSADeprecationWarning(DeprecationWarning): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/model.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/model.pyi new file mode 100644 index 000000000000..7909be37691f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/model.pyi @@ -0,0 +1,22 @@ +from typing import Any, Pattern + +_DeclarativeMeta = Any # SQLAlchemy is not part of typeshed + +def should_set_tablename(cls: type) -> bool: ... + +camelcase_re: Pattern[str] + +def camel_to_snake_case(name: str) -> str: ... + +class NameMetaMixin(type): + def __init__(cls, name, bases, d) -> None: ... + def __table_cls__(cls, *args, **kwargs): ... + +class BindMetaMixin(type): + def __init__(cls, name, bases, d) -> None: ... + +class DefaultMeta(NameMetaMixin, BindMetaMixin, _DeclarativeMeta): ... + +class Model: + query_class: Any | None + query: Any | None diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/utils.pyi new file mode 100644 index 000000000000..190d72483895 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/Flask-SQLAlchemy/flask_sqlalchemy/utils.pyi @@ -0,0 +1,3 @@ +def parse_version(v: str) -> tuple[int, int, int]: ... +def sqlalchemy_version(op: str, val: str) -> bool: ... +def engine_config_warning(config, version: str, deprecated_config_key: str, engine_option) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Flask/METADATA.toml deleted file mode 100644 index cd91d47eebb4..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/METADATA.toml +++ /dev/null @@ -1,4 +0,0 @@ -version = "1.1" -python2 = true -requires = ["types-Jinja2", "types-Werkzeug", "types-click"] -obsolete_since = "2.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/__init__.pyi deleted file mode 100644 index 4e66d4d02191..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/__init__.pyi +++ /dev/null @@ -1,41 +0,0 @@ -from jinja2 import Markup as Markup, escape as escape -from werkzeug.exceptions import abort as abort -from werkzeug.utils import redirect as redirect - -from .app import Flask as Flask -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import ( - after_this_request as after_this_request, - copy_current_request_context as copy_current_request_context, - has_app_context as has_app_context, - has_request_context as has_request_context, -) -from .globals import current_app as current_app, g as g, request as request, session as session -from .helpers import ( - flash as flash, - get_flashed_messages as get_flashed_messages, - get_template_attribute as get_template_attribute, - make_response as make_response, - safe_join as safe_join, - send_file as send_file, - send_from_directory as send_from_directory, - stream_with_context as stream_with_context, - url_for as url_for, -) -from .json import jsonify as jsonify -from .signals import ( - appcontext_popped as appcontext_popped, - appcontext_pushed as appcontext_pushed, - appcontext_tearing_down as appcontext_tearing_down, - before_render_template as before_render_template, - got_request_exception as 
got_request_exception, - message_flashed as message_flashed, - request_finished as request_finished, - request_started as request_started, - request_tearing_down as request_tearing_down, - signals_available as signals_available, - template_rendered as template_rendered, -) -from .templating import render_template as render_template, render_template_string as render_template_string -from .wrappers import Request as Request, Response as Response diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/app.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/app.pyi deleted file mode 100644 index 9469bbbb981e..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/app.pyi +++ /dev/null @@ -1,194 +0,0 @@ -from datetime import timedelta -from logging import Logger -from types import TracebackType -from typing import ( - Any, - ByteString, - Callable, - ContextManager, - Dict, - Iterable, - List, - NoReturn, - Optional, - Text, - Tuple, - Type, - TypeVar, - Union, -) - -from .blueprints import Blueprint -from .config import Config -from .ctx import AppContext, RequestContext -from .helpers import _PackageBoundObject -from .testing import FlaskClient -from .wrappers import Response - -def setupmethod(f: Any): ... 
- -_T = TypeVar("_T") - -_ExcInfo = Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] -_StartResponse = Callable[[str, List[Tuple[str, str]], Optional[_ExcInfo]], Callable[[bytes], Any]] -_WSGICallable = Callable[[Dict[Text, Any], _StartResponse], Iterable[bytes]] - -_Status = Union[str, int] -_Headers = Union[Dict[Any, Any], List[Tuple[Any, Any]]] -_Body = Union[Text, ByteString, Dict[Text, Any], Response, _WSGICallable] -_ViewFuncReturnType = Union[_Body, Tuple[_Body, _Status, _Headers], Tuple[_Body, _Status], Tuple[_Body, _Headers]] - -_ViewFunc = Union[Callable[..., NoReturn], Callable[..., _ViewFuncReturnType]] -_VT = TypeVar("_VT", bound=_ViewFunc) - -class Flask(_PackageBoundObject): - request_class: type = ... - response_class: type = ... - jinja_environment: type = ... - app_ctx_globals_class: type = ... - config_class: Type[Config] = ... - testing: Any = ... - secret_key: Text | bytes | None = ... - session_cookie_name: Any = ... - permanent_session_lifetime: timedelta = ... - send_file_max_age_default: timedelta = ... - use_x_sendfile: Any = ... - json_encoder: Any = ... - json_decoder: Any = ... - jinja_options: Any = ... - default_config: Any = ... - url_rule_class: type = ... - test_client_class: type = ... - test_cli_runner_class: type = ... - session_interface: Any = ... - import_name: str = ... - template_folder: str = ... - root_path: str | Text = ... - static_url_path: Any = ... - static_folder: str | None = ... - instance_path: str | Text = ... - config: Config = ... - view_functions: Any = ... - error_handler_spec: Any = ... - url_build_error_handlers: Any = ... - before_request_funcs: dict[str | None, list[Callable[[], Any]]] = ... - before_first_request_funcs: list[Callable[[], None]] = ... - after_request_funcs: dict[str | None, list[Callable[[Response], Response]]] = ... - teardown_request_funcs: dict[str | None, list[Callable[[Exception | None], Any]]] = ... 
- teardown_appcontext_funcs: list[Callable[[Exception | None], Any]] = ... - url_value_preprocessors: Any = ... - url_default_functions: Any = ... - template_context_processors: Any = ... - shell_context_processors: Any = ... - blueprints: Any = ... - extensions: Any = ... - url_map: Any = ... - subdomain_matching: Any = ... - cli: Any = ... - def __init__( - self, - import_name: str, - static_url_path: str | None = ..., - static_folder: str | None = ..., - static_host: str | None = ..., - host_matching: bool = ..., - subdomain_matching: bool = ..., - template_folder: str = ..., - instance_path: str | None = ..., - instance_relative_config: bool = ..., - root_path: str | None = ..., - ) -> None: ... - @property - def name(self) -> str: ... - @property - def propagate_exceptions(self) -> bool: ... - @property - def preserve_context_on_exception(self): ... - @property - def logger(self) -> Logger: ... - @property - def jinja_env(self): ... - @property - def got_first_request(self) -> bool: ... - def make_config(self, instance_relative: bool = ...): ... - def auto_find_instance_path(self): ... - def open_instance_resource(self, resource: str | Text, mode: str = ...): ... - templates_auto_reload: Any = ... - def create_jinja_environment(self): ... - def create_global_jinja_loader(self): ... - def select_jinja_autoescape(self, filename: Any): ... - def update_template_context(self, context: Any) -> None: ... - def make_shell_context(self): ... - env: str | None = ... - debug: bool = ... - def run( - self, - host: str | None = ..., - port: int | str | None = ..., - debug: bool | None = ..., - load_dotenv: bool = ..., - **options: Any, - ) -> None: ... - def test_client(self, use_cookies: bool = ..., **kwargs: Any) -> FlaskClient[Response]: ... - def test_cli_runner(self, **kwargs: Any): ... - def open_session(self, request: Any): ... - def save_session(self, session: Any, response: Any): ... - def make_null_session(self): ... 
- def register_blueprint(self, blueprint: Blueprint, **options: Any) -> None: ... - def iter_blueprints(self): ... - def add_url_rule( - self, - rule: str, - endpoint: str | None = ..., - view_func: _ViewFunc = ..., - provide_automatic_options: bool | None = ..., - **options: Any, - ) -> None: ... - def route(self, rule: str, **options: Any) -> Callable[[_VT], _VT]: ... - def endpoint(self, endpoint: str) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... - def errorhandler(self, code_or_exception: int | Type[Exception]) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... - def register_error_handler(self, code_or_exception: int | Type[Exception], f: Callable[..., Any]) -> None: ... - def template_filter(self, name: Any | None = ...): ... - def add_template_filter(self, f: Any, name: Any | None = ...) -> None: ... - def template_test(self, name: Any | None = ...): ... - def add_template_test(self, f: Any, name: Any | None = ...) -> None: ... - def template_global(self, name: Any | None = ...): ... - def add_template_global(self, f: Any, name: Any | None = ...) -> None: ... - def before_request(self, f: Callable[[], _T]) -> Callable[[], _T]: ... - def before_first_request(self, f: Callable[[], _T]) -> Callable[[], _T]: ... - def after_request(self, f: Callable[[Response], Response]) -> Callable[[Response], Response]: ... - def teardown_request(self, f: Callable[[Exception | None], _T]) -> Callable[[Exception | None], _T]: ... - def teardown_appcontext(self, f: Callable[[Exception | None], _T]) -> Callable[[Exception | None], _T]: ... - def context_processor(self, f: Any): ... - def shell_context_processor(self, f: Any): ... - def url_value_preprocessor(self, f: Any): ... - def url_defaults(self, f: Any): ... - def handle_http_exception(self, e: Any): ... - def trap_http_exception(self, e: Any): ... - def handle_user_exception(self, e: Any): ... - def handle_exception(self, e: Any): ... - def log_exception(self, exc_info: Any) -> None: ... 
- def raise_routing_exception(self, request: Any) -> None: ... - def dispatch_request(self): ... - def full_dispatch_request(self): ... - def finalize_request(self, rv: Any, from_error_handler: bool = ...): ... - def try_trigger_before_first_request_functions(self): ... - def make_default_options_response(self): ... - def should_ignore_error(self, error: Any): ... - def make_response(self, rv: Any): ... - def create_url_adapter(self, request: Any): ... - def inject_url_defaults(self, endpoint: Any, values: Any) -> None: ... - def handle_url_build_error(self, error: Any, endpoint: Any, values: Any): ... - def preprocess_request(self): ... - def process_response(self, response: Any): ... - def do_teardown_request(self, exc: Any = ...) -> None: ... - def do_teardown_appcontext(self, exc: Any = ...) -> None: ... - def app_context(self) -> AppContext: ... - def request_context(self, environ: Any): ... - def test_request_context(self, *args: Any, **kwargs: Any) -> ContextManager[RequestContext]: ... - def wsgi_app(self, environ: Any, start_response: Any): ... - def __call__(self, environ: Any, start_response: Any): ... - # These are not preset at runtime but we add them since monkeypatching this - # class is quite common. - def __setattr__(self, name: str, value: Any): ... - def __getattr__(self, name: str): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/blueprints.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/blueprints.pyi deleted file mode 100644 index df795f1a3e75..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/blueprints.pyi +++ /dev/null @@ -1,78 +0,0 @@ -from typing import Any, Callable, Type, TypeVar - -from .app import _ViewFunc -from .helpers import _PackageBoundObject - -_T = TypeVar("_T") -_VT = TypeVar("_VT", bound=_ViewFunc) - -class _Sentinel(object): ... - -class BlueprintSetupState: - app: Any = ... - blueprint: Any = ... - options: Any = ... 
- first_registration: Any = ... - subdomain: Any = ... - url_prefix: Any = ... - url_defaults: Any = ... - def __init__(self, blueprint: Any, app: Any, options: Any, first_registration: Any) -> None: ... - def add_url_rule(self, rule: str, endpoint: str | None = ..., view_func: _ViewFunc = ..., **options: Any) -> None: ... - -class Blueprint(_PackageBoundObject): - warn_on_modifications: bool = ... - json_encoder: Any = ... - json_decoder: Any = ... - import_name: str = ... - template_folder: str | None = ... - root_path: str = ... - name: str = ... - url_prefix: str | None = ... - subdomain: str | None = ... - static_folder: str | None = ... - static_url_path: str | None = ... - deferred_functions: Any = ... - url_values_defaults: Any = ... - cli_group: str | None | _Sentinel = ... - def __init__( - self, - name: str, - import_name: str, - static_folder: str | None = ..., - static_url_path: str | None = ..., - template_folder: str | None = ..., - url_prefix: str | None = ..., - subdomain: str | None = ..., - url_defaults: Any | None = ..., - root_path: str | None = ..., - cli_group: str | None | _Sentinel = ..., - ) -> None: ... - def record(self, func: Any) -> None: ... - def record_once(self, func: Any): ... - def make_setup_state(self, app: Any, options: Any, first_registration: bool = ...): ... - def register(self, app: Any, options: Any, first_registration: bool = ...) -> None: ... - def route(self, rule: str, **options: Any) -> Callable[[_VT], _VT]: ... - def add_url_rule(self, rule: str, endpoint: str | None = ..., view_func: _ViewFunc = ..., **options: Any) -> None: ... - def endpoint(self, endpoint: str) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... - def app_template_filter(self, name: Any | None = ...): ... - def add_app_template_filter(self, f: Any, name: Any | None = ...) -> None: ... - def app_template_test(self, name: Any | None = ...): ... - def add_app_template_test(self, f: Any, name: Any | None = ...) -> None: ... 
- def app_template_global(self, name: Any | None = ...): ... - def add_app_template_global(self, f: Any, name: Any | None = ...) -> None: ... - def before_request(self, f: Any): ... - def before_app_request(self, f: Any): ... - def before_app_first_request(self, f: Any): ... - def after_request(self, f: Any): ... - def after_app_request(self, f: Any): ... - def teardown_request(self, f: Any): ... - def teardown_app_request(self, f: Any): ... - def context_processor(self, f: Any): ... - def app_context_processor(self, f: Any): ... - def app_errorhandler(self, code: Any): ... - def url_value_preprocessor(self, f: Any): ... - def url_defaults(self, f: Any): ... - def app_url_value_preprocessor(self, f: Any): ... - def app_url_defaults(self, f: Any): ... - def errorhandler(self, code_or_exception: int | Type[Exception]) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... - def register_error_handler(self, code_or_exception: int | Type[Exception], f: Callable[..., Any]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/cli.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/cli.pyi deleted file mode 100644 index 9f79483a75d6..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/cli.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from typing import Any - -import click - -class NoAppException(click.UsageError): ... - -def find_best_app(script_info: Any, module: Any): ... -def call_factory(script_info: Any, app_factory: Any, arguments: Any = ...): ... -def find_app_by_string(script_info: Any, module: Any, app_name: Any): ... -def prepare_import(path: Any): ... -def locate_app(script_info: Any, module_name: Any, app_name: Any, raise_if_not_found: bool = ...): ... -def get_version(ctx: Any, param: Any, value: Any): ... - -version_option: Any - -class DispatchingApp: - loader: Any = ... - def __init__(self, loader: Any, use_eager_loading: bool = ...) -> None: ... 
- def __call__(self, environ: Any, start_response: Any): ... - -class ScriptInfo: - app_import_path: Any = ... - create_app: Any = ... - data: Any = ... - def __init__(self, app_import_path: Any | None = ..., create_app: Any | None = ...) -> None: ... - def load_app(self): ... - -pass_script_info: Any - -def with_appcontext(f: Any): ... - -class AppGroup(click.Group): - def command(self, *args: Any, **kwargs: Any): ... - def group(self, *args: Any, **kwargs: Any): ... - -class FlaskGroup(AppGroup): - create_app: Any = ... - load_dotenv: Any = ... - def __init__( - self, - add_default_commands: bool = ..., - create_app: Any | None = ..., - add_version_option: bool = ..., - load_dotenv: bool = ..., - **extra: Any, - ) -> None: ... - def get_command(self, ctx: Any, name: Any): ... - def list_commands(self, ctx: Any): ... - def main(self, *args: Any, **kwargs: Any): ... - -def load_dotenv(path: Any | None = ...): ... -def show_server_banner(env: Any, debug: Any, app_import_path: Any, eager_loading: Any): ... - -class CertParamType(click.ParamType): - name: str = ... - path_type: Any = ... - def __init__(self) -> None: ... - def convert(self, value: Any, param: Any, ctx: Any): ... - -def run_command( - info: Any, host: Any, port: Any, reload: Any, debugger: Any, eager_loading: Any, with_threads: Any, cert: Any -) -> None: ... -def shell_command() -> None: ... -def routes_command(sort: Any, all_methods: Any): ... - -cli: Any - -def main(as_module: bool = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/config.pyi deleted file mode 100644 index b33ddd0c00cd..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/config.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Any, Dict - -class ConfigAttribute: - __name__: Any = ... - get_converter: Any = ... - def __init__(self, name: Any, get_converter: Any | None = ...) 
-> None: ... - def __get__(self, obj: Any, type: Any | None = ...): ... - def __set__(self, obj: Any, value: Any) -> None: ... - -class Config(Dict[str, Any]): - root_path: Any = ... - def __init__(self, root_path: Any, defaults: Any | None = ...) -> None: ... - def from_envvar(self, variable_name: Any, silent: bool = ...): ... - def from_pyfile(self, filename: Any, silent: bool = ...): ... - def from_object(self, obj: Any) -> None: ... - def from_json(self, filename: Any, silent: bool = ...): ... - def from_mapping(self, *mapping: Any, **kwargs: Any): ... - def get_namespace(self, namespace: Any, lowercase: bool = ..., trim_namespace: bool = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/ctx.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/ctx.pyi deleted file mode 100644 index 2246ca79b4a5..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/ctx.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Any - -class _AppCtxGlobals: - def get(self, name: Any, default: Any | None = ...): ... - def pop(self, name: Any, default: Any = ...): ... - def setdefault(self, name: Any, default: Any | None = ...): ... - def __contains__(self, item: Any): ... - def __iter__(self): ... - -def after_this_request(f: Any): ... -def copy_current_request_context(f: Any): ... -def has_request_context(): ... -def has_app_context(): ... - -class AppContext: - app: Any = ... - url_adapter: Any = ... - g: Any = ... - def __init__(self, app: Any) -> None: ... - def push(self) -> None: ... - def pop(self, exc: Any = ...) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... - -class RequestContext: - app: Any = ... - request: Any = ... - url_adapter: Any = ... - flashes: Any = ... - session: Any = ... - preserved: bool = ... - def __init__(self, app: Any, environ: Any, request: Any | None = ...) -> None: ... - g: Any = ... - def copy(self): ... 
- def match_request(self) -> None: ... - def push(self) -> None: ... - def pop(self, exc: Any = ...) -> None: ... - def auto_pop(self, exc: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/debughelpers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/debughelpers.pyi deleted file mode 100644 index 45255a955d2f..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/debughelpers.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from typing import Any - -class UnexpectedUnicodeError(AssertionError, UnicodeError): ... - -class DebugFilesKeyError(KeyError, AssertionError): - msg: Any = ... - def __init__(self, request: Any, key: Any) -> None: ... - -class FormDataRoutingRedirect(AssertionError): - def __init__(self, request: Any) -> None: ... - -def attach_enctype_error_multidict(request: Any): ... -def explain_template_loading_attempts(app: Any, template: Any, attempts: Any) -> None: ... -def explain_ignored_app_run() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/globals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/globals.pyi deleted file mode 100644 index c6043c726b8a..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/globals.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Any - -from werkzeug.local import LocalStack - -from .app import Flask -from .wrappers import Request - -class _FlaskLocalProxy(Flask): - def _get_current_object(self) -> Flask: ... 
- -_request_ctx_stack: LocalStack -_app_ctx_stack: LocalStack -current_app: _FlaskLocalProxy -request: Request -session: Any -g: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/helpers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/helpers.pyi deleted file mode 100644 index bf9a6982f8ab..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/helpers.pyi +++ /dev/null @@ -1,55 +0,0 @@ -from typing import Any - -from .cli import AppGroup -from .wrappers import Response - -def get_env(): ... -def get_debug_flag(): ... -def get_load_dotenv(default: bool = ...): ... -def stream_with_context(generator_or_function: Any): ... -def make_response(*args: Any) -> Response: ... -def url_for(endpoint: str, **values: Any) -> str: ... -def get_template_attribute(template_name: Any, attribute: Any): ... -def flash(message: Any, category: str = ...) -> None: ... -def get_flashed_messages(with_categories: bool = ..., category_filter: Any = ...): ... -def send_file( - filename_or_fp: Any, - mimetype: Any | None = ..., - as_attachment: bool = ..., - attachment_filename: Any | None = ..., - add_etags: bool = ..., - cache_timeout: Any | None = ..., - conditional: bool = ..., - last_modified: Any | None = ..., -) -> Response: ... -def safe_join(directory: Any, *pathnames: Any): ... -def send_from_directory(directory: Any, filename: Any, **options: Any) -> Response: ... -def get_root_path(import_name: Any): ... -def find_package(import_name: Any): ... - -class locked_cached_property: - __name__: Any = ... - __module__: Any = ... - __doc__: Any = ... - func: Any = ... - lock: Any = ... - def __init__(self, func: Any, name: Any | None = ..., doc: Any | None = ...) -> None: ... - def __get__(self, obj: Any, type: Any | None = ...): ... - -class _PackageBoundObject: - import_name: Any = ... - template_folder: Any = ... - root_path: Any = ... - cli: AppGroup = ... 
- def __init__(self, import_name: Any, template_folder: Any | None = ..., root_path: Any | None = ...) -> None: ... - static_folder: Any = ... - static_url_path: Any = ... - @property - def has_static_folder(self): ... - def jinja_loader(self): ... - def get_send_file_max_age(self, filename: Any): ... - def send_static_file(self, filename: Any) -> Response: ... - def open_resource(self, resource: Any, mode: str = ...): ... - -def total_seconds(td: Any): ... -def is_ip(value: Any): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/json/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/json/__init__.pyi deleted file mode 100644 index efdc8974f018..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/json/__init__.pyi +++ /dev/null @@ -1,19 +0,0 @@ -import json as _json -from typing import Any - -from jinja2 import Markup - -class JSONEncoder(_json.JSONEncoder): - def default(self, o: Any): ... - -class JSONDecoder(_json.JSONDecoder): ... - -def detect_encoding(data: bytes) -> str: ... # undocumented -def dumps(obj: Any, **kwargs: Any): ... -def dump(obj: Any, fp: Any, **kwargs: Any) -> None: ... -def loads(s: Any, **kwargs: Any): ... -def load(fp: Any, **kwargs: Any): ... -def htmlsafe_dumps(obj: Any, **kwargs: Any): ... -def htmlsafe_dump(obj: Any, fp: Any, **kwargs: Any) -> None: ... -def jsonify(*args: Any, **kwargs: Any): ... -def tojson_filter(obj: Any, **kwargs: Any) -> Markup: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/json/tag.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/json/tag.pyi deleted file mode 100644 index 4b9d15a65244..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/json/tag.pyi +++ /dev/null @@ -1,67 +0,0 @@ -from typing import Any - -class JSONTag: - key: Any = ... - serializer: Any = ... - def __init__(self, serializer: Any) -> None: ... 
- def check(self, value: Any) -> None: ... - def to_json(self, value: Any) -> None: ... - def to_python(self, value: Any) -> None: ... - def tag(self, value: Any): ... - -class TagDict(JSONTag): - key: str = ... - def check(self, value: Any): ... - def to_json(self, value: Any): ... - def to_python(self, value: Any): ... - -class PassDict(JSONTag): - def check(self, value: Any): ... - def to_json(self, value: Any): ... - tag: Any = ... - -class TagTuple(JSONTag): - key: str = ... - def check(self, value: Any): ... - def to_json(self, value: Any): ... - def to_python(self, value: Any): ... - -class PassList(JSONTag): - def check(self, value: Any): ... - def to_json(self, value: Any): ... - tag: Any = ... - -class TagBytes(JSONTag): - key: str = ... - def check(self, value: Any): ... - def to_json(self, value: Any): ... - def to_python(self, value: Any): ... - -class TagMarkup(JSONTag): - key: str = ... - def check(self, value: Any): ... - def to_json(self, value: Any): ... - def to_python(self, value: Any): ... - -class TagUUID(JSONTag): - key: str = ... - def check(self, value: Any): ... - def to_json(self, value: Any): ... - def to_python(self, value: Any): ... - -class TagDateTime(JSONTag): - key: str = ... - def check(self, value: Any): ... - def to_json(self, value: Any): ... - def to_python(self, value: Any): ... - -class TaggedJSONSerializer: - default_tags: Any = ... - tags: Any = ... - order: Any = ... - def __init__(self) -> None: ... - def register(self, tag_class: Any, force: bool = ..., index: Any | None = ...) -> None: ... - def tag(self, value: Any): ... - def untag(self, value: Any): ... - def dumps(self, value: Any): ... - def loads(self, value: Any): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/logging.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/logging.pyi deleted file mode 100644 index 9a69c75e2869..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/logging.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from _typeshed.wsgi import ErrorStream -from logging import Handler, Logger - -from .app import Flask - -wsgi_errors_stream: ErrorStream - -def has_level_handler(logger: Logger) -> bool: ... - -default_handler: Handler - -def create_logger(app: Flask) -> Logger: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/sessions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/sessions.pyi deleted file mode 100644 index 84c3fcc90edb..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/sessions.pyi +++ /dev/null @@ -1,57 +0,0 @@ -from abc import ABCMeta -from typing import Any, MutableMapping - -from werkzeug.datastructures import CallbackDict - -class SessionMixin(MutableMapping[str, Any], metaclass=ABCMeta): - @property - def permanent(self): ... - @permanent.setter - def permanent(self, value: Any) -> None: ... - new: bool = ... - modified: bool = ... - accessed: bool = ... - -class SecureCookieSession(CallbackDict[str, Any], SessionMixin): - modified: bool = ... - accessed: bool = ... - def __init__(self, initial: Any | None = ...) -> None: ... - def __getitem__(self, key: Any): ... - def get(self, key: Any, default: Any | None = ...): ... - def setdefault(self, key: Any, default: Any | None = ...): ... - -class NullSession(SecureCookieSession): - __setitem__: Any = ... - __delitem__: Any = ... - clear: Any = ... - pop: Any = ... - popitem: Any = ... - update: Any = ... - setdefault: Any = ... - -class SessionInterface: - null_session_class: Any = ... - pickle_based: bool = ... - def make_null_session(self, app: Any): ... - def is_null_session(self, obj: Any): ... 
- def get_cookie_domain(self, app: Any): ... - def get_cookie_path(self, app: Any): ... - def get_cookie_httponly(self, app: Any): ... - def get_cookie_secure(self, app: Any): ... - def get_cookie_samesite(self, app: Any): ... - def get_expiration_time(self, app: Any, session: Any): ... - def should_set_cookie(self, app: Any, session: Any): ... - def open_session(self, app: Any, request: Any) -> None: ... - def save_session(self, app: Any, session: Any, response: Any) -> None: ... - -session_json_serializer: Any - -class SecureCookieSessionInterface(SessionInterface): - salt: str = ... - digest_method: Any = ... - key_derivation: str = ... - serializer: Any = ... - session_class: Any = ... - def get_signing_serializer(self, app: Any): ... - def open_session(self, app: Any, request: Any): ... - def save_session(self, app: Any, session: Any, response: Any): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/signals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/signals.pyi deleted file mode 100644 index de7c1951faf3..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/signals.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Any - -signals_available: bool - -class Namespace: - def signal(self, name: Any, doc: Any | None = ...): ... - -class _FakeSignal: - name: Any = ... - __doc__: Any = ... - def __init__(self, name: Any, doc: Any | None = ...) -> None: ... - send: Any = ... - connect: Any = ... - disconnect: Any = ... - has_receivers_for: Any = ... - receivers_for: Any = ... - temporarily_connected_to: Any = ... - connected_to: Any = ... 
- -template_rendered: Any -before_render_template: Any -request_started: Any -request_finished: Any -request_tearing_down: Any -got_request_exception: Any -appcontext_tearing_down: Any -appcontext_pushed: Any -appcontext_popped: Any -message_flashed: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/templating.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/templating.pyi deleted file mode 100644 index d9bf997cb6b8..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/templating.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Any, Iterable, Text - -from jinja2 import BaseLoader, Environment as BaseEnvironment - -class Environment(BaseEnvironment): - app: Any = ... - def __init__(self, app: Any, **options: Any) -> None: ... - -class DispatchingJinjaLoader(BaseLoader): - app: Any = ... - def __init__(self, app: Any) -> None: ... - def get_source(self, environment: Any, template: Any): ... - def list_templates(self): ... - -def render_template(template_name_or_list: Text | Iterable[Text], **context: Any) -> Text: ... -def render_template_string(source: Text, **context: Any) -> Text: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/testing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/testing.pyi deleted file mode 100644 index 08f5a0d080b3..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/testing.pyi +++ /dev/null @@ -1,56 +0,0 @@ -from typing import IO, Any, Iterable, Mapping, Text, TypeVar - -from click import BaseCommand -from click.testing import CliRunner, Result -from werkzeug.test import Client, EnvironBuilder as WerkzeugEnvironBuilder - -# Response type for the client below. -# By default _R is Tuple[Iterable[Any], Text | int, werkzeug.datastructures.Headers], however -# most commonly it is wrapped in a Reponse object. 
-_R = TypeVar("_R") - -class FlaskClient(Client[_R]): - preserve_context: bool = ... - environ_base: Any = ... - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - def session_transaction(self, *args: Any, **kwargs: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... - -class FlaskCliRunner(CliRunner): - app: Any = ... - def __init__(self, app: Any, **kwargs: Any) -> None: ... - def invoke( - self, - cli: BaseCommand | None = ..., - args: str | Iterable[str] | None = ..., - input: bytes | IO[Any] | Text | None = ..., - env: Mapping[str, str] | None = ..., - catch_exceptions: bool = ..., - color: bool = ..., - **extra: Any, - ) -> Result: ... - -class EnvironBuilder(WerkzeugEnvironBuilder): - app: Any - def __init__( - self, - app: Any, - path: str = ..., - base_url: Any | None = ..., - subdomain: Any | None = ..., - url_scheme: Any | None = ..., - *args: Any, - **kwargs: Any, - ) -> None: ... - def json_dumps(self, obj: Any, **kwargs: Any) -> str: ... - -def make_test_environ_builder( - app: Any, - path: str = ..., - base_url: Any | None = ..., - subdomain: Any | None = ..., - url_scheme: Any | None = ..., - *args: Any, - **kwargs: Any, -): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/views.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/views.pyi deleted file mode 100644 index e6112637569a..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/views.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from typing import Any - -http_method_funcs: Any - -class View: - methods: Any = ... - provide_automatic_options: Any = ... - decorators: Any = ... - def dispatch_request(self, *args: Any, **kwargs: Any) -> Any: ... - @classmethod - def as_view(cls, name: Any, *class_args: Any, **class_kwargs: Any): ... - -class MethodViewType(type): - def __init__(self, name: Any, bases: Any, d: Any) -> None: ... 
- -class MethodView(View, metaclass=MethodViewType): - def dispatch_request(self, *args: Any, **kwargs: Any) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/wrappers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/wrappers.pyi deleted file mode 100644 index e5a428655647..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Flask/flask/wrappers.pyi +++ /dev/null @@ -1,32 +0,0 @@ -from typing import Any - -from werkzeug.exceptions import HTTPException -from werkzeug.routing import Rule -from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase - -class JSONMixin: - @property - def is_json(self) -> bool: ... - @property - def json(self): ... - def get_json(self, force: bool = ..., silent: bool = ..., cache: bool = ...): ... - def on_json_loading_failed(self, e: Any) -> None: ... - -class Request(RequestBase, JSONMixin): - url_rule: Rule | None = ... - view_args: dict[str, Any] = ... - routing_exception: HTTPException | None = ... - # Request is making the max_content_length readonly, where it was not the - # case in its supertype. - # We would require something like https://github.com/python/typing/issues/241 - @property - def max_content_length(self) -> int | None: ... # type: ignore - @property - def endpoint(self) -> str | None: ... - @property - def blueprint(self) -> str | None: ... - -class Response(ResponseBase, JSONMixin): - default_mimetype: str | None = ... - @property - def max_cookie_size(self) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/METADATA.toml index 5f1541084942..432cfba3b0a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/METADATA.toml @@ -1 +1,2 @@ -version = "0.1" +version = "0.5.*" +stubtest_apt_dependencies = ["libjack-dev"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/jack/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/jack/__init__.pyi index 7ce4d21b61c3..5d83f6ad0d41 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/jack/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/JACK-Client/jack/__init__.pyi @@ -1,4 +1,5 @@ -from typing import Any, Callable, Generator, Iterable, Iterator, Sequence, Tuple, overload +from _typeshed import Self +from typing import Any, Callable, Generator, Iterable, Iterator, Sequence, overload _NDArray = Any # FIXME: no typings for numpy arrays @@ -50,6 +51,8 @@ class Client: servername: str | None = ..., session_id: str | None = ..., ) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, *args: object) -> None: ... @property def name(self) -> str: ... @property @@ -94,8 +97,8 @@ class Client: @transport_frame.setter def transport_frame(self, frame: int) -> None: ... def transport_locate(self, frame: int) -> None: ... - def transport_query(self) -> Tuple[TransportState, dict[str, Any]]: ... - def transport_query_struct(self) -> Tuple[TransportState, _JackPositionT]: ... + def transport_query(self) -> tuple[TransportState, dict[str, Any]]: ... + def transport_query_struct(self) -> tuple[TransportState, _JackPositionT]: ... def transport_reposition_struct(self, position: _JackPositionT) -> None: ... # TODO def set_sync_timeout(self, timeout: int) -> None: ... 
def set_freewheel(self, onoff: bool) -> None: ... @@ -196,7 +199,7 @@ class OwnMidiPort(MidiPort, OwnPort): def max_event_size(self) -> int: ... @property def lost_midi_events(self) -> int: ... - def incoming_midi_events(self) -> Generator[Tuple[int, _CBufferType], None, None]: ... + def incoming_midi_events(self) -> Generator[tuple[int, _CBufferType], None, None]: ... def clear_buffer(self) -> None: ... def write_midi_event(self, time: int, event: bytes | Sequence[int] | _CBufferType) -> None: ... def reserve_midi_event(self, time: int, size: int) -> _CBufferType: ... @@ -215,14 +218,14 @@ class RingBuffer: def write_space(self) -> int: ... def write(self, data: bytes | Iterable[int] | _CBufferType) -> int: ... @property - def write_buffers(self) -> Tuple[_CBufferType, _CBufferType]: ... + def write_buffers(self) -> tuple[_CBufferType, _CBufferType]: ... def write_advance(self, size: int) -> None: ... @property def read_space(self) -> int: ... def read(self, size: int) -> _CBufferType: ... def peek(self, size: int) -> _CBufferType: ... @property - def read_buffers(self) -> Tuple[_CBufferType, _CBufferType]: ... + def read_buffers(self) -> tuple[_CBufferType, _CBufferType]: ... def read_advance(self, size: int) -> None: ... def mlock(self) -> None: ... def reset(self, size: int | None = ...) -> None: ... @@ -265,11 +268,11 @@ class TransportState: class CallbackExit(Exception): ... -def get_property(subject: int | str, key: str) -> Tuple[bytes, str] | None: ... -def get_properties(subject: int | str) -> dict[str, Tuple[bytes, str]]: ... -def get_all_properties() -> dict[str, dict[str, Tuple[bytes, str]]]: ... +def get_property(subject: int | str, key: str) -> tuple[bytes, str] | None: ... +def get_properties(subject: int | str) -> dict[str, tuple[bytes, str]]: ... +def get_all_properties() -> dict[str, dict[str, tuple[bytes, str]]]: ... def position2dict(pos: _JackPositionT) -> dict[str, Any]: ... -def version() -> Tuple[int, int, int, int]: ... 
+def version() -> tuple[int, int, int, int]: ... def version_string() -> str: ... def client_name_size() -> int: ... def port_name_size() -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/METADATA.toml deleted file mode 100644 index 0edeb45a5f10..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/METADATA.toml +++ /dev/null @@ -1,4 +0,0 @@ -version = "2.11" -python2 = true -requires = ["types-MarkupSafe"] -obsolete_since = "3.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/__init__.pyi deleted file mode 100644 index 1121f8dee408..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/__init__.pyi +++ /dev/null @@ -1,45 +0,0 @@ -from jinja2.bccache import ( - BytecodeCache as BytecodeCache, - FileSystemBytecodeCache as FileSystemBytecodeCache, - MemcachedBytecodeCache as MemcachedBytecodeCache, -) -from jinja2.environment import Environment as Environment, Template as Template -from jinja2.exceptions import ( - TemplateAssertionError as TemplateAssertionError, - TemplateError as TemplateError, - TemplateNotFound as TemplateNotFound, - TemplatesNotFound as TemplatesNotFound, - TemplateSyntaxError as TemplateSyntaxError, - UndefinedError as UndefinedError, -) -from jinja2.filters import ( - contextfilter as contextfilter, - environmentfilter as environmentfilter, - evalcontextfilter as evalcontextfilter, -) -from jinja2.loaders import ( - BaseLoader as BaseLoader, - ChoiceLoader as ChoiceLoader, - DictLoader as DictLoader, - FileSystemLoader as FileSystemLoader, - FunctionLoader as FunctionLoader, - ModuleLoader as ModuleLoader, - PackageLoader as PackageLoader, - PrefixLoader as PrefixLoader, -) -from jinja2.runtime import ( - DebugUndefined as DebugUndefined, - StrictUndefined as StrictUndefined, - 
Undefined as Undefined, - make_logging_undefined as make_logging_undefined, -) -from jinja2.utils import ( - Markup as Markup, - clear_caches as clear_caches, - contextfunction as contextfunction, - environmentfunction as environmentfunction, - escape as escape, - evalcontextfunction as evalcontextfunction, - is_undefined as is_undefined, - select_autoescape as select_autoescape, -) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/_compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/_compat.pyi deleted file mode 100644 index f85bd0f2fb3f..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/_compat.pyi +++ /dev/null @@ -1,36 +0,0 @@ -import sys -from typing import Any - -if sys.version_info >= (3,): - from urllib.parse import quote_from_bytes - - url_quote = quote_from_bytes -else: - import urllib - - url_quote = urllib.quote - -PY2: Any -PYPY: Any -unichr: Any -range_type: Any -text_type: Any -string_types: Any -integer_types: Any -iterkeys: Any -itervalues: Any -iteritems: Any -NativeStringIO: Any - -def reraise(tp, value, tb: Any | None = ...): ... - -ifilter: Any -imap: Any -izip: Any -intern: Any -implements_iterator: Any -implements_to_string: Any -encode_filename: Any -get_next: Any - -def with_metaclass(meta, *bases): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/_stringdefs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/_stringdefs.pyi deleted file mode 100644 index 060f8881c3a5..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/_stringdefs.pyi +++ /dev/null @@ -1,40 +0,0 @@ -from typing import Any - -Cc: str -Cf: str -Cn: str -Co: str -Cs: Any -Ll: str -Lm: str -Lo: str -Lt: str -Lu: str -Mc: str -Me: str -Mn: str -Nd: str -Nl: str -No: str -Pc: str -Pd: str -Pe: str -Pf: str -Pi: str -Po: str -Ps: str -Sc: str -Sk: str -Sm: str -So: str -Zl: str -Zp: str -Zs: str -cats: Any - -def combine(*args): ... - -xid_start: str -xid_continue: str - -def allexcept(*args): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/bccache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/bccache.pyi deleted file mode 100644 index 2f39dce24ea5..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/bccache.pyi +++ /dev/null @@ -1,44 +0,0 @@ -from typing import Any - -marshal_dump: Any -marshal_load: Any -bc_version: int -bc_magic: Any - -class Bucket: - environment: Any - key: Any - checksum: Any - def __init__(self, environment, key, checksum) -> None: ... - code: Any - def reset(self): ... - def load_bytecode(self, f): ... - def write_bytecode(self, f): ... - def bytecode_from_string(self, string): ... - def bytecode_to_string(self): ... - -class BytecodeCache: - def load_bytecode(self, bucket): ... - def dump_bytecode(self, bucket): ... - def clear(self): ... - def get_cache_key(self, name, filename: Any | None = ...): ... - def get_source_checksum(self, source): ... - def get_bucket(self, environment, name, filename, source): ... - def set_bucket(self, bucket): ... - -class FileSystemBytecodeCache(BytecodeCache): - directory: Any - pattern: Any - def __init__(self, directory: Any | None = ..., pattern: str = ...) -> None: ... 
- def load_bytecode(self, bucket): ... - def dump_bytecode(self, bucket): ... - def clear(self): ... - -class MemcachedBytecodeCache(BytecodeCache): - client: Any - prefix: Any - timeout: Any - ignore_memcache_errors: Any - def __init__(self, client, prefix: str = ..., timeout: Any | None = ..., ignore_memcache_errors: bool = ...) -> None: ... - def load_bytecode(self, bucket): ... - def dump_bytecode(self, bucket): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/compiler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/compiler.pyi deleted file mode 100644 index 43eb332b25ff..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/compiler.pyi +++ /dev/null @@ -1,177 +0,0 @@ -from keyword import iskeyword as is_python_keyword -from typing import Any - -from jinja2.visitor import NodeVisitor - -operators: Any -dict_item_iter: str - -unoptimize_before_dead_code: bool - -def generate(node, environment, name, filename, stream: Any | None = ..., defer_init: bool = ...): ... -def has_safe_repr(value): ... -def find_undeclared(nodes, names): ... - -class Identifiers: - declared: Any - outer_undeclared: Any - undeclared: Any - declared_locally: Any - declared_parameter: Any - def __init__(self) -> None: ... - def add_special(self, name): ... - def is_declared(self, name): ... - def copy(self): ... - -class Frame: - eval_ctx: Any - identifiers: Any - toplevel: bool - rootlevel: bool - require_output_check: Any - buffer: Any - block: Any - assigned_names: Any - parent: Any - def __init__(self, eval_ctx, parent: Any | None = ...) -> None: ... - def copy(self): ... - def inspect(self, nodes): ... - def find_shadowed(self, extra: Any = ...): ... - def inner(self): ... - def soft(self): ... - __copy__: Any - -class VisitorExit(RuntimeError): ... - -class DependencyFinderVisitor(NodeVisitor): - filters: Any - tests: Any - def __init__(self) -> None: ... - def visit_Filter(self, node): ... 
- def visit_Test(self, node): ... - def visit_Block(self, node): ... - -class UndeclaredNameVisitor(NodeVisitor): - names: Any - undeclared: Any - def __init__(self, names) -> None: ... - def visit_Name(self, node): ... - def visit_Block(self, node): ... - -class FrameIdentifierVisitor(NodeVisitor): - identifiers: Any - def __init__(self, identifiers) -> None: ... - def visit_Name(self, node): ... - def visit_If(self, node): ... - def visit_Macro(self, node): ... - def visit_Import(self, node): ... - def visit_FromImport(self, node): ... - def visit_Assign(self, node): ... - def visit_For(self, node): ... - def visit_CallBlock(self, node): ... - def visit_FilterBlock(self, node): ... - def visit_AssignBlock(self, node): ... - def visit_Scope(self, node): ... - def visit_Block(self, node): ... - -class CompilerExit(Exception): ... - -class CodeGenerator(NodeVisitor): - environment: Any - name: Any - filename: Any - stream: Any - created_block_context: bool - defer_init: Any - import_aliases: Any - blocks: Any - extends_so_far: int - has_known_extends: bool - code_lineno: int - tests: Any - filters: Any - debug_info: Any - def __init__(self, environment, name, filename, stream: Any | None = ..., defer_init: bool = ...) -> None: ... - def fail(self, msg, lineno): ... - def temporary_identifier(self): ... - def buffer(self, frame): ... - def return_buffer_contents(self, frame): ... - def indent(self): ... - def outdent(self, step: int = ...): ... - def start_write(self, frame, node: Any | None = ...): ... - def end_write(self, frame): ... - def simple_write(self, s, frame, node: Any | None = ...): ... - def blockvisit(self, nodes, frame): ... - def write(self, x): ... - def writeline(self, x, node: Any | None = ..., extra: int = ...): ... - def newline(self, node: Any | None = ..., extra: int = ...): ... - def signature(self, node, frame, extra_kwargs: Any | None = ...): ... - def pull_locals(self, frame): ... - def pull_dependencies(self, nodes): ... 
- def unoptimize_scope(self, frame): ... - def push_scope(self, frame, extra_vars: Any = ...): ... - def pop_scope(self, aliases, frame): ... - def function_scoping(self, node, frame, children: Any | None = ..., find_special: bool = ...): ... - def macro_body(self, node, frame, children: Any | None = ...): ... - def macro_def(self, node, frame): ... - def position(self, node): ... - def visit_Template(self, node, frame: Any | None = ...): ... - def visit_Block(self, node, frame): ... - def visit_Extends(self, node, frame): ... - def visit_Include(self, node, frame): ... - def visit_Import(self, node, frame): ... - def visit_FromImport(self, node, frame): ... - def visit_For(self, node, frame): ... - def visit_If(self, node, frame): ... - def visit_Macro(self, node, frame): ... - def visit_CallBlock(self, node, frame): ... - def visit_FilterBlock(self, node, frame): ... - def visit_ExprStmt(self, node, frame): ... - def visit_Output(self, node, frame): ... - def make_assignment_frame(self, frame): ... - def export_assigned_vars(self, frame, assignment_frame): ... - def visit_Assign(self, node, frame): ... - def visit_AssignBlock(self, node, frame): ... - def visit_Name(self, node, frame): ... - def visit_Const(self, node, frame): ... - def visit_TemplateData(self, node, frame): ... - def visit_Tuple(self, node, frame): ... - def visit_List(self, node, frame): ... - def visit_Dict(self, node, frame): ... - def binop(self, interceptable: bool = ...): ... - def uaop(self, interceptable: bool = ...): ... - visit_Add: Any - visit_Sub: Any - visit_Mul: Any - visit_Div: Any - visit_FloorDiv: Any - visit_Pow: Any - visit_Mod: Any - visit_And: Any - visit_Or: Any - visit_Pos: Any - visit_Neg: Any - visit_Not: Any - def visit_Concat(self, node, frame): ... - def visit_Compare(self, node, frame): ... - def visit_Operand(self, node, frame): ... - def visit_Getattr(self, node, frame): ... - def visit_Getitem(self, node, frame): ... - def visit_Slice(self, node, frame): ... 
- def visit_Filter(self, node, frame): ... - def visit_Test(self, node, frame): ... - def visit_CondExpr(self, node, frame): ... - def visit_Call(self, node, frame, forward_caller: bool = ...): ... - def visit_Keyword(self, node, frame): ... - def visit_MarkSafe(self, node, frame): ... - def visit_MarkSafeIfAutoescape(self, node, frame): ... - def visit_EnvironmentAttribute(self, node, frame): ... - def visit_ExtensionAttribute(self, node, frame): ... - def visit_ImportedName(self, node, frame): ... - def visit_InternalName(self, node, frame): ... - def visit_ContextReference(self, node, frame): ... - def visit_Continue(self, node, frame): ... - def visit_Break(self, node, frame): ... - def visit_Scope(self, node, frame): ... - def visit_EvalContextModifier(self, node, frame): ... - def visit_ScopedEvalContextModifier(self, node, frame): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/constants.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/constants.pyi deleted file mode 100644 index 55ea3ea5e481..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/constants.pyi +++ /dev/null @@ -1 +0,0 @@ -LOREM_IPSUM_WORDS: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/debug.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/debug.pyi deleted file mode 100644 index 5bffa43f5969..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/debug.pyi +++ /dev/null @@ -1,37 +0,0 @@ -from typing import Any - -tproxy: Any -raise_helper: str - -class TracebackFrameProxy: - tb: Any - def __init__(self, tb) -> None: ... - @property - def tb_next(self): ... - def set_next(self, next): ... - @property - def is_jinja_frame(self): ... - def __getattr__(self, name): ... - -def make_frame_proxy(frame): ... 
- -class ProcessedTraceback: - exc_type: Any - exc_value: Any - frames: Any - def __init__(self, exc_type, exc_value, frames) -> None: ... - def render_as_text(self, limit: Any | None = ...): ... - def render_as_html(self, full: bool = ...): ... - @property - def is_template_syntax_error(self): ... - @property - def exc_info(self): ... - @property - def standard_exc_info(self): ... - -def make_traceback(exc_info, source_hint: Any | None = ...): ... -def translate_syntax_error(error, source: Any | None = ...): ... -def translate_exception(exc_info, initial_skip: int = ...): ... -def fake_exc_info(exc_info, filename, lineno): ... - -tb_set_next: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/defaults.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/defaults.pyi deleted file mode 100644 index 8ab2a32429a7..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/defaults.pyi +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Any, Dict - -from jinja2.filters import FILTERS -from jinja2.tests import TESTS - -DEFAULT_FILTERS = FILTERS -DEFAULT_TESTS = TESTS - -BLOCK_START_STRING: str -BLOCK_END_STRING: str -VARIABLE_START_STRING: str -VARIABLE_END_STRING: str -COMMENT_START_STRING: str -COMMENT_END_STRING: str -LINE_STATEMENT_PREFIX: str | None -LINE_COMMENT_PREFIX: str | None -TRIM_BLOCKS: bool -LSTRIP_BLOCKS: bool -NEWLINE_SEQUENCE: str -KEEP_TRAILING_NEWLINE: bool -DEFAULT_NAMESPACE: dict[str, Any] -DEFAULT_POLICIES = Dict[str, Any] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/environment.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/environment.pyi deleted file mode 100644 index 6633375b3c0d..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/environment.pyi +++ /dev/null @@ -1,214 +0,0 @@ -import sys -from typing import Any, Callable, Iterator, Sequence, Text, Type - -from .bccache import 
BytecodeCache -from .loaders import BaseLoader -from .runtime import Context, Undefined - -if sys.version_info >= (3, 6): - from typing import AsyncIterator, Awaitable - -def get_spontaneous_environment(*args): ... -def create_cache(size): ... -def copy_cache(cache): ... -def load_extensions(environment, extensions): ... - -class Environment: - sandboxed: bool - overlayed: bool - linked_to: Any - shared: bool - exception_handler: Any - exception_formatter: Any - code_generator_class: Any - context_class: Any - block_start_string: Text - block_end_string: Text - variable_start_string: Text - variable_end_string: Text - comment_start_string: Text - comment_end_string: Text - line_statement_prefix: Text - line_comment_prefix: Text - trim_blocks: bool - lstrip_blocks: Any - newline_sequence: Text - keep_trailing_newline: bool - undefined: Type[Undefined] - optimized: bool - finalize: Callable[..., Any] - autoescape: Any - filters: Any - tests: Any - globals: dict[str, Any] - loader: BaseLoader - cache: Any - bytecode_cache: BytecodeCache - auto_reload: bool - extensions: list[Any] - def __init__( - self, - block_start_string: Text = ..., - block_end_string: Text = ..., - variable_start_string: Text = ..., - variable_end_string: Text = ..., - comment_start_string: Any = ..., - comment_end_string: Text = ..., - line_statement_prefix: Text = ..., - line_comment_prefix: Text = ..., - trim_blocks: bool = ..., - lstrip_blocks: bool = ..., - newline_sequence: Text = ..., - keep_trailing_newline: bool = ..., - extensions: list[Any] = ..., - optimized: bool = ..., - undefined: Type[Undefined] = ..., - finalize: Callable[..., Any] | None = ..., - autoescape: bool | Callable[[str], bool] = ..., - loader: BaseLoader | None = ..., - cache_size: int = ..., - auto_reload: bool = ..., - bytecode_cache: BytecodeCache | None = ..., - enable_async: bool = ..., - ) -> None: ... - def add_extension(self, extension): ... - def extend(self, **attributes): ... 
- def overlay( - self, - block_start_string: Text = ..., - block_end_string: Text = ..., - variable_start_string: Text = ..., - variable_end_string: Text = ..., - comment_start_string: Any = ..., - comment_end_string: Text = ..., - line_statement_prefix: Text = ..., - line_comment_prefix: Text = ..., - trim_blocks: bool = ..., - lstrip_blocks: bool = ..., - extensions: list[Any] = ..., - optimized: bool = ..., - undefined: Type[Undefined] = ..., - finalize: Callable[..., Any] = ..., - autoescape: bool = ..., - loader: BaseLoader | None = ..., - cache_size: int = ..., - auto_reload: bool = ..., - bytecode_cache: BytecodeCache | None = ..., - ): ... - lexer: Any - def iter_extensions(self): ... - def getitem(self, obj, argument): ... - def getattr(self, obj, attribute): ... - def call_filter( - self, name, value, args: Any | None = ..., kwargs: Any | None = ..., context: Any | None = ..., eval_ctx: Any | None = ... - ): ... - def call_test(self, name, value, args: Any | None = ..., kwargs: Any | None = ...): ... - def parse(self, source, name: Any | None = ..., filename: Any | None = ...): ... - def lex(self, source, name: Any | None = ..., filename: Any | None = ...): ... - def preprocess(self, source: Text, name: Any | None = ..., filename: Any | None = ...): ... - def compile(self, source, name: Any | None = ..., filename: Any | None = ..., raw: bool = ..., defer_init: bool = ...): ... - def compile_expression(self, source: Text, undefined_to_none: bool = ...): ... - def compile_templates( - self, - target, - extensions: Any | None = ..., - filter_func: Any | None = ..., - zip: str = ..., - log_function: Any | None = ..., - ignore_errors: bool = ..., - py_compile: bool = ..., - ): ... - def list_templates(self, extensions: Any | None = ..., filter_func: Any | None = ...): ... - def handle_exception(self, exc_info: Any | None = ..., rendered: bool = ..., source_hint: Any | None = ...): ... - def join_path(self, template: Template | Text, parent: Text) -> Text: ... 
- def get_template(self, name: Template | Text, parent: Text | None = ..., globals: Any | None = ...) -> Template: ... - def select_template( - self, names: Sequence[Template | Text], parent: Text | None = ..., globals: dict[str, Any] | None = ... - ) -> Template: ... - def get_or_select_template( - self, - template_name_or_list: Template | Text | Sequence[Template | Text], - parent: Text | None = ..., - globals: dict[str, Any] | None = ..., - ) -> Template: ... - def from_string( - self, source: Text, globals: dict[str, Any] | None = ..., template_class: Type[Template] | None = ... - ) -> Template: ... - def make_globals(self, d: dict[str, Any] | None) -> dict[str, Any]: ... - # Frequently added extensions are included here: - # from InternationalizationExtension: - def install_gettext_translations(self, translations: Any, newstyle: bool | None = ...): ... - def install_null_translations(self, newstyle: bool | None = ...): ... - def install_gettext_callables( - self, gettext: Callable[..., Any], ngettext: Callable[..., Any], newstyle: bool | None = ... - ): ... - def uninstall_gettext_translations(self, translations: Any): ... - def extract_translations(self, source: Any, gettext_functions: Any): ... - newstyle_gettext: bool - -class Template: - name: str | None - filename: str | None - def __new__( - cls, - source, - block_start_string: Any = ..., - block_end_string: Any = ..., - variable_start_string: Any = ..., - variable_end_string: Any = ..., - comment_start_string: Any = ..., - comment_end_string: Any = ..., - line_statement_prefix: Any = ..., - line_comment_prefix: Any = ..., - trim_blocks: Any = ..., - lstrip_blocks: Any = ..., - newline_sequence: Any = ..., - keep_trailing_newline: Any = ..., - extensions: Any = ..., - optimized: bool = ..., - undefined: Any = ..., - finalize: Any | None = ..., - autoescape: bool = ..., - ): ... - environment: Environment = ... 
- @classmethod - def from_code(cls, environment, code, globals, uptodate: Any | None = ...): ... - @classmethod - def from_module_dict(cls, environment, module_dict, globals): ... - def render(self, *args, **kwargs) -> Text: ... - def stream(self, *args, **kwargs) -> TemplateStream: ... - def generate(self, *args, **kwargs) -> Iterator[Text]: ... - def new_context( - self, vars: dict[str, Any] | None = ..., shared: bool = ..., locals: dict[str, Any] | None = ... - ) -> Context: ... - def make_module( - self, vars: dict[str, Any] | None = ..., shared: bool = ..., locals: dict[str, Any] | None = ... - ) -> Context: ... - @property - def module(self) -> Any: ... - def get_corresponding_lineno(self, lineno): ... - @property - def is_up_to_date(self) -> bool: ... - @property - def debug_info(self): ... - if sys.version_info >= (3, 6): - def render_async(self, *args, **kwargs) -> Awaitable[Text]: ... - def generate_async(self, *args, **kwargs) -> AsyncIterator[Text]: ... - -class TemplateModule: - __name__: Any - def __init__(self, template, context) -> None: ... - def __html__(self): ... - -class TemplateExpression: - def __init__(self, template, undefined_to_none) -> None: ... - def __call__(self, *args, **kwargs): ... - -class TemplateStream: - def __init__(self, gen) -> None: ... - def dump(self, fp, encoding: Text | None = ..., errors: Text = ...): ... - buffered: bool - def disable_buffering(self) -> None: ... - def enable_buffering(self, size: int = ...) -> None: ... - def __iter__(self): ... - def __next__(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/exceptions.pyi deleted file mode 100644 index c227a9b0d539..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/exceptions.pyi +++ /dev/null @@ -1,31 +0,0 @@ -from typing import Any, Text - -class TemplateError(Exception): - def __init__(self, message: Text | None = ...) -> None: ... - @property - def message(self): ... - def __unicode__(self): ... - -class TemplateNotFound(IOError, LookupError, TemplateError): - message: Any - name: Any - templates: Any - def __init__(self, name, message: Text | None = ...) -> None: ... - -class TemplatesNotFound(TemplateNotFound): - templates: Any - def __init__(self, names: Any = ..., message: Text | None = ...) -> None: ... - -class TemplateSyntaxError(TemplateError): - lineno: int - name: Text - filename: Text - source: Text - translated: bool - def __init__(self, message: Text, lineno: int, name: Text | None = ..., filename: Text | None = ...) -> None: ... - -class TemplateAssertionError(TemplateSyntaxError): ... -class TemplateRuntimeError(TemplateError): ... -class UndefinedError(TemplateRuntimeError): ... -class SecurityError(TemplateRuntimeError): ... -class FilterArgumentError(TemplateRuntimeError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/ext.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/ext.pyi deleted file mode 100644 index 61ffa50d624b..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/ext.pyi +++ /dev/null @@ -1,66 +0,0 @@ -from typing import Any - -GETTEXT_FUNCTIONS: Any - -class ExtensionRegistry(type): - def __new__(cls, name, bases, d): ... - -class Extension: - tags: Any - priority: int - environment: Any - def __init__(self, environment) -> None: ... - def bind(self, environment): ... 
- def preprocess(self, source, name, filename: Any | None = ...): ... - def filter_stream(self, stream): ... - def parse(self, parser): ... - def attr(self, name, lineno: Any | None = ...): ... - def call_method( - self, - name, - args: Any | None = ..., - kwargs: Any | None = ..., - dyn_args: Any | None = ..., - dyn_kwargs: Any | None = ..., - lineno: Any | None = ..., - ): ... - -class InternationalizationExtension(Extension): - tags: Any - def __init__(self, environment) -> None: ... - def parse(self, parser): ... - -class ExprStmtExtension(Extension): - tags: Any - def parse(self, parser): ... - -class LoopControlExtension(Extension): - tags: Any - def parse(self, parser): ... - -class WithExtension(Extension): - tags: Any - def parse(self, parser): ... - -class AutoEscapeExtension(Extension): - tags: Any - def parse(self, parser): ... - -def extract_from_ast(node, gettext_functions: Any = ..., babel_style: bool = ...): ... - -class _CommentFinder: - tokens: Any - comment_tags: Any - offset: int - last_lineno: int - def __init__(self, tokens, comment_tags) -> None: ... - def find_backwards(self, offset): ... - def find_comments(self, lineno): ... - -def babel_extract(fileobj, keywords, comment_tags, options): ... - -i18n: Any -do: Any -loopcontrols: Any -with_: Any -autoescape: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/filters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/filters.pyi deleted file mode 100644 index ac5614e26970..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/filters.pyi +++ /dev/null @@ -1,56 +0,0 @@ -from typing import Any, NamedTuple - -def contextfilter(f): ... -def evalcontextfilter(f): ... -def environmentfilter(f): ... -def make_attrgetter(environment, attribute): ... -def do_forceescape(value): ... -def do_urlencode(value): ... -def do_replace(eval_ctx, s, old, new, count: Any | None = ...): ... -def do_upper(s): ... -def do_lower(s): ... 
-def do_xmlattr(_eval_ctx, d, autospace: bool = ...): ... -def do_capitalize(s): ... -def do_title(s): ... -def do_dictsort(value, case_sensitive: bool = ..., by: str = ...): ... -def do_sort(environment, value, reverse: bool = ..., case_sensitive: bool = ..., attribute: Any | None = ...): ... -def do_default(value, default_value: str = ..., boolean: bool = ...): ... -def do_join(eval_ctx, value, d: str = ..., attribute: Any | None = ...): ... -def do_center(value, width: int = ...): ... -def do_first(environment, seq): ... -def do_last(environment, seq): ... -def do_random(environment, seq): ... -def do_filesizeformat(value, binary: bool = ...): ... -def do_pprint(value, verbose: bool = ...): ... -def do_urlize(eval_ctx, value, trim_url_limit: Any | None = ..., nofollow: bool = ..., target: Any | None = ...): ... -def do_indent(s, width: int = ..., indentfirst: bool = ...): ... -def do_truncate(s, length: int = ..., killwords: bool = ..., end: str = ...): ... -def do_wordwrap(environment, s, width: int = ..., break_long_words: bool = ..., wrapstring: Any | None = ...): ... -def do_wordcount(s): ... -def do_int(value, default: int = ..., base: int = ...): ... -def do_float(value, default: float = ...): ... -def do_format(value, *args, **kwargs): ... -def do_trim(value): ... -def do_striptags(value): ... -def do_slice(value, slices, fill_with: Any | None = ...): ... -def do_batch(value, linecount, fill_with: Any | None = ...): ... -def do_round(value, precision: int = ..., method: str = ...): ... -def do_groupby(environment, value, attribute): ... - -class _GroupTuple(NamedTuple): - grouper: Any - list: Any - -def do_sum(environment, iterable, attribute: Any | None = ..., start: int = ...): ... -def do_list(value): ... -def do_mark_safe(value): ... -def do_mark_unsafe(value): ... -def do_reverse(value): ... -def do_attr(environment, obj, name): ... -def do_map(*args, **kwargs): ... -def do_select(*args, **kwargs): ... -def do_reject(*args, **kwargs): ... 
-def do_selectattr(*args, **kwargs): ... -def do_rejectattr(*args, **kwargs): ... - -FILTERS: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/lexer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/lexer.pyi deleted file mode 100644 index 714f76f92232..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/lexer.pyi +++ /dev/null @@ -1,117 +0,0 @@ -from typing import Any, Tuple - -whitespace_re: Any -string_re: Any -integer_re: Any -name_re: Any -float_re: Any -newline_re: Any -TOKEN_ADD: Any -TOKEN_ASSIGN: Any -TOKEN_COLON: Any -TOKEN_COMMA: Any -TOKEN_DIV: Any -TOKEN_DOT: Any -TOKEN_EQ: Any -TOKEN_FLOORDIV: Any -TOKEN_GT: Any -TOKEN_GTEQ: Any -TOKEN_LBRACE: Any -TOKEN_LBRACKET: Any -TOKEN_LPAREN: Any -TOKEN_LT: Any -TOKEN_LTEQ: Any -TOKEN_MOD: Any -TOKEN_MUL: Any -TOKEN_NE: Any -TOKEN_PIPE: Any -TOKEN_POW: Any -TOKEN_RBRACE: Any -TOKEN_RBRACKET: Any -TOKEN_RPAREN: Any -TOKEN_SEMICOLON: Any -TOKEN_SUB: Any -TOKEN_TILDE: Any -TOKEN_WHITESPACE: Any -TOKEN_FLOAT: Any -TOKEN_INTEGER: Any -TOKEN_NAME: Any -TOKEN_STRING: Any -TOKEN_OPERATOR: Any -TOKEN_BLOCK_BEGIN: Any -TOKEN_BLOCK_END: Any -TOKEN_VARIABLE_BEGIN: Any -TOKEN_VARIABLE_END: Any -TOKEN_RAW_BEGIN: Any -TOKEN_RAW_END: Any -TOKEN_COMMENT_BEGIN: Any -TOKEN_COMMENT_END: Any -TOKEN_COMMENT: Any -TOKEN_LINESTATEMENT_BEGIN: Any -TOKEN_LINESTATEMENT_END: Any -TOKEN_LINECOMMENT_BEGIN: Any -TOKEN_LINECOMMENT_END: Any -TOKEN_LINECOMMENT: Any -TOKEN_DATA: Any -TOKEN_INITIAL: Any -TOKEN_EOF: Any -operators: Any -reverse_operators: Any -operator_re: Any -ignored_tokens: Any -ignore_if_empty: Any - -def describe_token(token): ... -def describe_token_expr(expr): ... -def count_newlines(value): ... -def compile_rules(environment): ... - -class Failure: - message: Any - error_class: Any - def __init__(self, message, cls: Any = ...) -> None: ... - def __call__(self, lineno, filename): ... 
- -class Token(Tuple[int, Any, Any]): - lineno: Any - type: Any - value: Any - def __new__(cls, lineno, type, value): ... - def test(self, expr): ... - def test_any(self, *iterable): ... - -class TokenStreamIterator: - stream: Any - def __init__(self, stream) -> None: ... - def __iter__(self): ... - def __next__(self): ... - -class TokenStream: - name: Any - filename: Any - closed: bool - current: Any - def __init__(self, generator, name, filename) -> None: ... - def __iter__(self): ... - def __bool__(self): ... - __nonzero__: Any - eos: Any - def push(self, token): ... - def look(self): ... - def skip(self, n: int = ...): ... - def next_if(self, expr): ... - def skip_if(self, expr): ... - def __next__(self): ... - def close(self): ... - def expect(self, expr): ... - -def get_lexer(environment): ... - -class Lexer: - newline_sequence: Any - keep_trailing_newline: Any - rules: Any - def __init__(self, environment) -> None: ... - def tokenize(self, source, name: Any | None = ..., filename: Any | None = ..., state: Any | None = ...): ... - def wrap(self, stream, name: Any | None = ..., filename: Any | None = ...): ... - def tokeniter(self, source, name, filename: Any | None = ..., state: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/loaders.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/loaders.pyi deleted file mode 100644 index f707c6b3d1c6..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/loaders.pyi +++ /dev/null @@ -1,78 +0,0 @@ -import sys -from types import ModuleType -from typing import Any, Callable, Iterable, Text, Tuple, Union - -from .environment import Environment - -if sys.version_info >= (3, 7): - from os import PathLike - - _SearchPath = Union[Text, PathLike[str], Iterable[Union[Text, PathLike[str]]]] -else: - _SearchPath = Union[Text, Iterable[Text]] - -def split_template_path(template: Text) -> list[Text]: ... 
- -class BaseLoader: - has_source_access: bool - def get_source(self, environment, template): ... - def list_templates(self): ... - def load(self, environment, name, globals: Any | None = ...): ... - -class FileSystemLoader(BaseLoader): - searchpath: Text - encoding: Any - followlinks: Any - def __init__(self, searchpath: _SearchPath, encoding: Text = ..., followlinks: bool = ...) -> None: ... - def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... - def list_templates(self): ... - -class PackageLoader(BaseLoader): - encoding: Text - manager: Any - filesystem_bound: Any - provider: Any - package_path: Any - def __init__(self, package_name: Text, package_path: Text = ..., encoding: Text = ...) -> None: ... - def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... - def list_templates(self): ... - -class DictLoader(BaseLoader): - mapping: Any - def __init__(self, mapping) -> None: ... - def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... - def list_templates(self): ... - -class FunctionLoader(BaseLoader): - load_func: Any - def __init__(self, load_func) -> None: ... - def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text | None, Callable[..., Any] | None]: ... - -class PrefixLoader(BaseLoader): - mapping: Any - delimiter: Any - def __init__(self, mapping, delimiter: str = ...) -> None: ... - def get_loader(self, template): ... - def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... - def load(self, environment, name, globals: Any | None = ...): ... - def list_templates(self): ... - -class ChoiceLoader(BaseLoader): - loaders: Any - def __init__(self, loaders) -> None: ... - def get_source(self, environment: Environment, template: Text) -> Tuple[Text, Text, Callable[..., Any]]: ... 
- def load(self, environment, name, globals: Any | None = ...): ... - def list_templates(self): ... - -class _TemplateModule(ModuleType): ... - -class ModuleLoader(BaseLoader): - has_source_access: bool - module: Any - package_name: Any - def __init__(self, path) -> None: ... - @staticmethod - def get_template_key(name): ... - @staticmethod - def get_module_filename(name): ... - def load(self, environment, name, globals: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/meta.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/meta.pyi deleted file mode 100644 index 8f0b50bec203..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/meta.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any - -from jinja2.compiler import CodeGenerator - -class TrackingCodeGenerator(CodeGenerator): - undeclared_identifiers: Any - def __init__(self, environment) -> None: ... - def write(self, x): ... - def pull_locals(self, frame): ... - -def find_undeclared_variables(ast): ... -def find_referenced_templates(ast): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/nodes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/nodes.pyi deleted file mode 100644 index 6346ea007716..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/nodes.pyi +++ /dev/null @@ -1,254 +0,0 @@ -from typing import Any - -class Impossible(Exception): ... - -class NodeType(type): - def __new__(cls, name, bases, d): ... - -class EvalContext: - environment: Any - autoescape: Any - volatile: bool - def __init__(self, environment, template_name: Any | None = ...) -> None: ... - def save(self): ... - def revert(self, old): ... - -def get_eval_context(node, ctx): ... - -class Node: - fields: Any - attributes: Any - abstract: bool - def __init__(self, *fields, **attributes) -> None: ... 
- def iter_fields(self, exclude: Any | None = ..., only: Any | None = ...): ... - def iter_child_nodes(self, exclude: Any | None = ..., only: Any | None = ...): ... - def find(self, node_type): ... - def find_all(self, node_type): ... - def set_ctx(self, ctx): ... - def set_lineno(self, lineno, override: bool = ...): ... - def set_environment(self, environment): ... - def __eq__(self, other): ... - def __ne__(self, other): ... - __hash__: Any - -class Stmt(Node): - abstract: bool - -class Helper(Node): - abstract: bool - -class Template(Node): - fields: Any - -class Output(Stmt): - fields: Any - -class Extends(Stmt): - fields: Any - -class For(Stmt): - fields: Any - -class If(Stmt): - fields: Any - -class Macro(Stmt): - fields: Any - name: str - args: list[Any] - defaults: list[Any] - body: list[Any] - -class CallBlock(Stmt): - fields: Any - -class FilterBlock(Stmt): - fields: Any - -class Block(Stmt): - fields: Any - -class Include(Stmt): - fields: Any - -class Import(Stmt): - fields: Any - -class FromImport(Stmt): - fields: Any - -class ExprStmt(Stmt): - fields: Any - -class Assign(Stmt): - fields: Any - -class AssignBlock(Stmt): - fields: Any - -class Expr(Node): - abstract: bool - def as_const(self, eval_ctx: Any | None = ...): ... - def can_assign(self): ... - -class BinExpr(Expr): - fields: Any - operator: Any - abstract: bool - def as_const(self, eval_ctx: Any | None = ...): ... - -class UnaryExpr(Expr): - fields: Any - operator: Any - abstract: bool - def as_const(self, eval_ctx: Any | None = ...): ... - -class Name(Expr): - fields: Any - def can_assign(self): ... - -class Literal(Expr): - abstract: bool - -class Const(Literal): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - @classmethod - def from_untrusted(cls, value, lineno: Any | None = ..., environment: Any | None = ...): ... - -class TemplateData(Literal): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... 
- -class Tuple(Literal): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - def can_assign(self): ... - -class List(Literal): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Dict(Literal): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Pair(Helper): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Keyword(Helper): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class CondExpr(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Filter(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Test(Expr): - fields: Any - -class Call(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Getitem(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - def can_assign(self): ... - -class Getattr(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - def can_assign(self): ... - -class Slice(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Concat(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Compare(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class Operand(Helper): - fields: Any - -class Mul(BinExpr): - operator: str - -class Div(BinExpr): - operator: str - -class FloorDiv(BinExpr): - operator: str - -class Add(BinExpr): - operator: str - -class Sub(BinExpr): - operator: str - -class Mod(BinExpr): - operator: str - -class Pow(BinExpr): - operator: str - -class And(BinExpr): - operator: str - def as_const(self, eval_ctx: Any | None = ...): ... - -class Or(BinExpr): - operator: str - def as_const(self, eval_ctx: Any | None = ...): ... 
- -class Not(UnaryExpr): - operator: str - -class Neg(UnaryExpr): - operator: str - -class Pos(UnaryExpr): - operator: str - -class EnvironmentAttribute(Expr): - fields: Any - -class ExtensionAttribute(Expr): - fields: Any - -class ImportedName(Expr): - fields: Any - -class InternalName(Expr): - fields: Any - def __init__(self) -> None: ... - -class MarkSafe(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class MarkSafeIfAutoescape(Expr): - fields: Any - def as_const(self, eval_ctx: Any | None = ...): ... - -class ContextReference(Expr): ... -class Continue(Stmt): ... -class Break(Stmt): ... - -class Scope(Stmt): - fields: Any - -class EvalContextModifier(Stmt): - fields: Any - -class ScopedEvalContextModifier(EvalContextModifier): - fields: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/optimizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/optimizer.pyi deleted file mode 100644 index d0056acf908e..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/optimizer.pyi +++ /dev/null @@ -1,30 +0,0 @@ -from typing import Any - -from jinja2.visitor import NodeTransformer - -def optimize(node, environment): ... - -class Optimizer(NodeTransformer): - environment: Any - def __init__(self, environment) -> None: ... - def visit_If(self, node): ... - def fold(self, node): ... 
- visit_Add: Any - visit_Sub: Any - visit_Mul: Any - visit_Div: Any - visit_FloorDiv: Any - visit_Pow: Any - visit_Mod: Any - visit_And: Any - visit_Or: Any - visit_Pos: Any - visit_Neg: Any - visit_Not: Any - visit_Compare: Any - visit_Getitem: Any - visit_Getattr: Any - visit_Call: Any - visit_Filter: Any - visit_Test: Any - visit_CondExpr: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/parser.pyi deleted file mode 100644 index ac9942592dde..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/parser.pyi +++ /dev/null @@ -1,68 +0,0 @@ -from typing import Any - -class Parser: - environment: Any - stream: Any - name: Any - filename: Any - closed: bool - extensions: Any - def __init__( - self, environment, source, name: Any | None = ..., filename: Any | None = ..., state: Any | None = ... - ) -> None: ... - def fail(self, msg, lineno: Any | None = ..., exc: Any = ...): ... - def fail_unknown_tag(self, name, lineno: Any | None = ...): ... - def fail_eof(self, end_tokens: Any | None = ..., lineno: Any | None = ...): ... - def is_tuple_end(self, extra_end_rules: Any | None = ...): ... - def free_identifier(self, lineno: Any | None = ...): ... - def parse_statement(self): ... - def parse_statements(self, end_tokens, drop_needle: bool = ...): ... - def parse_set(self): ... - def parse_for(self): ... - def parse_if(self): ... - def parse_block(self): ... - def parse_extends(self): ... - def parse_import_context(self, node, default): ... - def parse_include(self): ... - def parse_import(self): ... - def parse_from(self): ... - def parse_signature(self, node): ... - def parse_call_block(self): ... - def parse_filter_block(self): ... - def parse_macro(self): ... - def parse_print(self): ... - def parse_assign_target(self, with_tuple: bool = ..., name_only: bool = ..., extra_end_rules: Any | None = ...): ... 
- def parse_expression(self, with_condexpr: bool = ...): ... - def parse_condexpr(self): ... - def parse_or(self): ... - def parse_and(self): ... - def parse_not(self): ... - def parse_compare(self): ... - def parse_add(self): ... - def parse_sub(self): ... - def parse_concat(self): ... - def parse_mul(self): ... - def parse_div(self): ... - def parse_floordiv(self): ... - def parse_mod(self): ... - def parse_pow(self): ... - def parse_unary(self, with_filter: bool = ...): ... - def parse_primary(self): ... - def parse_tuple( - self, - simplified: bool = ..., - with_condexpr: bool = ..., - extra_end_rules: Any | None = ..., - explicit_parentheses: bool = ..., - ): ... - def parse_list(self): ... - def parse_dict(self): ... - def parse_postfix(self, node): ... - def parse_filter_expr(self, node): ... - def parse_subscript(self, node): ... - def parse_subscribed(self): ... - def parse_call(self, node): ... - def parse_filter(self, node, start_inline: bool = ...): ... - def parse_test(self, node): ... - def subparse(self, end_tokens: Any | None = ...): ... - def parse(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/runtime.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/runtime.pyi deleted file mode 100644 index d97f2c289d11..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/runtime.pyi +++ /dev/null @@ -1,132 +0,0 @@ -from typing import Any, Text - -from jinja2.environment import Environment -from jinja2.exceptions import TemplateNotFound as TemplateNotFound, TemplateRuntimeError as TemplateRuntimeError -from jinja2.utils import Markup as Markup, concat as concat, escape as escape, missing as missing - -to_string: Any -identity: Any - -def markup_join(seq): ... -def unicode_join(seq): ... - -class TemplateReference: - def __init__(self, context) -> None: ... - def __getitem__(self, name): ... 
- -class Context: - parent: Context | dict[str, Any] - vars: dict[str, Any] - environment: Environment - eval_ctx: Any - exported_vars: Any - name: Text - blocks: dict[str, Any] - def __init__( - self, environment: Environment, parent: Context | dict[str, Any], name: Text, blocks: dict[str, Any] - ) -> None: ... - def super(self, name, current): ... - def get(self, key, default: Any | None = ...): ... - def resolve(self, key): ... - def get_exported(self): ... - def get_all(self): ... - def call(__self, __obj, *args, **kwargs): ... - def derived(self, locals: Any | None = ...): ... - keys: Any - values: Any - items: Any - iterkeys: Any - itervalues: Any - iteritems: Any - def __contains__(self, name): ... - def __getitem__(self, key): ... - -class BlockReference: - name: Any - def __init__(self, name, context, stack, depth) -> None: ... - @property - def super(self): ... - def __call__(self): ... - -class LoopContext: - index0: int - depth0: Any - def __init__(self, iterable, recurse: Any | None = ..., depth0: int = ...) -> None: ... - def cycle(self, *args): ... - first: Any - last: Any - index: Any - revindex: Any - revindex0: Any - depth: Any - def __len__(self): ... - def __iter__(self): ... - def loop(self, iterable): ... - __call__: Any - @property - def length(self): ... - -class LoopContextIterator: - context: Any - def __init__(self, context) -> None: ... - def __iter__(self): ... - def __next__(self): ... - -class Macro: - name: Any - arguments: Any - defaults: Any - catch_kwargs: Any - catch_varargs: Any - caller: Any - def __init__(self, environment, func, name, arguments, defaults, catch_kwargs, catch_varargs, caller) -> None: ... - def __call__(self, *args, **kwargs): ... - -class Undefined: - def __init__(self, hint: Any | None = ..., obj: Any = ..., name: Any | None = ..., exc: Any = ...) -> None: ... - def __getattr__(self, name): ... 
- __add__: Any - __radd__: Any - __mul__: Any - __rmul__: Any - __div__: Any - __rdiv__: Any - __truediv__: Any - __rtruediv__: Any - __floordiv__: Any - __rfloordiv__: Any - __mod__: Any - __rmod__: Any - __pos__: Any - __neg__: Any - __call__: Any - __getitem__: Any - __lt__: Any - __le__: Any - __gt__: Any - __ge__: Any - __int__: Any - __float__: Any - __complex__: Any - __pow__: Any - __rpow__: Any - def __eq__(self, other): ... - def __ne__(self, other): ... - def __hash__(self): ... - def __len__(self): ... - def __iter__(self): ... - def __nonzero__(self): ... - __bool__: Any - -def make_logging_undefined(logger: Any | None = ..., base: Any | None = ...): ... - -class DebugUndefined(Undefined): ... - -class StrictUndefined(Undefined): - __iter__: Any - __len__: Any - __nonzero__: Any - __eq__: Any - __ne__: Any - __bool__: Any - __hash__: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/sandbox.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/sandbox.pyi deleted file mode 100644 index 1fc319cfd76c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/sandbox.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from typing import Any - -from jinja2.environment import Environment - -MAX_RANGE: int -UNSAFE_FUNCTION_ATTRIBUTES: Any -UNSAFE_METHOD_ATTRIBUTES: Any -UNSAFE_GENERATOR_ATTRIBUTES: Any - -def safe_range(*args): ... -def unsafe(f): ... -def is_internal_attribute(obj, attr): ... -def modifies_known_mutable(obj, attr): ... - -class SandboxedEnvironment(Environment): - sandboxed: bool - default_binop_table: Any - default_unop_table: Any - intercepted_binops: Any - intercepted_unops: Any - def intercept_unop(self, operator): ... - binop_table: Any - unop_table: Any - def __init__(self, *args, **kwargs) -> None: ... - def is_safe_attribute(self, obj, attr, value): ... - def is_safe_callable(self, obj): ... - def call_binop(self, context, operator, left, right): ... 
- def call_unop(self, context, operator, arg): ... - def getitem(self, obj, argument): ... - def getattr(self, obj, attribute): ... - def unsafe_undefined(self, obj, attribute): ... - def call(__self, __context, __obj, *args, **kwargs): ... - -class ImmutableSandboxedEnvironment(SandboxedEnvironment): - def is_safe_attribute(self, obj, attr, value): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/tests.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/tests.pyi deleted file mode 100644 index 2645fe9ff41c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/tests.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Any - -number_re: Any -regex_type: Any -test_callable: Any - -def test_odd(value): ... -def test_even(value): ... -def test_divisibleby(value, num): ... -def test_defined(value): ... -def test_undefined(value): ... -def test_none(value): ... -def test_lower(value): ... -def test_upper(value): ... -def test_string(value): ... -def test_mapping(value): ... -def test_number(value): ... -def test_sequence(value): ... -def test_equalto(value, other): ... -def test_sameas(value, other): ... -def test_iterable(value): ... -def test_escaped(value): ... 
- -TESTS: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/utils.pyi deleted file mode 100644 index 41488c04139b..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/utils.pyi +++ /dev/null @@ -1,84 +0,0 @@ -from _typeshed import StrOrBytesPath -from typing import IO, Any, Callable, Iterable, Protocol, Text, TypeVar -from typing_extensions import Literal - -from markupsafe import Markup as Markup, escape as escape, soft_unicode as soft_unicode - -missing: Any -internal_code: Any -concat: Any - -_CallableT = TypeVar("_CallableT", bound=Callable[..., Any]) - -class _ContextFunction(Protocol[_CallableT]): # type: ignore - contextfunction: Literal[True] - __call__: _CallableT - -class _EvalContextFunction(Protocol[_CallableT]): # type: ignore - evalcontextfunction: Literal[True] - __call__: _CallableT - -class _EnvironmentFunction(Protocol[_CallableT]): # type: ignore - environmentfunction: Literal[True] - __call__: _CallableT - -def contextfunction(f: _CallableT) -> _ContextFunction[_CallableT]: ... -def evalcontextfunction(f: _CallableT) -> _EvalContextFunction[_CallableT]: ... -def environmentfunction(f: _CallableT) -> _EnvironmentFunction[_CallableT]: ... -def internalcode(f: _CallableT) -> _CallableT: ... -def is_undefined(obj: object) -> bool: ... -def select_autoescape( - enabled_extensions: Iterable[str] = ..., - disabled_extensions: Iterable[str] = ..., - default_for_string: bool = ..., - default: bool = ..., -) -> Callable[[str], bool]: ... -def consume(iterable: Iterable[object]) -> None: ... -def clear_caches() -> None: ... -def import_string(import_name: str, silent: bool = ...) -> Any: ... -def open_if_exists(filename: StrOrBytesPath, mode: str = ...) -> IO[Any] | None: ... -def object_type_repr(obj: object) -> str: ... -def pformat(obj: object, verbose: bool = ...) -> str: ... 
-def urlize( - text: Markup | Text, trim_url_limit: int | None = ..., rel: Markup | Text | None = ..., target: Markup | Text | None = ... -) -> str: ... -def generate_lorem_ipsum(n: int = ..., html: bool = ..., min: int = ..., max: int = ...) -> Markup | str: ... -def unicode_urlencode(obj: object, charset: str = ..., for_qs: bool = ...) -> str: ... - -class LRUCache: - capacity: Any - def __init__(self, capacity) -> None: ... - def __getnewargs__(self): ... - def copy(self): ... - def get(self, key, default: Any | None = ...): ... - def setdefault(self, key, default: Any | None = ...): ... - def clear(self): ... - def __contains__(self, key): ... - def __len__(self): ... - def __getitem__(self, key): ... - def __setitem__(self, key, value): ... - def __delitem__(self, key): ... - def items(self): ... - def iteritems(self): ... - def values(self): ... - def itervalue(self): ... - def keys(self): ... - def iterkeys(self): ... - __iter__: Any - def __reversed__(self): ... - __copy__: Any - -class Cycler: - items: Any - def __init__(self, *items) -> None: ... - pos: int - def reset(self): ... - @property - def current(self): ... - def __next__(self): ... - -class Joiner: - sep: Any - used: bool - def __init__(self, sep: str = ...) -> None: ... - def __call__(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/visitor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/visitor.pyi deleted file mode 100644 index ef34328dfe16..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Jinja2/jinja2/visitor.pyi +++ /dev/null @@ -1,8 +0,0 @@ -class NodeVisitor: - def get_visitor(self, node): ... - def visit(self, node, *args, **kwargs): ... - def generic_visit(self, node, *args, **kwargs): ... - -class NodeTransformer(NodeVisitor): - def generic_visit(self, node, *args, **kwargs): ... - def visit_list(self, node, *args, **kwargs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml index ee7865e9a528..b713bbfd10a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/METADATA.toml @@ -1 +1 @@ -version = "3.3" +version = "3.3.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockparser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockparser.pyi index a747902f718e..e51db23729a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockparser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockparser.pyi @@ -1,4 +1,4 @@ -from typing import Any, Iterable, List, TypeVar +from typing import Any, Iterable, TypeVar from xml.etree.ElementTree import Element, ElementTree from . import Markdown @@ -6,7 +6,7 @@ from .util import Registry _T = TypeVar("_T") -class State(List[_T]): +class State(list[_T]): def set(self, state: _T) -> None: ... def reset(self) -> None: ... def isstate(self, state: _T) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockprocessors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockprocessors.pyi index fb771af83c05..c0879126e457 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockprocessors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/blockprocessors.pyi @@ -62,4 +62,8 @@ class HRProcessor(BlockProcessor): match: Match[str] class EmptyBlockProcessor(BlockProcessor): ... + +class ReferenceProcessor(BlockProcessor): + RE: Pattern[str] + class ParagraphProcessor(BlockProcessor): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/core.pyi index e43df00dfdb1..c1c527872139 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/core.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/core.pyi @@ -1,3 +1,4 @@ +from _typeshed import Self from typing import Any, BinaryIO, Callable, ClassVar, Mapping, Sequence, Text, TextIO from typing_extensions import Literal from xml.etree.ElementTree import Element @@ -30,7 +31,7 @@ class Markdown: def registerExtensions(self, extensions: Sequence[Extension | str], configs: Mapping[str, Mapping[str, Any]]) -> Markdown: ... def build_extension(self, ext_name: Text, configs: Mapping[str, str]) -> Extension: ... def registerExtension(self, extension: Extension) -> Markdown: ... - def reset(self: Markdown) -> Markdown: ... + def reset(self: Self) -> Self: ... def set_output_format(self, format: Literal["xhtml", "html"]) -> Markdown: ... def is_block_level(self, tag: str) -> bool: ... def convert(self, source: Text) -> Text: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/__init__.pyi index 88842b59b36e..8b6cd2a4795f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any, Mapping, Tuple +from typing import Any, Mapping from markdown.core import Markdown @@ -7,7 +7,7 @@ class Extension: def __init__(self, **kwargs: Any) -> None: ... def getConfig(self, key: str, default: Any = ...) -> Any: ... def getConfigs(self) -> dict[str, Any]: ... - def getConfigInfo(self) -> list[Tuple[str, str]]: ... 
+ def getConfigInfo(self) -> list[tuple[str, str]]: ... def setConfig(self, key: str, value: Any) -> None: ... def setConfigs(self, items: Mapping[str, Any]) -> None: ... def extendMarkdown(self, md: Markdown) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/codehilite.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/codehilite.pyi index b5a59106212f..bcd43c347b94 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/codehilite.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/extensions/codehilite.pyi @@ -34,7 +34,7 @@ class CodeHilite: use_pygments: bool = ..., **options: Any, ) -> None: ... - def hilite(self): ... + def hilite(self, shebang: bool = ...) -> str: ... class HiliteTreeprocessor(Treeprocessor): def code_unescape(self, text): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/inlinepatterns.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/inlinepatterns.pyi index 01068e034120..f534661dc26f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/inlinepatterns.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Markdown/markdown/inlinepatterns.pyi @@ -1,4 +1,5 @@ -from typing import Any, Match, Tuple +import typing +from typing import Any, Match from xml.etree.ElementTree import Element def build_inlinepatterns(md, **kwargs): ... @@ -47,7 +48,7 @@ class Pattern: class InlineProcessor(Pattern): safe_mode: bool = ... def __init__(self, pattern, md: Any | None = ...) -> None: ... - def handleMatch(self, m: Match[str], data) -> Tuple[Element, int, int] | Tuple[None, None, None]: ... # type: ignore + def handleMatch(self, m: Match[str], data) -> tuple[Element, int, int] | tuple[None, None, None]: ... # type: ignore[override] class SimpleTextPattern(Pattern): ... 
class SimpleTextInlineProcessor(InlineProcessor): ... @@ -93,7 +94,7 @@ class LinkInlineProcessor(InlineProcessor): class ImageInlineProcessor(LinkInlineProcessor): ... class ReferenceInlineProcessor(LinkInlineProcessor): - NEWLINE_CLEANUP_RE: Pattern + NEWLINE_CLEANUP_RE: typing.Pattern def evalId(self, data, index, text): ... def makeTag(self, href, title, text): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/METADATA.toml deleted file mode 100644 index 2266533dffdd..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/METADATA.toml +++ /dev/null @@ -1,3 +0,0 @@ -version = "1.1" -python2 = true -obsolete_since = "2.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/__init__.pyi deleted file mode 100644 index 1b05a386f825..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/__init__.pyi +++ /dev/null @@ -1,56 +0,0 @@ -import string -import sys -from typing import Any, Callable, Iterable, Mapping, Sequence, Text, Tuple -from typing_extensions import SupportsIndex - -from markupsafe._compat import text_type -from markupsafe._native import escape as escape, escape_silent as escape_silent, soft_unicode as soft_unicode - -class Markup(text_type): - def __new__(cls, base: Text = ..., encoding: Text | None = ..., errors: Text = ...) -> Markup: ... - def __html__(self) -> Markup: ... - def __add__(self, other: text_type) -> Markup: ... - def __radd__(self, other: text_type) -> Markup: ... - def __mul__(self, num: int) -> Markup: ... # type: ignore - def __rmul__(self, num: int) -> Markup: ... # type: ignore - def __mod__(self, *args: Any) -> Markup: ... - def join(self, seq: Iterable[text_type]) -> Markup: ... 
- def split(self, sep: text_type | None = ..., maxsplit: SupportsIndex = ...) -> list[Markup]: ... # type: ignore - def rsplit(self, sep: text_type | None = ..., maxsplit: SupportsIndex = ...) -> list[Markup]: ... # type: ignore - def splitlines(self, keepends: bool = ...) -> list[Markup]: ... # type: ignore - def unescape(self) -> Text: ... - def striptags(self) -> Text: ... - @classmethod - def escape(cls, s: text_type) -> Markup: ... # noqa: F811 - def partition(self, sep: text_type) -> Tuple[Markup, Markup, Markup]: ... - def rpartition(self, sep: text_type) -> Tuple[Markup, Markup, Markup]: ... - def format(self, *args: Any, **kwargs: Any) -> Markup: ... - def __html_format__(self, format_spec: text_type) -> Markup: ... - def __getslice__(self, start: int, stop: int) -> Markup: ... - def __getitem__(self, i: int | slice) -> Markup: ... - def capitalize(self) -> Markup: ... - def title(self) -> Markup: ... - def lower(self) -> Markup: ... - def upper(self) -> Markup: ... - def swapcase(self) -> Markup: ... - def replace(self, old: text_type, new: text_type, count: SupportsIndex = ...) -> Markup: ... - def ljust(self, width: SupportsIndex, fillchar: text_type = ...) -> Markup: ... - def rjust(self, width: SupportsIndex, fillchar: text_type = ...) -> Markup: ... - def lstrip(self, chars: text_type | None = ...) -> Markup: ... - def rstrip(self, chars: text_type | None = ...) -> Markup: ... - def strip(self, chars: text_type | None = ...) -> Markup: ... - def center(self, width: SupportsIndex, fillchar: text_type = ...) -> Markup: ... - def zfill(self, width: SupportsIndex) -> Markup: ... - def translate(self, table: Mapping[int, int | text_type | None] | Sequence[int | text_type | None]) -> Markup: ... - if sys.version_info >= (3, 8): - def expandtabs(self, tabsize: SupportsIndex = ...) -> Markup: ... - else: - def expandtabs(self, tabsize: int = ...) -> Markup: ... 
- -class EscapeFormatter(string.Formatter): - escape: Callable[[text_type], Markup] - def __init__(self, escape: Callable[[text_type], Markup]) -> None: ... # noqa: F811 - def format_field(self, value: text_type, format_spec: text_type) -> Markup: ... - -if sys.version_info >= (3,): - soft_str = soft_unicode diff --git a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_compat.pyi deleted file mode 100644 index a3c2bc7fed25..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_compat.pyi +++ /dev/null @@ -1,21 +0,0 @@ -import sys -from typing import Iterator, Mapping, Tuple, TypeVar - -_K = TypeVar("_K") -_V = TypeVar("_V") - -PY2: bool - -def iteritems(d: Mapping[_K, _V]) -> Iterator[Tuple[_K, _V]]: ... - -if sys.version_info >= (3,): - text_type = str - string_types = (str,) - unichr = chr - int_types = (int,) -else: - from __builtin__ import unichr as unichr - - text_type = unicode - string_types = (str, unicode) - int_types = (int, long) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_constants.pyi b/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_constants.pyi deleted file mode 100644 index 81dc05effa1a..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_constants.pyi +++ /dev/null @@ -1,3 +0,0 @@ -from typing import Text - -HTML_ENTITIES: dict[Text, int] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_native.pyi b/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_native.pyi deleted file mode 100644 index c9f993e01bfc..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_native.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Text - -from . 
import Markup -from ._compat import text_type - -def escape(s: Markup | Text) -> Markup: ... -def escape_silent(s: None | Markup | Text) -> Markup: ... -def soft_unicode(s: Text) -> text_type: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_speedups.pyi b/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_speedups.pyi deleted file mode 100644 index c9f993e01bfc..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/MarkupSafe/markupsafe/_speedups.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Text - -from . import Markup -from ._compat import text_type - -def escape(s: Markup | Text) -> Markup: ... -def escape_silent(s: None | Markup | Text) -> Markup: ... -def soft_unicode(s: Text) -> text_type: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/METADATA.toml index cb3899bf24c0..549fe9f6e3ba 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/METADATA.toml @@ -1 +1 @@ -version = "8.3" +version = "9.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BlpImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BlpImagePlugin.pyi index 3e004d9df65c..be0f721546d0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BlpImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BlpImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile, PyDecoder @@ -18,8 +19,8 @@ def decode_dxt5(data): ... class BLPFormatError(NotImplementedError): ... 
class BlpImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["BLP"]] + format_description: ClassVar[str] class _BLPBaseDecoder(PyDecoder): magic: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BmpImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BmpImagePlugin.pyi index c1712429e979..3ebe5817594b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BmpImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BmpImagePlugin.pyi @@ -1,16 +1,16 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile BIT2MODE: Any class BmpImageFile(ImageFile): - format_description: str - format: str + format_description: ClassVar[str] + format: ClassVar[Literal["BMP", "DIB", "CUR"]] COMPRESSIONS: Any class DibImageFile(BmpImageFile): - format: str - format_description: str + format: ClassVar[Literal["DIB"]] SAVE: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BufrStubImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BufrStubImagePlugin.pyi index 1fd49e1ba88e..812b4e6ffa61 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BufrStubImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/BufrStubImagePlugin.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import StubImageFile def register_handler(handler) -> None: ... 
class BufrStubImageFile(StubImageFile): - format: str - format_description: str + format: ClassVar[Literal["BUFR"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/CurImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/CurImagePlugin.pyi index a6085cb8b93f..09d50204f435 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/CurImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/CurImagePlugin.pyi @@ -1,5 +1,7 @@ +from typing import ClassVar +from typing_extensions import Literal + from .BmpImagePlugin import BmpImageFile class CurImageFile(BmpImageFile): - format: str - format_description: str + format: ClassVar[Literal["CUR"]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DcxImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DcxImagePlugin.pyi index 4b0389aae635..f8f64a9fd179 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DcxImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DcxImagePlugin.pyi @@ -1,12 +1,12 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .PcxImagePlugin import PcxImageFile MAGIC: int class DcxImageFile(PcxImageFile): - format: str - format_description: str + format: ClassVar[Literal["DCX"]] frame: Any fp: Any def seek(self, frame) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DdsImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DdsImagePlugin.pyi index 10974403b363..19893d9b6bb0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DdsImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/DdsImagePlugin.pyi @@ -1,3 +1,4 @@ +from typing import ClassVar from typing_extensions import Literal from .ImageFile import ImageFile @@ -65,6 +66,6 @@ DXGI_FORMAT_BC7_UNORM: int DXGI_FORMAT_BC7_UNORM_SRGB: int class DdsImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["DDS"]] + format_description: ClassVar[str] def load_seek(self, pos) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/EpsImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/EpsImagePlugin.pyi index 741f03ec810f..7e76e8681069 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/EpsImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/EpsImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -7,7 +8,7 @@ field: Any gs_windows_binary: Any def has_ghostscript(): ... -def Ghostscript(tile, size, fp, scale: int = ...): ... +def Ghostscript(tile, size, fp, scale: int = ..., transparency: bool = ...): ... class PSFile: fp: Any @@ -17,11 +18,11 @@ class PSFile: def readline(self): ... class EpsImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["EPS"]] + format_description: ClassVar[str] mode_map: Any im: Any mode: Any tile: Any - def load(self, scale: int = ...) -> None: ... + def load(self, scale: int = ..., transparency: bool = ...) -> None: ... def load_seek(self, *args, **kwargs) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FitsStubImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FitsStubImagePlugin.pyi index ec94b35e37b0..cdd63750da96 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FitsStubImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FitsStubImagePlugin.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import StubImageFile def register_handler(handler) -> None: ... class FITSStubImageFile(StubImageFile): - format: str - format_description: str + format: ClassVar[Literal["FITS"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FliImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FliImagePlugin.pyi index 11f57d49d854..9859378fba31 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FliImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FliImagePlugin.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile class FliImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["FLI"]] + format_description: ClassVar[str] def seek(self, frame) -> None: ... def tell(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FpxImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FpxImagePlugin.pyi index 0f88b6639730..069cb6b20494 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FpxImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FpxImagePlugin.pyi @@ -1,11 +1,12 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile MODES: Any class FpxImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["FPX"]] + format_description: ClassVar[str] fp: Any def load(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FtexImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FtexImagePlugin.pyi index b123ffe2c3f0..3205880ef1f1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FtexImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/FtexImagePlugin.pyi @@ -1,3 +1,6 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile MAGIC: bytes @@ -5,6 +8,6 @@ FORMAT_DXT1: int FORMAT_UNCOMPRESSED: int class FtexImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["FTEX"]] + format_description: ClassVar[str] def load_seek(self, pos) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GbrImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GbrImagePlugin.pyi index 070d87d68a05..b5ede10c8644 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GbrImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GbrImagePlugin.pyi @@ -1,9 +1,10 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile class GbrImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["GBR"]] + format_description: ClassVar[str] im: Any def load(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GdImageFile.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GdImageFile.pyi index 185d27b5de70..a50f3f3c3fb6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GdImageFile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GdImageFile.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile class GdImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["GD"]] + format_description: ClassVar[str] def open(fp, mode: str = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GifImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GifImagePlugin.pyi index 47827071ff50..ff064cc50eab 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GifImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GifImagePlugin.pyi @@ -1,10 +1,11 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile class GifImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["GIF"]] + format_description: ClassVar[str] global_palette: Any def data(self): ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GribStubImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GribStubImagePlugin.pyi index 34c773a25d1d..70ebaadc095c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GribStubImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/GribStubImagePlugin.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import StubImageFile def register_handler(handler) -> None: ... 
class GribStubImageFile(StubImageFile): - format: str - format_description: str + format: ClassVar[Literal["GRIB"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi index b036a17abca2..62760e41c107 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Hdf5StubImagePlugin.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import StubImageFile def register_handler(handler) -> None: ... class HDF5StubImageFile(StubImageFile): - format: str - format_description: str + format: ClassVar[Literal["HDF5"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcnsImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcnsImagePlugin.pyi index 8606fcf13cf3..42714148c49f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcnsImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcnsImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -22,8 +23,8 @@ class IcnsFile: def getimage(self, size: Any | None = ...): ... class IcnsImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["ICNS"]] + format_description: ClassVar[str] @property def size(self): ... 
@size.setter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcoImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcoImagePlugin.pyi index 536de0adee68..f84ed76d50e5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcoImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IcoImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -13,8 +14,8 @@ class IcoFile: def frame(self, idx): ... class IcoImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["ICO"]] + format_description: ClassVar[str] @property def size(self): ... @size.setter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImImagePlugin.pyi index b6c3d84b6c5c..09fd6841941f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -18,8 +19,8 @@ split: Any def number(s): ... class ImImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["IM"]] + format_description: ClassVar[str] @property def n_frames(self): ... 
@property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Image.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Image.pyi index 4d7113496b60..d084d65ec502 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Image.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Image.pyi @@ -1,7 +1,7 @@ -from _typeshed import SupportsRead, SupportsWrite +from _typeshed import Self, SupportsRead, SupportsWrite from collections.abc import Iterable, Iterator, MutableMapping from pathlib import Path -from typing import Any, Callable, Dict, Protocol, Sequence, SupportsBytes, Tuple, Union +from typing import Any, Callable, ClassVar, Protocol, Sequence, SupportsBytes, Union from typing_extensions import Literal from ._imaging import ( @@ -16,13 +16,13 @@ from .ImagePalette import ImagePalette _Mode = Literal["1", "CMYK", "F", "HSV", "I", "L", "LAB", "P", "RGB", "RGBA", "RGBX", "YCbCr"] _Resample = Literal[0, 1, 2, 3, 4, 5] -_Size = Tuple[int, int] -_Box = Tuple[int, int, int, int] +_Size = tuple[int, int] +_Box = tuple[int, int, int, int] _ConversionMatrix = Union[ - Tuple[float, float, float, float], Tuple[float, float, float, float, float, float, float, float, float, float, float, float], + tuple[float, float, float, float], tuple[float, float, float, float, float, float, float, float, float, float, float, float], ] -_Color = Union[float, Tuple[float, ...]] +_Color = Union[float, tuple[float, ...]] class _Writeable(SupportsWrite[bytes], Protocol): def seek(self, __offset: int) -> Any: ... @@ -88,7 +88,7 @@ MODES: list[_Mode] def getmodebase(mode: _Mode) -> Literal["L", "RGB"]: ... def getmodetype(mode: _Mode) -> Literal["L", "I", "F"]: ... -def getmodebandnames(mode: _Mode) -> Tuple[str, ...]: ... +def getmodebandnames(mode: _Mode) -> tuple[str, ...]: ... def getmodebands(mode: _Mode) -> int: ... def preinit() -> None: ... def init() -> None: ... 
@@ -99,24 +99,26 @@ class _E: def __add__(self, other) -> _E: ... def __mul__(self, other) -> _E: ... -_ImageState = Tuple[Dict[str, Any], str, Tuple[int, int], Any, bytes] +_ImageState = tuple[dict[str, Any], str, tuple[int, int], Any, bytes] class Image: - format: Any - format_description: Any + format: ClassVar[str | None] + format_description: ClassVar[str | None] im: Any mode: str palette: Any info: dict[Any, Any] readonly: int pyaccess: Any + is_animated: bool # not present on all Image objects + n_frames: int # not present on all Image objects @property def width(self) -> int: ... @property def height(self) -> int: ... @property def size(self) -> tuple[int, int]: ... - def __enter__(self) -> Image: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... def close(self) -> None: ... def __eq__(self, other: object) -> bool: ... @@ -149,7 +151,7 @@ class Image: def crop(self, box: _Box | None = ...) -> Image: ... def draft(self, mode: str, size: _Size) -> None: ... def filter(self, filter: Filter | Callable[[], Filter]) -> Image: ... - def getbands(self) -> Tuple[str, ...]: ... + def getbands(self) -> tuple[str, ...]: ... def getbbox(self) -> tuple[int, int, int, int] | None: ... def getcolors(self, maxcolors: int = ...) -> list[tuple[int, int]]: ... def getdata(self, band: int | None = ...): ... @@ -196,8 +198,8 @@ class Image: **params: Any, ) -> None: ... def seek(self, frame: int) -> None: ... - def show(self, title: str | None = ..., command: str | None = ...) -> None: ... - def split(self) -> Tuple[Image, ...]: ... + def show(self, title: str | None = ...) -> None: ... + def split(self) -> tuple[Image, ...]: ... def getchannel(self, channel: int | str) -> Image: ... def tell(self) -> int: ... def thumbnail(self, size: tuple[int, int], resample: _Resample = ..., reducing_gap: float = ...) -> None: ... 
@@ -209,7 +211,7 @@ class Image: resample: _Resample = ..., fill: int = ..., fillcolor: _Color | int | None = ..., - ) -> None: ... + ) -> Image: ... def transpose(self, method: Literal[0, 1, 2, 3, 4, 5, 6]) -> Image: ... def effect_spread(self, distance: int) -> Image: ... def toqimage(self): ... @@ -218,14 +220,14 @@ class Image: class ImagePointHandler: ... class ImageTransformHandler: ... -def new(mode: _Mode, size: tuple[int, int], color: float | Tuple[float, ...] | str = ...) -> Image: ... +def new(mode: _Mode, size: tuple[int, int], color: float | tuple[float, ...] | str = ...) -> Image: ... def frombytes(mode: _Mode, size: tuple[int, int], data, decoder_name: str = ..., *args) -> Image: ... def frombuffer(mode: _Mode, size: tuple[int, int], data, decoder_name: str = ..., *args) -> Image: ... def fromarray(obj, mode: _Mode | None = ...) -> Image: ... def fromqimage(im) -> Image: ... def fromqpixmap(im) -> Image: ... def open( - fp: str | bytes | Path | SupportsRead[bytes], mode: Literal["r"] = ..., formats: list[str] | tuple[str] | None = ... + fp: str | bytes | Path | SupportsRead[bytes], mode: Literal["r"] = ..., formats: list[str] | tuple[str, ...] | None = ... ) -> Image: ... def alpha_composite(im1: Image, im2: Image) -> Image: ... def blend(im1: Image, im2: Image, alpha: float) -> Image: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageColor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageColor.pyi index d23db527d996..79aaba0ba7c1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageColor.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageColor.pyi @@ -1,6 +1,10 @@ -from typing import Any +from typing import Union -def getrgb(color): ... -def getcolor(color, mode): ... +_RGB = Union[tuple[int, int, int], tuple[int, int, int, int]] +_Ink = Union[str, int, _RGB] +_GreyScale = tuple[int, int] -colormap: Any +def getrgb(color: _Ink) -> _RGB: ... 
+def getcolor(color: _Ink, mode: str) -> _RGB | _GreyScale: ... + +colormap: dict[str, str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageDraw.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageDraw.pyi index b8f197368140..a5424aa829b1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageDraw.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageDraw.pyi @@ -1,12 +1,12 @@ from collections.abc import Container -from typing import Any, Sequence, Tuple, Union, overload +from typing import Any, Sequence, Union, overload from typing_extensions import Literal from .Image import Image +from .ImageColor import _Ink from .ImageFont import _Font -_Ink = Union[str, int, Tuple[int, int, int]] -_XY = Sequence[Union[float, Tuple[float, float]]] +_XY = Sequence[Union[float, tuple[float, float]]] _Outline = Any class ImageDraw: @@ -30,7 +30,7 @@ class ImageDraw: width: float = ..., ) -> None: ... def point(self, xy: _XY, fill: _Ink | None = ...) -> None: ... - def polygon(self, xy: _XY, fill: _Ink | None = ..., outline: _Ink | None = ...) -> None: ... + def polygon(self, xy: _XY, fill: _Ink | None = ..., outline: _Ink | None = ..., width: float = ...) -> None: ... 
def regular_polygon( self, bounding_circle: tuple[float, float] | tuple[float, float, float] | list[int], diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFile.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFile.pyi index 745509b49699..44560903ab78 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFile.pyi @@ -1,16 +1,14 @@ -from typing import Any, NoReturn, TypeVar +from _typeshed import Self +from typing import Any, NoReturn from .Image import Image -_T = TypeVar("_T") - MAXBLOCK: int SAFEBLOCK: Any LOAD_TRUNCATED_IMAGES: bool ERRORS: Any def raise_oserror(error) -> NoReturn: ... -def raise_ioerror(error) -> NoReturn: ... class ImageFile(Image): custom_mimetype: Any @@ -42,7 +40,7 @@ class Parser: def reset(self) -> None: ... decode: Any def feed(self, data) -> None: ... - def __enter__(self: _T) -> _T: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... def close(self) -> Image: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFilter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFilter.pyi index af03bce671dd..6ec69d53bbcb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFilter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageFilter.pyi @@ -1,10 +1,10 @@ from _typeshed import Self -from typing import Any, Callable, Iterable, Sequence, Tuple, Type +from typing import Any, Callable, Iterable, Sequence from typing_extensions import Literal from .Image import Image -_FilterArgs = Tuple[Sequence[int], int, int, Sequence[int]] +_FilterArgs = tuple[Sequence[int], int, int, Sequence[int]] # filter image parameters below are the C images, i.e. Image().im. @@ -121,7 +121,7 @@ class Color3DLUT(MultibandFilter): ) -> None: ... 
@classmethod def generate( - cls: Type[Self], + cls: type[Self], size: int | tuple[int, int, int], callback: Callable[[float, float, float], Iterable[float]], channels: int = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageOps.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageOps.pyi index bff43f61bb18..2c84e4dc6ba1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageOps.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageOps.pyi @@ -1,6 +1,9 @@ -from typing import Any, Iterable, Protocol +from typing import Any, Iterable, Protocol, Union from .Image import Image, _Resample, _Size +from .ImageColor import _Ink + +_Border = Union[int, tuple[int, int], tuple[int, int, int, int]] class _Deformer(Protocol): def getmesh(self, image: Image): ... @@ -21,11 +24,11 @@ def contain(image: Image, size: _Size, method: _Resample = ...) -> Image: ... def pad( image: Image, size: _Size, method: _Resample = ..., color: Any | None = ..., centering: Iterable[float] = ... ) -> Image: ... -def crop(image: Image, border: int = ...) -> Image: ... +def crop(image: Image, border: _Border = ...) -> Image: ... def scale(image: Image, factor: float, resample: _Resample = ...) -> Image: ... def deform(image: Image, deformer: _Deformer, resample: _Resample = ...) -> Image: ... def equalize(image: Image, mask: Any | None = ...) -> Image: ... -def expand(image: Image, border: int = ..., fill: int = ...) -> Image: ... +def expand(image: Image, border: _Border = ..., fill: _Ink = ...) -> Image: ... def fit(image: Image, size: _Size, method: _Resample = ..., bleed: float = ..., centering: Iterable[float] = ...) -> Image: ... def flip(image: Image) -> Image: ... def grayscale(image: Image) -> Image: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageQt.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageQt.pyi index 8b213852f200..b223bba1455d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageQt.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageQt.pyi @@ -15,7 +15,7 @@ def fromqimage(im: Image | QImage) -> Image: ... def fromqpixmap(im: Image | QImage) -> Image: ... def align8to32(bytes: bytes, width: int, mode: Literal["1", "L", "P"]) -> bytes: ... -class ImageQt(QImage): # type: ignore +class ImageQt(QImage): def __init__(self, im: Image) -> None: ... def toqimage(im: Image) -> ImageQt: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageShow.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageShow.pyi index 27daf3fb46c3..917de83d75a7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageShow.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImageShow.pyi @@ -1,4 +1,5 @@ from typing import Any +from typing_extensions import Literal def register(viewer, order: int = ...) -> None: ... def show(image, title: Any | None = ..., **options): ... @@ -30,8 +31,11 @@ class UnixViewer(Viewer): def get_command(self, file, **options): ... def show_file(self, file, **options): ... +class XDGViewer(UnixViewer): + def get_command_ex(self, file, **options) -> tuple[Literal["xdg-open"], Literal["xdg-open"]]: ... + class DisplayViewer(UnixViewer): - def get_command_ex(self, file, **options): ... + def get_command_ex(self, file, title: str | None = ..., **options): ... class GmDisplayViewer(UnixViewer): def get_command_ex(self, file, **options): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImtImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImtImagePlugin.pyi index 567cbe42ef20..eadbef84fa25 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImtImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/ImtImagePlugin.pyi @@ -1,9 +1,10 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile field: Any class ImtImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["IMT"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IptcImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IptcImagePlugin.pyi index dac96b3b5c6a..738caa991214 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IptcImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/IptcImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -9,8 +10,8 @@ def i(c): ... def dump(c) -> None: ... class IptcImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["IPTC"]] + format_description: ClassVar[str] def getint(self, key): ... def field(self): ... 
im: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi index 72907813434a..a37d618f97f1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/Jpeg2KImagePlugin.pyi @@ -1,10 +1,11 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile class Jpeg2KImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["JPEG2000"]] + format_description: ClassVar[str] reduce: Any tile: Any def load(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/JpegImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/JpegImagePlugin.pyi index 0941106925f3..3e2fefd0a490 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/JpegImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/JpegImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -11,8 +12,8 @@ def DQT(self, marker) -> None: ... MARKER: Any class JpegImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["JPEG", "MPO"]] + format_description: ClassVar[str] def load_read(self, read_bytes): ... 
mode: Any tile: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/McIdasImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/McIdasImagePlugin.pyi index c492fe2ac7c0..6e7be9913b5a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/McIdasImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/McIdasImagePlugin.pyi @@ -1,5 +1,8 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile class McIdasImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["MCIDAS"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MicImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MicImagePlugin.pyi index 48fe3c7509e0..b4fdc9d05317 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MicImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MicImagePlugin.pyi @@ -1,10 +1,10 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .TiffImagePlugin import TiffImageFile class MicImageFile(TiffImageFile): - format: str - format_description: str + format: ClassVar[Literal["MIC"]] fp: Any frame: Any def seek(self, frame) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpegImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpegImagePlugin.pyi index 4cadacb07b02..0f01f2b7ba37 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpegImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpegImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -13,5 +14,5 @@ class BitStream: def read(self, bits): ... 
class MpegImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["MPEG"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpoImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpoImagePlugin.pyi index b89dfa2e7b3a..2096c466cb6c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpoImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MpoImagePlugin.pyi @@ -1,10 +1,10 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .JpegImagePlugin import JpegImageFile class MpoImageFile(JpegImageFile): - format: str - format_description: str + format: ClassVar[Literal["MPO"]] def load_seek(self, pos) -> None: ... fp: Any offset: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MspImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MspImagePlugin.pyi index 2f34b05597d4..bc3f25db4952 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MspImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/MspImagePlugin.pyi @@ -1,8 +1,11 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile, PyDecoder class MspImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["MSP"]] + format_description: ClassVar[str] class MspDecoder(PyDecoder): def decode(self, buffer): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcdImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcdImagePlugin.pyi index 0c147ca5e452..a5ea3dd00f33 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcdImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcdImagePlugin.pyi @@ -1,9 +1,10 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile class PcdImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["PCD"]] + format_description: ClassVar[str] im: Any def load_end(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcxImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcxImagePlugin.pyi index 8a47f72a8bed..b8916662a70b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcxImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PcxImagePlugin.pyi @@ -1,11 +1,12 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile logger: Any class PcxImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["PCX", "DCX"]] + format_description: ClassVar[str] SAVE: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PdfParser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PdfParser.pyi index 4e484ae2a817..6cf70bc63668 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PdfParser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PdfParser.pyi @@ -1,5 +1,5 @@ import collections -from typing import Any, List +from typing import Any def encode_text(s: str) -> bytes: ... @@ -44,7 +44,7 @@ class PdfName: allowed_chars: Any def __bytes__(self): ... 
-class PdfArray(List[Any]): +class PdfArray(list[Any]): def __bytes__(self): ... class PdfDict(collections.UserDict): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PixarImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PixarImagePlugin.pyi index 31890c45d362..8c9bcaf2f608 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PixarImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PixarImagePlugin.pyi @@ -1,5 +1,8 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile class PixarImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["PIXAR"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PngImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PngImagePlugin.pyi index 3126f6b23b26..b2033e678889 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PngImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PngImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from ._binary import o8 as o8 from .ImageFile import ImageFile @@ -75,8 +76,8 @@ class PngStream(ChunkStream): def chunk_fdAT(self, pos, length): ... class PngImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["PNG"]] + format_description: ClassVar[str] @property def text(self): ... 
fp: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PpmImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PpmImagePlugin.pyi index 2adee46f2c62..797bbd79b043 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PpmImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PpmImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -6,5 +7,5 @@ b_whitespace: bytes MODES: Any class PpmImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["PPM"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PsdImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PsdImagePlugin.pyi index 5a3d381fe61b..0d38350117cd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PsdImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/PsdImagePlugin.pyi @@ -1,12 +1,13 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile MODES: Any class PsdImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["PSD"]] + format_description: ClassVar[str] mode: Any tile: Any frame: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SgiImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SgiImagePlugin.pyi index 2d498dbd28da..e7f4ff96e1b7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SgiImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SgiImagePlugin.pyi @@ -1,12 +1,13 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile, PyDecoder 
MODES: Any class SgiImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["SGI"]] + format_description: ClassVar[str] class SGI16Decoder(PyDecoder): def decode(self, buffer): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SpiderImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SpiderImagePlugin.pyi index 8a3e213c0396..17f5e6d3a4d7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SpiderImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SpiderImagePlugin.pyi @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, ClassVar from typing_extensions import Literal from .ImageFile import ImageFile @@ -11,8 +11,8 @@ def isSpiderHeader(t): ... def isSpiderImage(filename): ... class SpiderImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["SPIDER"]] + format_description: ClassVar[str] @property def n_frames(self): ... 
@property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SunImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SunImagePlugin.pyi index 731c7884610a..27ac28a588ea 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SunImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/SunImagePlugin.pyi @@ -1,5 +1,8 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile class SunImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["SUN"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TgaImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TgaImagePlugin.pyi index 3072da3019a4..5022107b6676 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TgaImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TgaImagePlugin.pyi @@ -1,11 +1,12 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile MODES: Any class TgaImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["TGA"]] + format_description: ClassVar[str] SAVE: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffImagePlugin.pyi index 5eaaa7b6f734..7fb5cb47fac0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffImagePlugin.pyi @@ -1,6 +1,7 @@ from collections.abc import MutableMapping from numbers import Rational -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile @@ -136,8 +137,8 @@ class 
ImageFileDirectory_v1(ImageFileDirectory_v2): ImageFileDirectory = ImageFileDirectory_v1 class TiffImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["TIFF", "MIC"]] + format_description: ClassVar[str] tag_v2: Any tag: Any def __init__(self, fp: Any | None = ..., filename: Any | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffTags.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffTags.pyi index 5559e169bd5c..64ede121bfb6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffTags.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/TiffTags.pyi @@ -1,4 +1,4 @@ -from typing import Any, Dict, NamedTuple, Tuple, Union +from typing import Any, NamedTuple, Union from typing_extensions import Literal class _TagInfo(NamedTuple): @@ -36,7 +36,7 @@ DOUBLE: Literal[12] IFD: Literal[13] _TagType = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] -_TagTuple = Union[Tuple[str, _TagType, int], Tuple[str, _TagInfo, int, Dict[str, int]]] +_TagTuple = Union[tuple[str, _TagType, int], tuple[str, _TagInfo, int, dict[str, int]]] TAGS_V2: dict[int, _TagTuple] TAGS_V2_GROUPS: dict[int, dict[int, _TagTuple]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WalImageFile.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WalImageFile.pyi index 6355032c853c..24d1cb5cc35f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WalImageFile.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WalImageFile.pyi @@ -1,3 +1,13 @@ +from typing import ClassVar +from typing_extensions import Literal + +from . import ImageFile + +class WalImageFile(ImageFile.ImageFile): + format: ClassVar[Literal["WAL"]] + format_description: ClassVar[str] + def load(self) -> None: ... + def open(filename): ... 
quake2palette: bytes diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WebPImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WebPImagePlugin.pyi index 97614e6d03f1..f7b7e6c5d0a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WebPImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WebPImagePlugin.pyi @@ -1,12 +1,13 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile SUPPORTED: bool class WebPImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["WEBP"]] + format_description: ClassVar[str] def seek(self, frame) -> None: ... fp: Any tile: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WmfImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WmfImagePlugin.pyi index b7244948f03a..5f7c4d4ece81 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WmfImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/WmfImagePlugin.pyi @@ -1,4 +1,5 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import StubImageFile @@ -10,6 +11,6 @@ class WmfHandler: def load(self, im): ... class WmfStubImageFile(StubImageFile): - format: str - format_description: str + format: ClassVar[Literal["WMF"]] + format_description: ClassVar[str] def load(self, dpi: Any | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XVThumbImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XVThumbImagePlugin.pyi index 3d098da18a06..ebfc7d98d992 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XVThumbImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XVThumbImagePlugin.pyi @@ -1,7 +1,10 @@ +from typing import ClassVar +from typing_extensions import Literal + from .ImageFile import ImageFile PALETTE: bytes class XVThumbImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["XVThumb"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XbmImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XbmImagePlugin.pyi index f8305c04a35a..588aaac2d275 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XbmImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XbmImagePlugin.pyi @@ -1,9 +1,10 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile xbm_head: Any class XbmImageFile(ImageFile): - format: str - format_description: str + format: ClassVar[Literal["XBM"]] + format_description: ClassVar[str] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XpmImagePlugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XpmImagePlugin.pyi index 600a9e0cd67c..d0df39a6e263 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XpmImagePlugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/XpmImagePlugin.pyi @@ -1,10 +1,11 @@ -from typing import Any +from typing import Any, ClassVar +from typing_extensions import Literal from .ImageFile import ImageFile xpm_head: Any class XpmImageFile(ImageFile): - format: str - format_description: str + 
format: ClassVar[Literal["XPM"]] + format_description: ClassVar[str] def load_read(self, bytes): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/METADATA.toml index f339dc7fdf26..f3e83f9c456b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/METADATA.toml @@ -1,2 +1 @@ -version = "1.0" -python2 = true +version = "1.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/__init__.pyi index 6cfbf75b685d..c501ab39a9a6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/__init__.pyi @@ -1,5 +1,4 @@ import sys -from typing import FrozenSet, Tuple from .connections import Connection as Connection from .constants import FIELD_TYPE as FIELD_TYPE @@ -30,7 +29,7 @@ threadsafety: int apilevel: str paramstyle: str -class DBAPISet(FrozenSet[int]): +class DBAPISet(frozenset[int]): def __ne__(self, other) -> bool: ... def __eq__(self, other) -> bool: ... def __hash__(self) -> int: ... @@ -53,7 +52,7 @@ else: def get_client_info() -> str: ... 
__version__: str -version_info: Tuple[int, int, int, str, int] +version_info: tuple[int, int, int, str, int] NULL: str # pymysql/__init__.py says "Connect = connect = Connection = connections.Connection" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/connections.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/connections.pyi index f30e0366167a..2b2b49d0a509 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/connections.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/connections.pyi @@ -1,5 +1,6 @@ +from _typeshed import Self from socket import socket as _socket -from typing import Any, AnyStr, Generic, Mapping, Tuple, Type, TypeVar, overload +from typing import Any, AnyStr, Generic, Mapping, TypeVar, overload from .charset import charset_by_id as charset_by_id, charset_by_name as charset_by_name from .constants import CLIENT as CLIENT, COMMAND as COMMAND, FIELD_TYPE as FIELD_TYPE, SERVER_STATUS as SERVER_STATUS @@ -35,7 +36,7 @@ class MysqlPacket: def read_uint64(self) -> Any: ... def read_length_encoded_integer(self) -> int: ... def read_length_coded_string(self) -> bytes: ... - def read_struct(self, fmt: str) -> Tuple[Any, ...]: ... + def read_struct(self, fmt: str) -> tuple[Any, ...]: ... def is_ok_packet(self) -> bool: ... def is_eof_packet(self) -> bool: ... def is_auth_switch_request(self) -> bool: ... @@ -133,7 +134,7 @@ class Connection(Generic[_C]): conv=..., use_unicode: bool | None = ..., client_flag: int = ..., - cursorclass: Type[_C] = ..., # different between overloads + cursorclass: type[_C] = ..., # different between overloads init_command: Any | None = ..., connect_timeout: int | None = ..., ssl: Mapping[Any, Any] | None = ..., @@ -178,7 +179,7 @@ class Connection(Generic[_C]): @overload def cursor(self, cursor: None = ...) -> _C: ... @overload - def cursor(self, cursor: Type[_C2]) -> _C2: ... 
+ def cursor(self, cursor: type[_C2]) -> _C2: ... def query(self, sql, unbuffered: bool = ...) -> int: ... def next_result(self, unbuffered: bool = ...) -> int: ... def affected_rows(self): ... @@ -195,6 +196,8 @@ class Connection(Generic[_C]): def get_proto_info(self): ... def get_server_info(self): ... def show_warnings(self): ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, *exc_info: object) -> None: ... Warning: Any Error: Any InterfaceError: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/converters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/converters.pyi index 01a256158d1c..f3d73fc3ba4d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/converters.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/converters.pyi @@ -1,47 +1,39 @@ -from typing import Any - -from .charset import charset_by_id as charset_by_id -from .constants import FIELD_TYPE as FIELD_TYPE, FLAG as FLAG - -PYTHON3: Any -ESCAPE_REGEX: Any -ESCAPE_MAP: Any - -def escape_item(val, charset): ... -def escape_dict(val, charset): ... -def escape_sequence(val, charset): ... -def escape_set(val, charset): ... -def escape_bool(value): ... -def escape_object(value): ... - -escape_int: Any - -escape_long: Any - -def escape_float(value): ... -def escape_string(value): ... -def escape_unicode(value): ... -def escape_None(value): ... -def escape_timedelta(obj): ... -def escape_time(obj): ... -def escape_datetime(obj): ... -def escape_date(obj): ... -def escape_struct_time(obj): ... -def convert_datetime(connection, field, obj): ... -def convert_timedelta(connection, field, obj): ... -def convert_time(connection, field, obj): ... -def convert_date(connection, field, obj): ... -def convert_mysql_timestamp(connection, field, timestamp): ... -def convert_set(s): ... -def convert_bit(connection, field, b): ... -def convert_characters(connection, field, data): ... 
-def convert_int(connection, field, data): ... -def convert_long(connection, field, data): ... -def convert_float(connection, field, data): ... - -encoders: Any -decoders: Any -conversions: Any - -def convert_decimal(connection, field, data): ... -def escape_decimal(obj): ... +import datetime +import time +from collections.abc import Callable, Mapping, Sequence +from decimal import Decimal +from typing import Any, Optional, TypeVar + +_EscaperMapping = Optional[Mapping[type[object], Callable[..., str]]] +_T = TypeVar("_T") + +def escape_item(val: object, charset: object, mapping: _EscaperMapping = ...) -> str: ... +def escape_dict(val: Mapping[str, object], charset: object, mapping: _EscaperMapping = ...) -> dict[str, str]: ... +def escape_sequence(val: Sequence[object], charset: object, mapping: _EscaperMapping = ...) -> str: ... +def escape_set(val: set[object], charset: object, mapping: _EscaperMapping = ...) -> str: ... +def escape_bool(value: bool, mapping: _EscaperMapping = ...) -> str: ... +def escape_int(value: int, mapping: _EscaperMapping = ...) -> str: ... +def escape_float(value: float, mapping: _EscaperMapping = ...) -> str: ... +def escape_string(value: str, mapping: _EscaperMapping = ...) -> str: ... +def escape_bytes_prefixed(value: bytes, mapping: _EscaperMapping = ...) -> str: ... +def escape_bytes(value: bytes, mapping: _EscaperMapping = ...) -> str: ... +def escape_str(value: str, mapping: _EscaperMapping = ...) -> str: ... +def escape_None(value: None, mapping: _EscaperMapping = ...) -> str: ... +def escape_timedelta(obj: datetime.timedelta, mapping: _EscaperMapping = ...) -> str: ... +def escape_time(obj: datetime.time, mapping: _EscaperMapping = ...) -> str: ... +def escape_datetime(obj: datetime.datetime, mapping: _EscaperMapping = ...) -> str: ... +def escape_date(obj: datetime.date, mapping: _EscaperMapping = ...) -> str: ... +def escape_struct_time(obj: time.struct_time, mapping: _EscaperMapping = ...) -> str: ... 
+def Decimal2Literal(o: Decimal, d: object) -> str: ... +def convert_datetime(obj: str | bytes) -> datetime.datetime | str: ... +def convert_timedelta(obj: str | bytes) -> datetime.timedelta | str: ... +def convert_time(obj: str | bytes) -> datetime.time | str: ... +def convert_date(obj: str | bytes) -> datetime.date | str: ... +def through(x: _T) -> _T: ... + +convert_bit = through + +encoders: dict[type[object], Callable[..., str]] +decoders: dict[int, Callable[[str | bytes], Any]] +conversions: dict[type[object] | int, Callable[..., Any]] +Thing2Literal = escape_str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi index b2d1ffb74241..23911f0d54a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/cursors.pyi @@ -1,12 +1,11 @@ -from typing import Any, Iterable, Iterator, Text, Tuple, TypeVar +from _typeshed import Self +from typing import Any, Iterable, Iterator, Text from .connections import Connection -_SelfT = TypeVar("_SelfT") - class Cursor: connection: Connection[Any] - description: Tuple[Text, ...] + description: tuple[Text, ...] rownumber: int rowcount: int arraysize: int @@ -24,27 +23,27 @@ class Cursor: def executemany(self, query: Text, args: Iterable[object]) -> int | None: ... def callproc(self, procname: Text, args: Iterable[Any] = ...) -> Any: ... def scroll(self, value: int, mode: Text = ...) -> None: ... - def __enter__(self: _SelfT) -> _SelfT: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *exc_info: Any) -> None: ... # Methods returning result tuples are below. - def fetchone(self) -> Tuple[Any, ...] | None: ... - def fetchmany(self, size: int | None = ...) -> Tuple[Tuple[Any, ...], ...]: ... - def fetchall(self) -> Tuple[Tuple[Any, ...], ...]: ... 
- def __iter__(self) -> Iterator[Tuple[Any, ...]]: ... + def fetchone(self) -> tuple[Any, ...] | None: ... + def fetchmany(self, size: int | None = ...) -> tuple[tuple[Any, ...], ...]: ... + def fetchall(self) -> tuple[tuple[Any, ...], ...]: ... + def __iter__(self) -> Iterator[tuple[Any, ...]]: ... class DictCursorMixin: dict_type: Any # TODO: add support if someone needs this def fetchone(self) -> dict[Text, Any] | None: ... - def fetchmany(self, size: int | None = ...) -> Tuple[dict[Text, Any], ...]: ... - def fetchall(self) -> Tuple[dict[Text, Any], ...]: ... + def fetchmany(self, size: int | None = ...) -> tuple[dict[Text, Any], ...]: ... + def fetchall(self) -> tuple[dict[Text, Any], ...]: ... def __iter__(self) -> Iterator[dict[Text, Any]]: ... class SSCursor(Cursor): - def fetchall(self) -> list[Tuple[Any, ...]]: ... # type: ignore - def fetchall_unbuffered(self) -> Iterator[Tuple[Any, ...]]: ... + def fetchall(self) -> list[tuple[Any, ...]]: ... # type: ignore[override] + def fetchall_unbuffered(self) -> Iterator[tuple[Any, ...]]: ... def scroll(self, value: int, mode: Text = ...) -> None: ... -class DictCursor(DictCursorMixin, Cursor): ... # type: ignore +class DictCursor(DictCursorMixin, Cursor): ... # type: ignore[misc] -class SSDictCursor(DictCursorMixin, SSCursor): # type: ignore - def fetchall_unbuffered(self) -> Iterator[dict[Text, Any]]: ... # type: ignore +class SSDictCursor(DictCursorMixin, SSCursor): # type: ignore[misc] + def fetchall_unbuffered(self) -> Iterator[dict[Text, Any]]: ... 
# type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/err.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/err.pyi index 2a13b1d893df..8aec38f533f2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/err.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyMySQL/pymysql/err.pyi @@ -1,5 +1,5 @@ import builtins -from typing import NoReturn, Type +from typing import NoReturn from .constants import ER as ER @@ -15,6 +15,6 @@ class InternalError(DatabaseError): ... class ProgrammingError(DatabaseError): ... class NotSupportedError(DatabaseError): ... -error_map: dict[int, Type[DatabaseError]] +error_map: dict[int, type[DatabaseError]] def raise_mysql_exception(data) -> NoReturn: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/METADATA.toml index c5aa7383b514..39a61569b3d7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/METADATA.toml @@ -1,2 +1 @@ -version = "5.4" -python2 = true +version = "6.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/__init__.pyi index 17807a368459..7d28aca00527 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/__init__.pyi @@ -1,24 +1,21 @@ -import sys -from typing import IO, Any, Callable, Iterable, Iterator, Pattern, Sequence, Text, Type, TypeVar, Union, overload - -from yaml.constructor import BaseConstructor -from yaml.dumper import * # noqa: F403 -from yaml.error import * # noqa: F403 -from yaml.events import * # noqa: F403 -from yaml.loader import * # noqa: F403 -from yaml.nodes import * # noqa: F403 -from yaml.representer 
import BaseRepresenter -from yaml.resolver import BaseResolver -from yaml.tokens import * # noqa: F403 +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from typing import Any, Pattern, TypeVar, overload from . import resolver as resolver # Help mypy a bit; this is implied by loader and dumper +from .constructor import BaseConstructor from .cyaml import * +from .dumper import * +from .emitter import _WriteStream +from .error import * +from .events import * +from .loader import * +from .nodes import * +from .reader import _ReadStream +from .representer import BaseRepresenter +from .resolver import BaseResolver +from .tokens import * -if sys.version_info >= (3, 0): - _Str = str -else: - _Str = Union[Text, str] -# FIXME: the functions really return py2:unicode/py3:str if encoding is None, otherwise py2:str/py3:bytes. Waiting for python/mypy#5621 +# FIXME: the functions really return str if encoding is None, otherwise bytes. Waiting for python/mypy#5621 _Yaml = Any __with_libyaml__: Any @@ -28,67 +25,77 @@ _T = TypeVar("_T") _Constructor = TypeVar("_Constructor", bound=BaseConstructor) _Representer = TypeVar("_Representer", bound=BaseRepresenter) +def warnings(settings=...): ... def scan(stream, Loader=...): ... def parse(stream, Loader=...): ... def compose(stream, Loader=...): ... def compose_all(stream, Loader=...): ... -def load(stream: bytes | IO[bytes] | Text | IO[Text], Loader=...) -> Any: ... -def load_all(stream: bytes | IO[bytes] | Text | IO[Text], Loader=...) -> Iterator[Any]: ... -def full_load(stream: bytes | IO[bytes] | Text | IO[Text]) -> Any: ... -def full_load_all(stream: bytes | IO[bytes] | Text | IO[Text]) -> Iterator[Any]: ... -def safe_load(stream: bytes | IO[bytes] | Text | IO[Text]) -> Any: ... -def safe_load_all(stream: bytes | IO[bytes] | Text | IO[Text]) -> Iterator[Any]: ... -def unsafe_load(stream: bytes | IO[bytes] | Text | IO[Text]) -> Any: ... 
-def unsafe_load_all(stream: bytes | IO[bytes] | Text | IO[Text]) -> Iterator[Any]: ... -def emit(events, stream=..., Dumper=..., canonical=..., indent=..., width=..., allow_unicode=..., line_break=...): ... +def load(stream: _ReadStream, Loader) -> Any: ... +def load_all(stream: _ReadStream, Loader) -> Iterator[Any]: ... +def full_load(stream: _ReadStream) -> Any: ... +def full_load_all(stream: _ReadStream) -> Iterator[Any]: ... +def safe_load(stream: _ReadStream) -> Any: ... +def safe_load_all(stream: _ReadStream) -> Iterator[Any]: ... +def unsafe_load(stream: _ReadStream) -> Any: ... +def unsafe_load_all(stream: _ReadStream) -> Iterator[Any]: ... +def emit( + events, + stream: _WriteStream[Any] | None = ..., + Dumper=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., +): ... @overload def serialize_all( nodes, - stream: IO[str], + stream: _WriteStream[Any], Dumper=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., ) -> None: ... 
@overload def serialize_all( nodes, stream: None = ..., Dumper=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding: _Str | None = ..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., ) -> _Yaml: ... @overload def serialize( node, - stream: IO[str], + stream: _WriteStream[Any], Dumper=..., *, - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., ) -> None: ... @overload def serialize( @@ -96,34 +103,34 @@ def serialize( stream: None = ..., Dumper=..., *, - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding: _Str | None = ..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., ) -> _Yaml: ... 
@overload def dump_all( documents: Sequence[Any], - stream: IO[str], + stream: _WriteStream[Any], Dumper=..., - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... @overload @@ -131,38 +138,38 @@ def dump_all( documents: Sequence[Any], stream: None = ..., Dumper=..., - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding: _Str | None = ..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> _Yaml: ... 
@overload def dump( data: Any, - stream: IO[str], + stream: _WriteStream[Any], Dumper=..., *, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... @overload @@ -171,37 +178,37 @@ def dump( stream: None = ..., Dumper=..., *, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding: _Str | None = ..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> _Yaml: ... 
@overload def safe_dump_all( documents: Sequence[Any], - stream: IO[str], + stream: _WriteStream[Any], *, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... @overload @@ -209,37 +216,37 @@ def safe_dump_all( documents: Sequence[Any], stream: None = ..., *, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding: _Str | None = ..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> _Yaml: ... 
@overload def safe_dump( data: Any, - stream: IO[str], + stream: _WriteStream[Any], *, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... @overload @@ -247,61 +254,61 @@ def safe_dump( data: Any, stream: None = ..., *, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding: _Str | None = ..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> _Yaml: ... def add_implicit_resolver( - tag: _Str, + tag: str, regexp: Pattern[str], first: Iterable[Any] | None = ..., - Loader: Type[BaseResolver] | None = ..., - Dumper: Type[BaseResolver] = ..., + Loader: type[BaseResolver] | None = ..., + Dumper: type[BaseResolver] = ..., ) -> None: ... 
def add_path_resolver( - tag: _Str, + tag: str, path: Iterable[Any], - kind: Type[Any] | None = ..., - Loader: Type[BaseResolver] | None = ..., - Dumper: Type[BaseResolver] = ..., + kind: type[Any] | None = ..., + Loader: type[BaseResolver] | None = ..., + Dumper: type[BaseResolver] = ..., ) -> None: ... @overload def add_constructor( - tag: _Str, constructor: Callable[[Loader | FullLoader | UnsafeLoader, Node], Any], Loader: None = ... + tag: str, constructor: Callable[[Loader | FullLoader | UnsafeLoader, Node], Any], Loader: None = ... ) -> None: ... @overload -def add_constructor(tag: _Str, constructor: Callable[[_Constructor, Node], Any], Loader: Type[_Constructor]) -> None: ... +def add_constructor(tag: str, constructor: Callable[[_Constructor, Node], Any], Loader: type[_Constructor]) -> None: ... @overload def add_multi_constructor( - tag_prefix: _Str, multi_constructor: Callable[[Loader | FullLoader | UnsafeLoader, _Str, Node], Any], Loader: None = ... + tag_prefix: str, multi_constructor: Callable[[Loader | FullLoader | UnsafeLoader, str, Node], Any], Loader: None = ... ) -> None: ... @overload def add_multi_constructor( - tag_prefix: _Str, multi_constructor: Callable[[_Constructor, _Str, Node], Any], Loader: Type[_Constructor] + tag_prefix: str, multi_constructor: Callable[[_Constructor, str, Node], Any], Loader: type[_Constructor] ) -> None: ... @overload -def add_representer(data_type: Type[_T], representer: Callable[[Dumper, _T], Node]) -> None: ... +def add_representer(data_type: type[_T], representer: Callable[[Dumper, _T], Node]) -> None: ... @overload -def add_representer(data_type: Type[_T], representer: Callable[[_Representer, _T], Node], Dumper: Type[_Representer]) -> None: ... +def add_representer(data_type: type[_T], representer: Callable[[_Representer, _T], Node], Dumper: type[_Representer]) -> None: ... @overload -def add_multi_representer(data_type: Type[_T], multi_representer: Callable[[Dumper, _T], Node]) -> None: ... 
+def add_multi_representer(data_type: type[_T], multi_representer: Callable[[Dumper, _T], Node]) -> None: ... @overload def add_multi_representer( - data_type: Type[_T], multi_representer: Callable[[_Representer, _T], Node], Dumper: Type[_Representer] + data_type: type[_T], multi_representer: Callable[[_Representer, _T], Node], Dumper: type[_Representer] ) -> None: ... class YAMLObjectMetaclass(type): - def __init__(self, name, bases, kwds) -> None: ... + def __init__(cls, name, bases, kwds) -> None: ... class YAMLObject(metaclass=YAMLObjectMetaclass): yaml_loader: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/_yaml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/_yaml.pyi new file mode 100644 index 000000000000..5eacde576217 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/_yaml.pyi @@ -0,0 +1,56 @@ +from _typeshed import SupportsRead +from collections.abc import Mapping, Sequence +from typing import IO, Any + +from .events import Event +from .nodes import Node +from .tokens import Token + +def get_version_string() -> str: ... +def get_version() -> tuple[int, int, int]: ... + +class Mark: + name: Any + index: int + line: int + column: int + buffer: Any + pointer: Any + def __init__(self, name, index: int, line: int, column: int, buffer, pointer) -> None: ... + def get_snippet(self): ... + +class CParser: + def __init__(self, stream: str | bytes | SupportsRead[str | bytes]) -> None: ... + def dispose(self) -> None: ... + def get_token(self) -> Token | None: ... + def peek_token(self) -> Token | None: ... + def check_token(self, *choices) -> bool: ... + def get_event(self) -> Event | None: ... + def peek_event(self) -> Event | None: ... + def check_event(self, *choices) -> bool: ... + def check_node(self) -> bool: ... + def get_node(self) -> Node | None: ... + def get_single_node(self) -> Node | None: ... + def raw_parse(self) -> int: ... + def raw_scan(self) -> int: ... 
+ +class CEmitter: + def __init__( + self, + stream: IO[Any], + canonical: Any | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: Any | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: Any | None = ..., + explicit_end: Any | None = ..., + version: Sequence[int] | None = ..., + tags: Mapping[str, str] | None = ..., + ) -> None: ... + def dispose(self) -> None: ... + def emit(self, event_object) -> None: ... + def open(self) -> None: ... + def close(self) -> None: ... + def serialize(self, node) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/constructor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/constructor.pyi index ef3d48f01407..e503cba6949a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/constructor.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/constructor.pyi @@ -1,10 +1,9 @@ -import sys -from typing import Any, Text, Union +from typing import Any, Pattern, Union from yaml.error import MarkedYAMLError from yaml.nodes import ScalarNode -_Scalar = Union[Text, int, float, bool, None] +_Scalar = Union[str, int, float, bool, None] class ConstructorError(MarkedYAMLError): ... @@ -17,6 +16,7 @@ class BaseConstructor: deep_construct: Any def __init__(self) -> None: ... def check_data(self): ... + def check_state_key(self, key: str) -> None: ... def get_data(self): ... def get_single_data(self) -> Any: ... def construct_document(self, node): ... @@ -54,6 +54,8 @@ class SafeConstructor(BaseConstructor): def construct_undefined(self, node): ... class FullConstructor(SafeConstructor): + def get_state_keys_blacklist(self) -> list[str]: ... + def get_state_keys_blacklist_regexp(self) -> Pattern[str]: ... def construct_python_str(self, node): ... def construct_python_unicode(self, node): ... def construct_python_bytes(self, node): ... 
@@ -65,7 +67,7 @@ class FullConstructor(SafeConstructor): def construct_python_name(self, suffix, node): ... def construct_python_module(self, suffix, node): ... def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=..., unsafe=...): ... - def set_python_instance_state(self, instance, state): ... + def set_python_instance_state(self, instance, state, unsafe: bool = ...) -> None: ... def construct_python_object(self, suffix, node): ... def construct_python_object_apply(self, suffix, node, newobj=...): ... def construct_python_object_new(self, suffix, node): ... @@ -86,8 +88,6 @@ class Constructor(SafeConstructor): def find_python_name(self, name, mark): ... def construct_python_name(self, suffix, node): ... def construct_python_module(self, suffix, node): ... - if sys.version_info < (3, 0): - class classobj: ... def make_python_instance(self, suffix, node, args=..., kwds=..., newobj=...): ... def set_python_instance_state(self, instance, state): ... def construct_python_object(self, suffix, node): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/cyaml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/cyaml.pyi index 6face45a13d9..d70b44148bec 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/cyaml.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/cyaml.pyi @@ -1,27 +1,15 @@ from _typeshed import SupportsRead -from typing import IO, Any, Mapping, Sequence, Text, Union +from collections.abc import Mapping, Sequence +from typing import IO, Any, Union -from yaml.constructor import BaseConstructor, FullConstructor, SafeConstructor, UnsafeConstructor -from yaml.events import Event -from yaml.nodes import Node -from yaml.representer import BaseRepresenter, SafeRepresenter -from yaml.resolver import BaseResolver, Resolver -from yaml.tokens import Token +from ._yaml import CEmitter, CParser +from .constructor import BaseConstructor, FullConstructor, SafeConstructor, UnsafeConstructor +from .representer import BaseRepresenter, SafeRepresenter +from .resolver import BaseResolver, Resolver -_Readable = SupportsRead[Union[Text, bytes]] +__all__ = ["CBaseLoader", "CSafeLoader", "CFullLoader", "CUnsafeLoader", "CLoader", "CBaseDumper", "CSafeDumper", "CDumper"] -class CParser: - def __init__(self, stream: str | bytes | _Readable) -> None: ... - def dispose(self) -> None: ... - def get_token(self) -> Token | None: ... - def peek_token(self) -> Token | None: ... - def check_token(self, *choices) -> bool: ... - def get_event(self) -> Event | None: ... - def peek_event(self) -> Event | None: ... - def check_event(self, *choices) -> bool: ... - def check_node(self) -> bool: ... - def get_node(self) -> Node | None: ... - def get_single_node(self) -> Node | None: ... +_Readable = SupportsRead[Union[str, bytes]] class CBaseLoader(CParser, BaseConstructor, BaseResolver): def __init__(self, stream: str | bytes | _Readable) -> None: ... 
@@ -38,40 +26,42 @@ class CFullLoader(CParser, FullConstructor, Resolver): class CUnsafeLoader(CParser, UnsafeConstructor, Resolver): def __init__(self, stream: str | bytes | _Readable) -> None: ... -class CEmitter(object): +class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): def __init__( self, stream: IO[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., canonical: Any | None = ..., indent: int | None = ..., width: int | None = ..., allow_unicode: Any | None = ..., line_break: str | None = ..., - encoding: Text | None = ..., + encoding: str | None = ..., explicit_start: Any | None = ..., explicit_end: Any | None = ..., version: Sequence[int] | None = ..., - tags: Mapping[Text, Text] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., ) -> None: ... -class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): +class CDumper(CEmitter, SafeRepresenter, Resolver): def __init__( self, stream: IO[Any], default_style: str | None = ..., - default_flow_style: bool | None = ..., + default_flow_style: bool = ..., canonical: Any | None = ..., indent: int | None = ..., width: int | None = ..., allow_unicode: Any | None = ..., line_break: str | None = ..., - encoding: Text | None = ..., + encoding: str | None = ..., explicit_start: Any | None = ..., explicit_end: Any | None = ..., version: Sequence[int] | None = ..., - tags: Mapping[Text, Text] | None = ..., + tags: Mapping[str, str] | None = ..., + sort_keys: bool = ..., ) -> None: ... -class CDumper(CEmitter, SafeRepresenter, Resolver): ... 
- CSafeDumper = CDumper diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/dumper.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/dumper.pyi index b2ca83fc8701..040aa0fed558 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/dumper.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/dumper.pyi @@ -1,61 +1,66 @@ +from collections.abc import Mapping +from typing import Any + from yaml.emitter import Emitter from yaml.representer import BaseRepresenter, Representer, SafeRepresenter from yaml.resolver import BaseResolver, Resolver from yaml.serializer import Serializer +from .emitter import _WriteStream + class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): def __init__( self, - stream, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + stream: _WriteStream[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... 
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): def __init__( self, - stream, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + stream: _WriteStream[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... class Dumper(Emitter, Serializer, Representer, Resolver): def __init__( self, - stream, - default_style=..., - default_flow_style=..., - canonical=..., - indent=..., - width=..., - allow_unicode=..., - line_break=..., - encoding=..., - explicit_start=..., - explicit_end=..., - version=..., - tags=..., + stream: _WriteStream[Any], + default_style: str | None = ..., + default_flow_style: bool | None = ..., + canonical: bool | None = ..., + indent: int | None = ..., + width: int | None = ..., + allow_unicode: bool | None = ..., + line_break: str | None = ..., + encoding: str | None = ..., + explicit_start: bool | None = ..., + explicit_end: bool | None = ..., + version: tuple[int, int] | None = ..., + tags: Mapping[str, str] | None = ..., sort_keys: bool = ..., ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/emitter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/emitter.pyi index 9f44bd771c69..aaf3b02d5c3e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/emitter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/emitter.pyi @@ -1,7 +1,15 @@ -from typing import Any +from typing import Any, Protocol, TypeVar from yaml.error import YAMLError +_T_contra = TypeVar("_T_contra", str, bytes, contravariant=True) + +class _WriteStream(Protocol[_T_contra]): + def write(self, __data: _T_contra) -> object: ... + # Optional fields: + # encoding: str + # def flush(self) -> object: ... + class EmitterError(YAMLError): ... class ScalarAnalysis: @@ -19,7 +27,7 @@ class ScalarAnalysis: class Emitter: DEFAULT_TAG_PREFIXES: Any - stream: Any + stream: _WriteStream[Any] encoding: Any states: Any state: Any @@ -47,7 +55,9 @@ class Emitter: prepared_tag: Any analysis: Any style: Any - def __init__(self, stream, canonical=..., indent=..., width=..., allow_unicode=..., line_break=...) -> None: ... + def __init__( + self, stream: _WriteStream[Any], canonical=..., indent=..., width=..., allow_unicode=..., line_break=... + ) -> None: ... def dispose(self): ... def emit(self, event): ... def need_more_events(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/loader.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/loader.pyi index 3782cd365762..89dea9aaf2a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/loader.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/loader.pyi @@ -5,17 +5,19 @@ from yaml.reader import Reader from yaml.resolver import BaseResolver, Resolver from yaml.scanner import Scanner +from .reader import _ReadStream + class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - def __init__(self, stream) -> None: ... 
+ def __init__(self, stream: _ReadStream) -> None: ... class FullLoader(Reader, Scanner, Parser, Composer, FullConstructor, Resolver): - def __init__(self, stream) -> None: ... + def __init__(self, stream: _ReadStream) -> None: ... class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): - def __init__(self, stream) -> None: ... + def __init__(self, stream: _ReadStream) -> None: ... class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - def __init__(self, stream) -> None: ... + def __init__(self, stream: _ReadStream) -> None: ... class UnsafeLoader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - def __init__(self, stream) -> None: ... + def __init__(self, stream: _ReadStream) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/reader.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/reader.pyi index 18c3c7a9ab5b..06e9a0f42e29 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/reader.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/reader.pyi @@ -1,7 +1,10 @@ +from _typeshed import SupportsRead from typing import Any from yaml.error import YAMLError +_ReadStream = str | bytes | SupportsRead[str] | SupportsRead[bytes] + class ReaderError(YAMLError): name: Any character: Any @@ -12,7 +15,7 @@ class ReaderError(YAMLError): class Reader: name: Any - stream: Any + stream: SupportsRead[str] | SupportsRead[bytes] | None stream_pointer: Any eof: Any buffer: Any @@ -23,7 +26,7 @@ class Reader: index: Any line: Any column: Any - def __init__(self, stream) -> None: ... + def __init__(self, stream: _ReadStream) -> None: ... def peek(self, index=...): ... def prefix(self, length=...): ... def forward(self, length=...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/representer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/representer.pyi index df4e9116f155..f9802d2df7e0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/representer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/PyYAML/yaml/representer.pyi @@ -1,8 +1,8 @@ import datetime -import sys from _typeshed import SupportsItems +from collections.abc import Callable, Iterable, Mapping from types import BuiltinFunctionType, FunctionType, ModuleType -from typing import Any, Callable, ClassVar, Iterable, Mapping, NoReturn, Tuple, Type, TypeVar +from typing import Any, ClassVar, NoReturn, TypeVar from yaml.error import YAMLError as YAMLError from yaml.nodes import MappingNode as MappingNode, Node as Node, ScalarNode as ScalarNode, SequenceNode as SequenceNode @@ -13,8 +13,8 @@ _R = TypeVar("_R", bound=BaseRepresenter) class RepresenterError(YAMLError): ... class BaseRepresenter: - yaml_representers: ClassVar[dict[Type[Any], Callable[[BaseRepresenter, Any], Node]]] - yaml_multi_representers: ClassVar[dict[Type[Any], Callable[[BaseRepresenter, Any], Node]]] + yaml_representers: ClassVar[dict[type[Any], Callable[[BaseRepresenter, Any], Node]]] + yaml_multi_representers: ClassVar[dict[type[Any], Callable[[BaseRepresenter, Any], Node]]] default_style: str | Any sort_keys: bool default_flow_style: bool @@ -24,16 +24,14 @@ class BaseRepresenter: def __init__(self, default_style: str | None = ..., default_flow_style: bool = ..., sort_keys: bool = ...) -> None: ... def represent(self, data) -> None: ... def represent_data(self, data) -> Node: ... - if sys.version_info < (3, 0): - def get_classobj_bases(self, cls): ... @classmethod - def add_representer(cls: Type[_R], data_type: Type[_T], representer: Callable[[_R, _T], Node]) -> None: ... + def add_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ... 
@classmethod - def add_multi_representer(cls: Type[_R], data_type: Type[_T], representer: Callable[[_R, _T], Node]) -> None: ... + def add_multi_representer(cls: type[_R], data_type: type[_T], representer: Callable[[_R, _T], Node]) -> None: ... def represent_scalar(self, tag: str, value, style: str | None = ...) -> ScalarNode: ... def represent_sequence(self, tag: str, sequence: Iterable[Any], flow_style: bool | None = ...) -> SequenceNode: ... def represent_mapping( - self, tag: str, mapping: SupportsItems[Any, Any] | Iterable[Tuple[Any, Any]], flow_style: bool | None = ... + self, tag: str, mapping: SupportsItems[Any, Any] | Iterable[tuple[Any, Any]], flow_style: bool | None = ... ) -> MappingNode: ... def ignore_aliases(self, data) -> bool: ... @@ -42,15 +40,12 @@ class SafeRepresenter(BaseRepresenter): def ignore_aliases(self, data) -> bool: ... def represent_none(self, data) -> ScalarNode: ... def represent_str(self, data: str) -> ScalarNode: ... - if sys.version_info < (3, 0): - def represent_unicode(self, data): ... - def represent_long(self, data): ... def represent_binary(self, data: bytes) -> ScalarNode: ... def represent_bool(self, data: bool) -> ScalarNode: ... def represent_int(self, data: int) -> ScalarNode: ... def represent_float(self, data: float) -> ScalarNode: ... def represent_list(self, data: Iterable[Any]) -> SequenceNode: ... - def represent_dict(self, data: SupportsItems[Any, Any] | Iterable[Tuple[Any, Any]]) -> MappingNode: ... + def represent_dict(self, data: SupportsItems[Any, Any] | Iterable[tuple[Any, Any]]) -> MappingNode: ... def represent_set(self, data: Iterable[Any]) -> MappingNode: ... def represent_date(self, data: datetime.date) -> ScalarNode: ... def represent_datetime(self, data: datetime.datetime) -> ScalarNode: ... @@ -58,10 +53,6 @@ class SafeRepresenter(BaseRepresenter): def represent_undefined(self, data) -> NoReturn: ... 
class Representer(SafeRepresenter): - if sys.version_info < (3, 0): - def represent_unicode(self, data): ... - def represent_long(self, data): ... - def represent_instance(self, data): ... def represent_complex(self, data: complex) -> ScalarNode: ... def represent_tuple(self, data: Iterable[Any]) -> SequenceNode: ... def represent_name(self, data: BuiltinFunctionType | FunctionType) -> ScalarNode: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/METADATA.toml index efa8f9f7ef0a..7ebedc4cd8e1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/METADATA.toml @@ -1,2 +1,2 @@ -version = "2.9" -requires = ["types-docutils"] +version = "2.9.*" +requires = ["types-docutils", "types-setuptools"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/__init__.pyi index 1b05e273a1d3..7a9621cf9b7f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/__init__.pyi @@ -1,5 +1,16 @@ -from typing import Any +from _typeshed import SupportsWrite +from typing import TypeVar, overload + +from pygments.formatter import Formatter + +_T = TypeVar("_T", str, bytes) def lex(code, lexer): ... -def format(tokens, formatter, outfile: Any | None = ...): ... -def highlight(code, lexer, formatter, outfile: Any | None = ...): ... +@overload +def format(tokens, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ... +@overload +def format(tokens, formatter: Formatter[_T], outfile: None = ...) -> _T: ... +@overload +def highlight(code, lexer, formatter: Formatter[_T], outfile: SupportsWrite[_T]) -> None: ... 
+@overload +def highlight(code, lexer, formatter: Formatter[_T], outfile: None = ...) -> _T: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi index 15f60f43d57d..05325c8c63d2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/filters/__init__.pyi @@ -1,4 +1,4 @@ -from collections.abc import Iterable, Iterator +from collections.abc import Generator, Iterable, Iterator from typing import Any from pygments.filter import Filter @@ -7,7 +7,7 @@ from pygments.token import _TokenType def find_filter_class(filtername): ... def get_filter_by_name(filtername, **options): ... -def get_all_filters(): ... +def get_all_filters() -> Generator[str, None, None]: ... class CodeTagFilter(Filter): tag_re: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi index 1fbf82f19956..f441d487cf6b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatter.pyi @@ -1,6 +1,8 @@ -from typing import Any +from typing import Any, Generic, TypeVar, overload -class Formatter: +_T = TypeVar("_T", str, bytes) + +class Formatter(Generic[_T]): name: Any aliases: Any filenames: Any @@ -10,6 +12,11 @@ class Formatter: title: Any encoding: Any options: Any - def __init__(self, **options) -> None: ... + @overload + def __init__(self: Formatter[str], *, encoding: None = ..., outencoding: None = ..., **options) -> None: ... + @overload + def __init__(self: Formatter[bytes], *, encoding: str, outencoding: None = ..., **options) -> None: ... 
+ @overload + def __init__(self: Formatter[bytes], *, encoding: None = ..., outencoding: str, **options) -> None: ... def get_style_defs(self, arg: str = ...): ... def format(self, tokensource, outfile): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi index 0dd9f2890de8..573b42de71e6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/__init__.pyi @@ -1,3 +1,6 @@ +from typing import Any, Generator + +from ..formatter import Formatter from .bbcode import BBCodeFormatter as BBCodeFormatter from .html import HtmlFormatter as HtmlFormatter from .img import ( @@ -15,7 +18,7 @@ from .svg import SvgFormatter as SvgFormatter from .terminal import TerminalFormatter as TerminalFormatter from .terminal256 import Terminal256Formatter as Terminal256Formatter, TerminalTrueColorFormatter as TerminalTrueColorFormatter -def get_all_formatters() -> None: ... +def get_all_formatters() -> Generator[type[Formatter[Any]], None, None]: ... def get_formatter_by_name(_alias, **options): ... def load_formatter_from_file(filename, formattername: str = ..., **options): ... def get_formatter_for_filename(fn, **options): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi index 332c6c13a0bd..df1708bec27a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/bbcode.pyi @@ -1,11 +1,12 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class BBCodeFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class BBCodeFormatter(Formatter[_T]): name: str aliases: Any filenames: Any styles: Any - def __init__(self, **options) -> None: ... def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi index ba3d0696d853..1822dec0efaa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/html.pyi @@ -1,8 +1,10 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class HtmlFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class HtmlFormatter(Formatter[_T]): name: str aliases: Any filenames: Any @@ -30,7 +32,6 @@ class HtmlFormatter(Formatter): linespans: Any anchorlinenos: Any hl_lines: Any - def __init__(self, **options) -> None: ... def get_style_defs(self, arg: Any | None = ...): ... def get_token_style_defs(self, arg: Any | None = ...): ... def get_background_style_defs(self, arg: Any | None = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi index 06b50aee8ca4..255dc80fdf38 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/img.pyi @@ -1,7 +1,9 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter +_T = TypeVar("_T", str, bytes) + class PilNotAvailable(ImportError): ... class FontNotFound(Exception): ... @@ -15,7 +17,7 @@ class FontManager: def get_text_size(self, text): ... def get_font(self, bold, oblique): ... -class ImageFormatter(Formatter): +class ImageFormatter(Formatter[_T]): name: str aliases: Any filenames: Any @@ -42,23 +44,22 @@ class ImageFormatter(Formatter): hl_lines: Any hl_color: Any drawables: Any - def __init__(self, **options) -> None: ... def get_style_defs(self, arg: str = ...) -> None: ... def format(self, tokensource, outfile) -> None: ... 
-class GifImageFormatter(ImageFormatter): +class GifImageFormatter(ImageFormatter[_T]): name: str aliases: Any filenames: Any default_image_format: str -class JpgImageFormatter(ImageFormatter): +class JpgImageFormatter(ImageFormatter[_T]): name: str aliases: Any filenames: Any default_image_format: str -class BmpImageFormatter(ImageFormatter): +class BmpImageFormatter(ImageFormatter[_T]): name: str aliases: Any filenames: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi index b8bedfd849ac..7af728d1445c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/irc.pyi @@ -1,13 +1,14 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class IRCFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class IRCFormatter(Formatter[_T]): name: str aliases: Any filenames: Any darkbg: Any colorscheme: Any linenos: Any - def __init__(self, **options) -> None: ... def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi index a321c7b1ada0..83147436e7e6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/latex.pyi @@ -1,9 +1,11 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter from pygments.lexer import Lexer -class LatexFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class LatexFormatter(Formatter[_T]): name: str aliases: Any filenames: Any @@ -21,7 +23,6 @@ class LatexFormatter(Formatter): left: Any right: Any envname: Any - def __init__(self, **options) -> None: ... def get_style_defs(self, arg: str = ...): ... def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi index 222ab054b96f..e02007b00c03 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/other.pyi @@ -1,14 +1,16 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class NullFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class NullFormatter(Formatter[_T]): name: str aliases: Any filenames: Any def format(self, tokensource, outfile) -> None: ... -class RawTokenFormatter(Formatter): +class RawTokenFormatter(Formatter[_T]): name: str aliases: Any filenames: Any @@ -16,11 +18,9 @@ class RawTokenFormatter(Formatter): encoding: str compress: Any error_color: Any - def __init__(self, **options) -> None: ... def format(self, tokensource, outfile) -> None: ... 
-class TestcaseFormatter(Formatter): +class TestcaseFormatter(Formatter[_T]): name: str aliases: Any - def __init__(self, **options) -> None: ... def format(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi index c72b9fd84a63..d266bbe3f295 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/pangomarkup.pyi @@ -1,11 +1,12 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class PangoMarkupFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class PangoMarkupFormatter(Formatter[_T]): name: str aliases: Any filenames: Any styles: Any - def __init__(self, **options) -> None: ... def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi index 2dcffc66952d..900f43d6f38b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/rtf.pyi @@ -1,12 +1,13 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class RtfFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class RtfFormatter(Formatter[_T]): name: str aliases: Any filenames: Any fontface: Any fontsize: Any - def __init__(self, **options) -> None: ... def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi index 8dd0402595d1..f349157c0cc5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/svg.pyi @@ -1,8 +1,10 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class SvgFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class SvgFormatter(Formatter[_T]): name: str aliases: Any filenames: Any @@ -17,5 +19,4 @@ class SvgFormatter(Formatter): linenostart: Any linenostep: Any linenowidth: Any - def __init__(self, **options) -> None: ... def format_unencoded(self, tokensource, outfile) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi index 92b2c8b4ee91..7448f17a695d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal.pyi @@ -1,14 +1,15 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter -class TerminalFormatter(Formatter): +_T = TypeVar("_T", str, bytes) + +class TerminalFormatter(Formatter[_T]): name: str aliases: Any filenames: Any darkbg: Any colorscheme: Any linenos: Any - def __init__(self, **options) -> None: ... def format(self, tokensource, outfile): ... def format_unencoded(self, tokensource, outfile) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi index 55786c45dc9b..69f30cc1929a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/formatters/terminal256.pyi @@ -1,7 +1,9 @@ -from typing import Any +from typing import Any, TypeVar from pygments.formatter import Formatter +_T = TypeVar("_T", str, bytes) + class EscapeSequence: fg: Any bg: Any @@ -16,7 +18,7 @@ class EscapeSequence: def true_color_string(self): ... def reset_string(self): ... -class Terminal256Formatter(Formatter): +class Terminal256Formatter(Formatter[_T]): name: str aliases: Any filenames: Any @@ -27,11 +29,10 @@ class Terminal256Formatter(Formatter): useunderline: Any useitalic: Any linenos: Any - def __init__(self, **options) -> None: ... def format(self, tokensource, outfile): ... def format_unencoded(self, tokensource, outfile) -> None: ... -class TerminalTrueColorFormatter(Terminal256Formatter): +class TerminalTrueColorFormatter(Terminal256Formatter[_T]): name: str aliases: Any filenames: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexer.pyi index 979d822b9962..6267debf322b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexer.pyi @@ -1,5 +1,5 @@ from collections.abc import Iterable, Iterator, Sequence -from typing import Any, Tuple +from typing import Any from pygments.token import _TokenType from pygments.util import Future @@ -40,7 +40,7 @@ class _inherit: ... inherit: Any -class combined(Tuple[Any]): +class combined(tuple[Any, ...]): def __new__(cls, *args): ... 
def __init__(self, *args) -> None: ... @@ -89,7 +89,9 @@ class LexerContext: def __init__(self, text, pos, stack: Any | None = ..., end: Any | None = ...) -> None: ... class ExtendedRegexLexer(RegexLexer): - def get_tokens_unprocessed(self, text: str | None = ..., context: LexerContext | None = ...) -> Iterator[tuple[int, _TokenType, str]]: ... # type: ignore + def get_tokens_unprocessed( # type: ignore[override] + self, text: str | None = ..., context: LexerContext | None = ... + ) -> Iterator[tuple[int, _TokenType, str]]: ... class ProfilingRegexLexerMeta(RegexLexerMeta): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi index 23a2966c3890..ce42fdfc34f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/lexers/__init__.pyi @@ -1,13 +1,12 @@ from _typeshed import StrOrBytesPath, StrPath from collections.abc import Iterator -from typing import Any, Tuple, Union +from typing import Any, Union from pygments.lexer import Lexer, LexerMeta _OpenFile = Union[StrOrBytesPath, int] # copy/pasted from builtins.pyi -# TODO: use lower-case tuple once mypy updated -def get_all_lexers() -> Iterator[tuple[str, Tuple[str, ...], Tuple[str, ...], Tuple[str, ...]]]: ... +def get_all_lexers() -> Iterator[tuple[str, tuple[str, ...], tuple[str, ...], tuple[str, ...]]]: ... def find_lexer_class(name: str) -> LexerMeta | None: ... def find_lexer_class_by_name(_alias: str) -> LexerMeta: ... def get_lexer_by_name(_alias: str, **options: Any) -> Lexer: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi index e2115ee9d0fd..81e3554c082c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/plugin.pyi @@ -1,10 +1,18 @@ +from typing import Any, Generator, Iterable + +from pkg_resources import EntryPoint +from pygments.filter import Filter +from pygments.formatter import Formatter +from pygments.lexer import Lexer +from pygments.style import Style + LEXER_ENTRY_POINT: str FORMATTER_ENTRY_POINT: str STYLE_ENTRY_POINT: str FILTER_ENTRY_POINT: str -def iter_entry_points(group_name): ... -def find_plugin_lexers() -> None: ... -def find_plugin_formatters() -> None: ... -def find_plugin_styles() -> None: ... -def find_plugin_filters() -> None: ... +def iter_entry_points(group_name: str) -> Iterable[EntryPoint]: ... +def find_plugin_lexers() -> Generator[type[Lexer], None, None]: ... +def find_plugin_formatters() -> Generator[tuple[str, type[Formatter[Any]]], None, None]: ... +def find_plugin_styles() -> Generator[tuple[str, type[Style]], None, None]: ... +def find_plugin_filters() -> Generator[tuple[str, type[Filter]], None, None]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/style.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/style.pyi index 7b4cc56ca8fc..97ff2ff86872 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/style.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/style.pyi @@ -1,9 +1,9 @@ -from collections.abc import Iterator, Mapping, Set +from collections.abc import Iterator, Mapping, Set as AbstractSet from typing_extensions import TypedDict from pygments.token import _TokenType -ansicolors: Set[str] # not intended to be mutable +ansicolors: AbstractSet[str] # not intended to be mutable class _StyleDict(TypedDict): color: str | None diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/token.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/token.pyi index b428132f5ca0..9fb24b751762 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/token.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/Pygments/pygments/token.pyi @@ -1,15 +1,15 @@ +from _typeshed import Self from collections.abc import Mapping -from typing import Tuple +from typing import Any -class _TokenType(Tuple[str]): # TODO: change to lower-case tuple once new mypy released +class _TokenType(tuple[str, ...]): parent: _TokenType | None def split(self) -> list[_TokenType]: ... subtypes: set[_TokenType] - def __init__(self, *args: str) -> None: ... - def __contains__(self, val: _TokenType) -> bool: ... # type: ignore + def __contains__(self, val: _TokenType) -> bool: ... # type: ignore[override] def __getattr__(self, name: str) -> _TokenType: ... - def __copy__(self): ... - def __deepcopy__(self, memo): ... + def __copy__(self: Self) -> Self: ... + def __deepcopy__(self: Self, memo: Any) -> Self: ... 
Token: _TokenType Text: _TokenType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/METADATA.toml new file mode 100644 index 000000000000..72be0540cffd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/METADATA.toml @@ -0,0 +1,6 @@ +version = "1.4.*" +extra_description = """\ + The `sqlalchemy-stubs` package is an alternative to this package and also \ + includes a mypy plugin for more precise types.\ +""" +stubtest = false # https://github.com/python/typeshed/issues/7307 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/__init__.pyi new file mode 100644 index 000000000000..dde3cbce262a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/__init__.pyi @@ -0,0 +1,133 @@ +from .engine import ( + create_engine as create_engine, + create_mock_engine as create_mock_engine, + engine_from_config as engine_from_config, +) +from .inspection import inspect as inspect +from .schema import ( + BLANK_SCHEMA as BLANK_SCHEMA, + DDL as DDL, + CheckConstraint as CheckConstraint, + Column as Column, + ColumnDefault as ColumnDefault, + Computed as Computed, + Constraint as Constraint, + DefaultClause as DefaultClause, + FetchedValue as FetchedValue, + ForeignKey as ForeignKey, + ForeignKeyConstraint as ForeignKeyConstraint, + Identity as Identity, + Index as Index, + MetaData as MetaData, + PrimaryKeyConstraint as PrimaryKeyConstraint, + Sequence as Sequence, + Table as Table, + ThreadLocalMetaData as ThreadLocalMetaData, + UniqueConstraint as UniqueConstraint, +) +from .sql import ( + LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT, + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, + LABEL_STYLE_NONE as LABEL_STYLE_NONE, + LABEL_STYLE_TABLENAME_PLUS_COL as 
LABEL_STYLE_TABLENAME_PLUS_COL, + alias as alias, + all_ as all_, + and_ as and_, + any_ as any_, + asc as asc, + between as between, + bindparam as bindparam, + case as case, + cast as cast, + collate as collate, + column as column, + delete as delete, + desc as desc, + distinct as distinct, + except_ as except_, + except_all as except_all, + exists as exists, + extract as extract, + false as false, + func as func, + funcfilter as funcfilter, + insert as insert, + intersect as intersect, + intersect_all as intersect_all, + join as join, + lambda_stmt as lambda_stmt, + lateral as lateral, + literal as literal, + literal_column as literal_column, + modifier as modifier, + not_ as not_, + null as null, + nulls_first as nulls_first, + nulls_last as nulls_last, + nullsfirst as nullsfirst, + nullslast as nullslast, + or_ as or_, + outerjoin as outerjoin, + outparam as outparam, + over as over, + select as select, + subquery as subquery, + table as table, + tablesample as tablesample, + text as text, + true as true, + tuple_ as tuple_, + type_coerce as type_coerce, + union as union, + union_all as union_all, + update as update, + values as values, + within_group as within_group, +) +from .types import ( + ARRAY as ARRAY, + BIGINT as BIGINT, + BINARY as BINARY, + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + CLOB as CLOB, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INT as INT, + INTEGER as INTEGER, + JSON as JSON, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + BigInteger as BigInteger, + Boolean as Boolean, + Date as Date, + DateTime as DateTime, + Enum as Enum, + Float as Float, + Integer as Integer, + Interval as Interval, + LargeBinary as LargeBinary, + Numeric as Numeric, + PickleType as PickleType, + SmallInteger as SmallInteger, + String as String, + Text as 
Text, + Time as Time, + TupleType as TupleType, + TypeDecorator as TypeDecorator, + Unicode as Unicode, + UnicodeText as UnicodeText, +) + +__version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/cimmutabledict.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/cimmutabledict.pyi new file mode 100644 index 000000000000..e3f87af28757 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/cimmutabledict.pyi @@ -0,0 +1,17 @@ +from _typeshed import SupportsKeysAndGetItem +from collections.abc import Iterable +from typing import Generic, TypeVar, overload + +_KT = TypeVar("_KT") +_KT2 = TypeVar("_KT2") +_VT = TypeVar("_VT") +_VT2 = TypeVar("_VT2") + +class immutabledict(dict[_KT, _VT], Generic[_KT, _VT]): + @overload + def union(self, __dict: dict[_KT2, _VT2]) -> immutabledict[_KT | _KT2, _VT | _VT2]: ... + @overload + def union(self, __dict: None = ..., **kw: SupportsKeysAndGetItem[_KT2, _VT2]) -> immutabledict[_KT | _KT2, _VT | _VT2]: ... + def merge_with( + self, *args: SupportsKeysAndGetItem[_KT | _KT2, _VT2] | Iterable[tuple[_KT2, _VT2]] | None + ) -> immutabledict[_KT | _KT2, _VT | _VT2]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/__init__.pyi new file mode 100644 index 000000000000..b66d337f00a3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/__init__.pyi @@ -0,0 +1 @@ +class Connector: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/mxodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/mxodbc.pyi new file mode 100644 index 000000000000..d3bfccd46d9f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/mxodbc.pyi @@ -0,0 +1,17 @@ +from typing import Any + +from . import Connector + +class MxODBCConnector(Connector): + driver: str + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + supports_native_decimal: bool + @classmethod + def dbapi(cls): ... + def on_connect(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def do_execute(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/pyodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/pyodbc.pyi new file mode 100644 index 000000000000..e821b1964453 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/connectors/pyodbc.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from . import Connector + +class PyODBCConnector(Connector): + driver: str + supports_sane_rowcount_returning: bool + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + supports_native_decimal: bool + default_paramstyle: str + use_setinputsizes: bool + pyodbc_driver_name: Any + def __init__(self, supports_unicode_binds: Any | None = ..., use_setinputsizes: bool = ..., **kw) -> None: ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def set_isolation_level(self, connection, level) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/cresultproxy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/cresultproxy.pyi new file mode 100644 index 000000000000..a1e830277edd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/cresultproxy.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class BaseRow: + def __init__(self, parent, processors, keymap, key_style, data) -> None: ... + def __reduce__(self): ... + def __iter__(self): ... + def __len__(self): ... + def __hash__(self): ... + __getitem__: Any + +def safe_rowproxy_reconstructor(__cls, __state): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/databases/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/databases/__init__.pyi new file mode 100644 index 000000000000..58f463cb74e7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/databases/__init__.pyi @@ -0,0 +1,18 @@ +from ..dialects.firebird import base as firebird_base +from ..dialects.mssql import base as mssql_base +from ..dialects.mysql import base as mysql_base +from ..dialects.oracle import base as oracle_base +from ..dialects.postgresql import base as postgresql_base +from ..dialects.sqlite import base as sqlite_base +from ..dialects.sybase import base as sybase_base + +__all__ = ("firebird", "mssql", "mysql", "postgresql", "sqlite", "oracle", "sybase") + +firebird = firebird_base +mssql = mssql_base +mysql = mysql_base +oracle = oracle_base +postgresql = postgresql_base +postgres = postgresql_base +sqlite = sqlite_base +sybase = sybase_base diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dbapi.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dbapi.pyi new file mode 100644 index 000000000000..432e5936a0c0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dbapi.pyi @@ -0,0 +1,36 @@ +# TODO: Tempory copy of _typeshed.dbapi, until that file is available in all typecheckers. +# Does not exist at runtime. + +from collections.abc import Mapping, Sequence +from typing import Any, Protocol + +DBAPITypeCode = Any | None +# Strictly speaking, this should be a Sequence, but the type system does +# not support fixed-length sequences. +DBAPIColumnDescription = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None] + +class DBAPIConnection(Protocol): + def close(self) -> object: ... + def commit(self) -> object: ... + # optional: + # def rollback(self) -> Any: ... + def cursor(self) -> DBAPICursor: ... + +class DBAPICursor(Protocol): + @property + def description(self) -> Sequence[DBAPIColumnDescription] | None: ... + @property + def rowcount(self) -> int: ... + # optional: + # def callproc(self, __procname: str, __parameters: Sequence[Any] = ...) -> Sequence[Any]: ... + def close(self) -> object: ... + def execute(self, __operation: str, __parameters: Sequence[Any] | Mapping[str, Any] = ...) -> object: ... + def executemany(self, __operation: str, __seq_of_parameters: Sequence[Sequence[Any]]) -> object: ... + def fetchone(self) -> Sequence[Any] | None: ... + def fetchmany(self, __size: int = ...) -> Sequence[Sequence[Any]]: ... + def fetchall(self) -> Sequence[Sequence[Any]]: ... + # optional: + # def nextset(self) -> None | Literal[True]: ... + arraysize: int + def setinputsizes(self, __sizes: Sequence[DBAPITypeCode | int | None]) -> object: ... + def setoutputsize(self, __size: int, __column: int = ...) -> object: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/__init__.pyi new file mode 100644 index 000000000000..63292f3709ce --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/__init__.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from . import ( + firebird as firebird, + mssql as mssql, + mysql as mysql, + oracle as oracle, + postgresql as postgresql, + sqlite as sqlite, + sybase as sybase, +) + +__all__ = ("firebird", "mssql", "mysql", "oracle", "postgresql", "sqlite", "sybase") + +registry: Any +plugins: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/__init__.pyi new file mode 100644 index 000000000000..c421f787eb13 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/__init__.pyi @@ -0,0 +1,34 @@ +from typing import Any + +from sqlalchemy.dialects.firebird.base import ( + BIGINT as BIGINT, + BLOB as BLOB, + CHAR as CHAR, + DATE as DATE, + FLOAT as FLOAT, + NUMERIC as NUMERIC, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) + +__all__ = ( + "SMALLINT", + "BIGINT", + "FLOAT", + "FLOAT", + "DATE", + "TIME", + "TEXT", + "NUMERIC", + "FLOAT", + "TIMESTAMP", + "VARCHAR", + "CHAR", + "BLOB", + "dialect", +) + +dialect: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/base.pyi new file mode 100644 index 000000000000..d6764ab69e2c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/base.pyi @@ -0,0 
+1,108 @@ +from typing import Any + +from sqlalchemy import sql, types as sqltypes +from sqlalchemy.engine import default +from sqlalchemy.sql import compiler +from sqlalchemy.types import ( + BIGINT as BIGINT, + BLOB as BLOB, + DATE as DATE, + FLOAT as FLOAT, + INTEGER as INTEGER, + NUMERIC as NUMERIC, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + Integer as Integer, +) + +RESERVED_WORDS: Any + +class _StringType(sqltypes.String): + charset: Any + def __init__(self, charset: Any | None = ..., **kw) -> None: ... + +class VARCHAR(_StringType, sqltypes.VARCHAR): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... + +class CHAR(_StringType, sqltypes.CHAR): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... + +class _FBDateTime(sqltypes.DateTime): + def bind_processor(self, dialect): ... + +colspecs: Any +ischema_names: Any + +class FBTypeCompiler(compiler.GenericTypeCompiler): + def visit_boolean(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_BLOB(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + +class FBCompiler(sql.compiler.SQLCompiler): + ansi_bind_rules: bool + def visit_now_func(self, fn, **kw): ... + def visit_startswith_op_binary(self, binary, operator, **kw): ... + def visit_not_startswith_op_binary(self, binary, operator, **kw): ... + def visit_mod_binary(self, binary, operator, **kw): ... + def visit_alias(self, alias, asfrom: bool = ..., **kwargs): ... # type: ignore[override] + def visit_substring_func(self, func, **kw): ... + def visit_length_func(self, function, **kw): ... + visit_char_length_func: Any + def function_argspec(self, func, **kw): ... + def default_from(self): ... + def visit_sequence(self, seq, **kw): ... + def get_select_precolumns(self, select, **kw): ... 
+ def limit_clause(self, select, **kw): ... + def returning_clause(self, stmt, returning_cols): ... + +class FBDDLCompiler(sql.compiler.DDLCompiler): + def visit_create_sequence(self, create): ... + def visit_drop_sequence(self, drop): ... + def visit_computed_column(self, generated): ... + +class FBIdentifierPreparer(sql.compiler.IdentifierPreparer): + reserved_words: Any + illegal_initial_characters: Any + def __init__(self, dialect) -> None: ... + +class FBExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): ... + +class FBDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + max_identifier_length: int + supports_sequences: bool + sequences_optional: bool + supports_default_values: bool + postfetch_lastrowid: bool + supports_native_boolean: bool + requires_name_normalize: bool + supports_empty_insert: bool + statement_compiler: Any + ddl_compiler: Any + preparer: Any + type_compiler: Any + colspecs: Any + ischema_names: Any + construct_arguments: Any + def __init__(self, *args, **kwargs) -> None: ... + implicit_returning: Any + def initialize(self, connection) -> None: ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Any | None = ...): ... # type: ignore[override] + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Any | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_column_sequence(self, connection, table_name, column_name, schema: Any | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Any | None = ..., **kw): ... 
+ def get_indexes(self, connection, table_name, schema: Any | None = ..., **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/fdb.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/fdb.pyi new file mode 100644 index 000000000000..4bc56c5a6659 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/fdb.pyi @@ -0,0 +1,10 @@ +from .kinterbasdb import FBDialect_kinterbasdb + +class FBDialect_fdb(FBDialect_kinterbasdb): + supports_statement_cache: bool + def __init__(self, enable_rowcount: bool = ..., retaining: bool = ..., **kwargs) -> None: ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + +dialect = FBDialect_fdb diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/kinterbasdb.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/kinterbasdb.pyi new file mode 100644 index 000000000000..a46e1c36198a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/firebird/kinterbasdb.pyi @@ -0,0 +1,38 @@ +from typing import Any + +from ...types import Float, Numeric +from .base import FBDialect, FBExecutionContext + +class _kinterbasdb_numeric: + def bind_processor(self, dialect): ... + +class _FBNumeric_kinterbasdb(_kinterbasdb_numeric, Numeric): ... +class _FBFloat_kinterbasdb(_kinterbasdb_numeric, Float): ... + +class FBExecutionContext_kinterbasdb(FBExecutionContext): + @property + def rowcount(self): ... 
+ +class FBDialect_kinterbasdb(FBDialect): + driver: str + supports_statement_cache: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + colspecs: Any + enable_rowcount: Any + type_conv: Any + concurrency_level: Any + retaining: Any + def __init__( + self, type_conv: int = ..., concurrency_level: int = ..., enable_rowcount: bool = ..., retaining: bool = ..., **kwargs + ) -> None: ... + @classmethod + def dbapi(cls): ... + def do_execute(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = FBDialect_kinterbasdb diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/__init__.pyi new file mode 100644 index 000000000000..c4b6c72f6f65 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/__init__.pyi @@ -0,0 +1,76 @@ +from typing import Any + +from .base import ( + BIGINT as BIGINT, + BINARY as BINARY, + BIT as BIT, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DATETIME2 as DATETIME2, + DATETIMEOFFSET as DATETIMEOFFSET, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + IMAGE as IMAGE, + INTEGER as INTEGER, + JSON as JSON, + MONEY as MONEY, + NCHAR as NCHAR, + NTEXT as NTEXT, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + ROWVERSION as ROWVERSION, + SMALLDATETIME as SMALLDATETIME, + SMALLINT as SMALLINT, + SMALLMONEY as SMALLMONEY, + SQL_VARIANT as SQL_VARIANT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TINYINT as TINYINT, + UNIQUEIDENTIFIER as UNIQUEIDENTIFIER, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + XML as XML, + try_cast as try_cast, 
+) + +__all__ = ( + "JSON", + "INTEGER", + "BIGINT", + "SMALLINT", + "TINYINT", + "VARCHAR", + "NVARCHAR", + "CHAR", + "NCHAR", + "TEXT", + "NTEXT", + "DECIMAL", + "NUMERIC", + "FLOAT", + "DATETIME", + "DATETIME2", + "DATETIMEOFFSET", + "DATE", + "TIME", + "SMALLDATETIME", + "BINARY", + "VARBINARY", + "BIT", + "REAL", + "IMAGE", + "TIMESTAMP", + "ROWVERSION", + "MONEY", + "SMALLMONEY", + "UNIQUEIDENTIFIER", + "SQL_VARIANT", + "XML", + "dialect", + "try_cast", +) + +dialect: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/base.pyi new file mode 100644 index 000000000000..3ff313abf997 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/base.pyi @@ -0,0 +1,324 @@ +from typing import Any, overload +from typing_extensions import Literal + +import sqlalchemy.types as sqltypes + +from ...engine import default +from ...sql import compiler +from ...sql.elements import Cast +from ...types import ( + BIGINT as BIGINT, + BINARY as BINARY, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INTEGER as INTEGER, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + SMALLINT as SMALLINT, + TEXT as TEXT, + VARCHAR as VARCHAR, +) +from .json import JSON as JSON + +MS_2017_VERSION: Any +MS_2016_VERSION: Any +MS_2014_VERSION: Any +MS_2012_VERSION: Any +MS_2008_VERSION: Any +MS_2005_VERSION: Any +MS_2000_VERSION: Any +RESERVED_WORDS: Any + +class REAL(sqltypes.REAL): + __visit_name__: str + def __init__(self, **kw) -> None: ... + +class TINYINT(sqltypes.Integer): + __visit_name__: str + +class _MSDate(sqltypes.Date): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... 
+ +class TIME(sqltypes.TIME): + precision: Any + def __init__(self, precision: Any | None = ..., **kwargs) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +_MSTime = TIME + +class _BASETIMEIMPL(TIME): + __visit_name__: str + +class _DateTimeBase: + def bind_processor(self, dialect): ... + +class _MSDateTime(_DateTimeBase, sqltypes.DateTime): ... + +class SMALLDATETIME(_DateTimeBase, sqltypes.DateTime): + __visit_name__: str + +class DATETIME2(_DateTimeBase, sqltypes.DateTime): + __visit_name__: str + precision: Any + def __init__(self, precision: Any | None = ..., **kw) -> None: ... + +class DATETIMEOFFSET(_DateTimeBase, sqltypes.DateTime): + __visit_name__: str + precision: Any + def __init__(self, precision: Any | None = ..., **kw) -> None: ... + +class _UnicodeLiteral: + def literal_processor(self, dialect): ... + +class _MSUnicode(_UnicodeLiteral, sqltypes.Unicode): ... +class _MSUnicodeText(_UnicodeLiteral, sqltypes.UnicodeText): ... + +class TIMESTAMP(sqltypes._Binary): + __visit_name__: str + length: Any + convert_int: Any + def __init__(self, convert_int: bool = ...) -> None: ... + def result_processor(self, dialect, coltype): ... + +class ROWVERSION(TIMESTAMP): + __visit_name__: str + +class NTEXT(sqltypes.UnicodeText): + __visit_name__: str + +class VARBINARY(sqltypes.VARBINARY, sqltypes.LargeBinary): + __visit_name__: str + filestream: bool + @overload + def __init__(self, length: Literal["max"] | None, filestream: Literal[True]) -> None: ... + @overload + def __init__(self, *, filestream: Literal[True]) -> None: ... + @overload + def __init__(self, length: Any | None = ..., filestream: Literal[False] = ...) -> None: ... 
+ +class IMAGE(sqltypes.LargeBinary): + __visit_name__: str + +class XML(sqltypes.Text): + __visit_name__: str + +class BIT(sqltypes.Boolean): + __visit_name__: str + +class MONEY(sqltypes.TypeEngine): + __visit_name__: str + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__: str + +class UNIQUEIDENTIFIER(sqltypes.TypeEngine): + __visit_name__: str + +class SQL_VARIANT(sqltypes.TypeEngine): + __visit_name__: str + +class TryCast(Cast): + __visit_name__: str + stringify_dialect: str + inherit_cache: bool + def __init__(self, *arg, **kw) -> None: ... + +try_cast: Any +MSDateTime: Any +MSDate: Any +MSReal = REAL +MSTinyInteger = TINYINT +MSTime = TIME +MSSmallDateTime = SMALLDATETIME +MSDateTime2 = DATETIME2 +MSDateTimeOffset = DATETIMEOFFSET +MSText = TEXT +MSNText = NTEXT +MSString = VARCHAR +MSNVarchar = NVARCHAR +MSChar = CHAR +MSNChar = NCHAR +MSBinary = BINARY +MSVarBinary = VARBINARY +MSImage = IMAGE +MSBit = BIT +MSMoney = MONEY +MSSmallMoney = SMALLMONEY +MSUniqueIdentifier = UNIQUEIDENTIFIER +MSVariant = SQL_VARIANT +ischema_names: Any + +class MSTypeCompiler(compiler.GenericTypeCompiler): + def visit_FLOAT(self, type_, **kw): ... + def visit_TINYINT(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_ROWVERSION(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_DATETIMEOFFSET(self, type_, **kw): ... + def visit_DATETIME2(self, type_, **kw): ... + def visit_SMALLDATETIME(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_text(self, type_, **kw): ... + def visit_unicode_text(self, type_, **kw): ... + def visit_NTEXT(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_NCHAR(self, type_, **kw): ... + def visit_NVARCHAR(self, type_, **kw): ... + def visit_date(self, type_, **kw): ... 
+ def visit__BASETIMEIMPL(self, type_, **kw): ... + def visit_time(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_IMAGE(self, type_, **kw): ... + def visit_XML(self, type_, **kw): ... + def visit_VARBINARY(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + def visit_MONEY(self, type_, **kw): ... + def visit_SMALLMONEY(self, type_, **kw): ... + def visit_UNIQUEIDENTIFIER(self, type_, **kw): ... + def visit_SQL_VARIANT(self, type_, **kw): ... + +class MSExecutionContext(default.DefaultExecutionContext): + def pre_exec(self) -> None: ... + cursor_fetch_strategy: Any + def post_exec(self) -> None: ... + def get_lastrowid(self): ... + @property + def rowcount(self): ... + def handle_dbapi_exception(self, e) -> None: ... + def get_result_cursor_strategy(self, result): ... + def fire_sequence(self, seq, type_): ... + def get_insert_default(self, column): ... + +class MSSQLCompiler(compiler.SQLCompiler): + returning_precedes_values: bool + extract_map: Any + tablealiases: Any + def __init__(self, *args, **kwargs) -> None: ... + def visit_now_func(self, fn, **kw): ... + def visit_current_date_func(self, fn, **kw): ... + def visit_length_func(self, fn, **kw): ... + def visit_char_length_func(self, fn, **kw): ... + def visit_concat_op_binary(self, binary, operator, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def get_select_precolumns(self, select, **kw): ... + def get_from_hint_text(self, table, text): ... + def get_crud_hint_text(self, table, text): ... + def fetch_clause(self, cs, **kwargs): ... + def limit_clause(self, cs, **kwargs): ... + def visit_try_cast(self, element, **kw): ... + def translate_select_structure(self, select_stmt, **kwargs): ... 
+ def visit_table(self, table, mssql_aliased: bool = ..., iscrud: bool = ..., **kwargs): ... # type: ignore[override] + def visit_alias(self, alias, **kw): ... + def visit_column(self, column, add_to_result_map: Any | None = ..., **kw): ... # type: ignore[override] + def visit_extract(self, extract, **kw): ... + def visit_savepoint(self, savepoint_stmt): ... + def visit_rollback_to_savepoint(self, savepoint_stmt): ... + def visit_binary(self, binary, **kwargs): ... + def returning_clause(self, stmt, returning_cols): ... + def get_cte_preamble(self, recursive): ... + def label_select_column(self, select, column, asfrom): ... + def for_update_clause(self, select, **kw): ... + def order_by_clause(self, select, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + def visit_empty_set_expr(self, type_): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_sequence(self, seq, **kw): ... + +class MSSQLStrictCompiler(MSSQLCompiler): + ansi_bind_rules: bool + def visit_in_op_binary(self, binary, operator, **kw): ... + def visit_not_in_op_binary(self, binary, operator, **kw): ... + def render_literal_value(self, value, type_): ... + +class MSDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_create_index(self, create, include_schema: bool = ...): ... # type: ignore[override] + def visit_drop_index(self, drop): ... + def visit_primary_key_constraint(self, constraint): ... + def visit_unique_constraint(self, constraint): ... 
+ def visit_computed_column(self, generated): ... + def visit_create_sequence(self, create, **kw): ... + def visit_identity_column(self, identity, **kw): ... + +class MSIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + def __init__(self, dialect) -> None: ... + def quote_schema(self, schema, force: Any | None = ...): ... + +class MSDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + supports_default_values: bool + supports_empty_insert: bool + use_scope_identity: bool + max_identifier_length: int + schema_name: str + implicit_returning: bool + full_returning: bool + colspecs: Any + engine_config_types: Any + ischema_names: Any + supports_sequences: bool + sequences_optional: bool + default_sequence_base: int + supports_native_boolean: bool + non_native_boolean_check_constraint: bool + supports_unicode_binds: bool + postfetch_lastrowid: bool + legacy_schema_aliasing: bool + server_version_info: Any + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + construct_arguments: Any + query_timeout: Any + deprecate_large_types: Any + isolation_level: Any + def __init__( + self, + query_timeout: Any | None = ..., + use_scope_identity: bool = ..., + schema_name: str = ..., + isolation_level: Any | None = ..., + deprecate_large_types: Any | None = ..., + json_serializer: Any | None = ..., + json_deserializer: Any | None = ..., + legacy_schema_aliasing: Any | None = ..., + **opts, + ) -> None: ... + def do_savepoint(self, connection, name) -> None: ... + def do_release_savepoint(self, connection, name) -> None: ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def initialize(self, connection) -> None: ... + def on_connect(self): ... + def has_table(self, connection, tablename, dbname, owner, schema): ... + def has_sequence(self, connection, sequencename, dbname, owner, schema): ... 
+ def get_sequence_names(self, connection, dbname, owner, schema, **kw): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, dbname, owner, schema, **kw): ... + def get_view_names(self, connection, dbname, owner, schema, **kw): ... + def get_indexes(self, connection, tablename, dbname, owner, schema, **kw): ... + def get_view_definition(self, connection, viewname, dbname, owner, schema, **kw): ... + def get_columns(self, connection, tablename, dbname, owner, schema, **kw): ... + def get_pk_constraint(self, connection, tablename, dbname, owner, schema, **kw): ... + def get_foreign_keys(self, connection, tablename, dbname, owner, schema, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/information_schema.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/information_schema.pyi new file mode 100644 index 000000000000..0a97a197f164 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/information_schema.pyi @@ -0,0 +1,35 @@ +from typing import Any + +from ...sql import expression +from ...types import TypeDecorator + +ischema: Any + +class CoerceUnicode(TypeDecorator): + impl: Any + cache_ok: bool + def process_bind_param(self, value, dialect): ... + def bind_expression(self, bindvalue): ... + +class _cast_on_2005(expression.ColumnElement[Any]): + bindvalue: Any + def __init__(self, bindvalue) -> None: ... + +schemata: Any +tables: Any +columns: Any +mssql_temp_table_columns: Any +constraints: Any +column_constraints: Any +key_constraints: Any +ref_constraints: Any +views: Any +computed_columns: Any +sequences: Any + +class IdentitySqlVariant(TypeDecorator): + impl: Any + cache_ok: bool + def column_expression(self, colexpr): ... 
+ +identity_columns: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/json.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/json.pyi new file mode 100644 index 000000000000..2ced3beece0d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/json.pyi @@ -0,0 +1,10 @@ +from ...types import JSON as _JSON + +class JSON(_JSON): ... + +class _FormatTypeMixin: + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSONIndexType(_FormatTypeMixin, _JSON.JSONIndexType): ... +class JSONPathType(_FormatTypeMixin, _JSON.JSONPathType): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/mxodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/mxodbc.pyi new file mode 100644 index 000000000000..bddc1929a4b9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/mxodbc.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from ...connectors.mxodbc import MxODBCConnector +from .base import VARBINARY, MSDialect, _MSDate, _MSTime +from .pyodbc import MSExecutionContext_pyodbc, _MSNumeric_pyodbc + +class _MSNumeric_mxodbc(_MSNumeric_pyodbc): ... + +class _MSDate_mxodbc(_MSDate): + def bind_processor(self, dialect): ... + +class _MSTime_mxodbc(_MSTime): + def bind_processor(self, dialect): ... + +class _VARBINARY_mxodbc(VARBINARY): + def bind_processor(self, dialect): ... + +class MSExecutionContext_mxodbc(MSExecutionContext_pyodbc): ... + +class MSDialect_mxodbc(MxODBCConnector, MSDialect): + supports_statement_cache: bool + colspecs: Any + description_encoding: Any + def __init__(self, description_encoding: Any | None = ..., **params) -> None: ... 
+ +dialect = MSDialect_mxodbc diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/__main__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/provision.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/Pillow/PIL/__main__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/provision.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pymssql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pymssql.pyi new file mode 100644 index 000000000000..b16a0b4f729f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pymssql.pyi @@ -0,0 +1,24 @@ +from typing import Any + +from ...types import Numeric +from .base import MSDialect, MSIdentifierPreparer + +class _MSNumeric_pymssql(Numeric): + def result_processor(self, dialect, type_): ... + +class MSIdentifierPreparer_pymssql(MSIdentifierPreparer): + def __init__(self, dialect) -> None: ... + +class MSDialect_pymssql(MSDialect): + supports_statement_cache: bool + supports_native_decimal: bool + driver: str + preparer: Any + colspecs: Any + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def set_isolation_level(self, connection, level) -> None: ... 
+ +dialect = MSDialect_pymssql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pyodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pyodbc.pyi new file mode 100644 index 000000000000..907824cea6f3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mssql/pyodbc.pyi @@ -0,0 +1,44 @@ +from typing import Any + +from ...connectors.pyodbc import PyODBCConnector +from ...types import DateTime, Float, Numeric +from .base import BINARY, DATETIMEOFFSET, VARBINARY, MSDialect, MSExecutionContext + +class _ms_numeric_pyodbc: + def bind_processor(self, dialect): ... + +class _MSNumeric_pyodbc(_ms_numeric_pyodbc, Numeric): ... +class _MSFloat_pyodbc(_ms_numeric_pyodbc, Float): ... + +class _ms_binary_pyodbc: + def bind_processor(self, dialect): ... + +class _ODBCDateTimeBindProcessor: + has_tz: bool + def bind_processor(self, dialect): ... + +class _ODBCDateTime(_ODBCDateTimeBindProcessor, DateTime): ... + +class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET): + has_tz: bool + +class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY): ... +class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY): ... + +class MSExecutionContext_pyodbc(MSExecutionContext): + def pre_exec(self) -> None: ... + def post_exec(self) -> None: ... + +class MSDialect_pyodbc(PyODBCConnector, MSDialect): + supports_statement_cache: bool + supports_sane_rowcount_returning: bool + colspecs: Any + description_encoding: Any + use_scope_identity: Any + fast_executemany: Any + def __init__(self, description_encoding: Any | None = ..., fast_executemany: bool = ..., **params) -> None: ... + def on_connect(self): ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = MSDialect_pyodbc diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/__init__.pyi new file mode 100644 index 000000000000..faaa7d6e642a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/__init__.pyi @@ -0,0 +1,85 @@ +from typing import Any + +from .base import ( + BIGINT as BIGINT, + BINARY as BINARY, + BIT as BIT, + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + DOUBLE as DOUBLE, + ENUM as ENUM, + FLOAT as FLOAT, + INTEGER as INTEGER, + JSON as JSON, + LONGBLOB as LONGBLOB, + LONGTEXT as LONGTEXT, + MEDIUMBLOB as MEDIUMBLOB, + MEDIUMINT as MEDIUMINT, + MEDIUMTEXT as MEDIUMTEXT, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SET as SET, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TINYBLOB as TINYBLOB, + TINYINT as TINYINT, + TINYTEXT as TINYTEXT, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + YEAR as YEAR, +) +from .dml import Insert as Insert, insert as insert +from .expression import match as match + +__all__ = ( + "BIGINT", + "BINARY", + "BIT", + "BLOB", + "BOOLEAN", + "CHAR", + "DATE", + "DATETIME", + "DECIMAL", + "DOUBLE", + "ENUM", + "DECIMAL", + "FLOAT", + "INTEGER", + "INTEGER", + "JSON", + "LONGBLOB", + "LONGTEXT", + "MEDIUMBLOB", + "MEDIUMINT", + "MEDIUMTEXT", + "NCHAR", + "NVARCHAR", + "NUMERIC", + "SET", + "SMALLINT", + "REAL", + "TEXT", + "TIME", + "TIMESTAMP", + "TINYBLOB", + "TINYINT", + "TINYTEXT", + "VARBINARY", + "VARCHAR", + "YEAR", + "dialect", + "insert", + "Insert", + "match", +) + +dialect: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/aiomysql.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/aiomysql.pyi new file mode 100644 index 000000000000..4021fd9061ba --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/aiomysql.pyi @@ -0,0 +1,73 @@ +from typing import Any + +from ...engine import AdaptedConnection +from .pymysql import MySQLDialect_pymysql + +class AsyncAdapt_aiomysql_cursor: + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + @property + def description(self): ... + @property + def rowcount(self): ... + @property + def arraysize(self): ... + @arraysize.setter + def arraysize(self, value) -> None: ... + @property + def lastrowid(self): ... + def close(self) -> None: ... + def execute(self, operation, parameters: Any | None = ...): ... + def executemany(self, operation, seq_of_parameters): ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor): + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiomysql_connection(AdaptedConnection): + await_: Any + dbapi: Any + def __init__(self, dbapi, connection) -> None: ... + def ping(self, reconnect): ... + def character_set_name(self): ... + def autocommit(self, value) -> None: ... + def cursor(self, server_side: bool = ...): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... 
+ +class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection): + await_: Any + +class AsyncAdapt_aiomysql_dbapi: + aiomysql: Any + pymysql: Any + paramstyle: str + def __init__(self, aiomysql, pymysql) -> None: ... + def connect(self, *arg, **kw): ... + +class MySQLDialect_aiomysql(MySQLDialect_pymysql): + driver: str + supports_statement_cache: bool + supports_server_side_cursors: bool + is_async: bool + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def get_driver_connection(self, connection): ... + +dialect = MySQLDialect_aiomysql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/asyncmy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/asyncmy.pyi new file mode 100644 index 000000000000..68d058563464 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/asyncmy.pyi @@ -0,0 +1,73 @@ +from typing import Any + +from ...engine import AdaptedConnection +from .pymysql import MySQLDialect_pymysql + +class AsyncAdapt_asyncmy_cursor: + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + @property + def description(self): ... + @property + def rowcount(self): ... + @property + def arraysize(self): ... + @arraysize.setter + def arraysize(self, value) -> None: ... + @property + def lastrowid(self): ... + def close(self) -> None: ... + def execute(self, operation, parameters: Any | None = ...): ... + def executemany(self, operation, seq_of_parameters): ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... 
+ +class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor): + server_side: bool + await_: Any + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_asyncmy_connection(AdaptedConnection): + await_: Any + dbapi: Any + def __init__(self, dbapi, connection) -> None: ... + def ping(self, reconnect): ... + def character_set_name(self): ... + def autocommit(self, value) -> None: ... + def cursor(self, server_side: bool = ...): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... + +class AsyncAdaptFallback_asyncmy_connection(AsyncAdapt_asyncmy_connection): + await_: Any + +class AsyncAdapt_asyncmy_dbapi: + asyncmy: Any + pymysql: Any + paramstyle: str + def __init__(self, asyncmy: Any) -> None: ... + def connect(self, *arg, **kw): ... + +class MySQLDialect_asyncmy(MySQLDialect_pymysql): + driver: str + supports_statement_cache: bool + supports_server_side_cursors: bool + is_async: bool + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def get_driver_connection(self, connection): ... 
+ +dialect = MySQLDialect_asyncmy diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/base.pyi new file mode 100644 index 000000000000..a7b58f6ea754 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/base.pyi @@ -0,0 +1,239 @@ +from typing import Any + +from ...engine import default +from ...sql import compiler +from ...types import BINARY as BINARY, BLOB as BLOB, BOOLEAN as BOOLEAN, DATE as DATE, VARBINARY as VARBINARY +from .enumerated import ENUM as ENUM, SET as SET +from .json import JSON as JSON +from .types import ( + BIGINT as BIGINT, + BIT as BIT, + CHAR as CHAR, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + DOUBLE as DOUBLE, + FLOAT as FLOAT, + INTEGER as INTEGER, + LONGBLOB as LONGBLOB, + LONGTEXT as LONGTEXT, + MEDIUMBLOB as MEDIUMBLOB, + MEDIUMINT as MEDIUMINT, + MEDIUMTEXT as MEDIUMTEXT, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TINYBLOB as TINYBLOB, + TINYINT as TINYINT, + TINYTEXT as TINYTEXT, + VARCHAR as VARCHAR, + YEAR as YEAR, +) + +AUTOCOMMIT_RE: Any +SET_RE: Any +MSTime = TIME +MSSet = SET +MSEnum = ENUM +MSLongBlob = LONGBLOB +MSMediumBlob = MEDIUMBLOB +MSTinyBlob = TINYBLOB +MSBlob = BLOB +MSBinary = BINARY +MSVarBinary = VARBINARY +MSNChar = NCHAR +MSNVarChar = NVARCHAR +MSChar = CHAR +MSString = VARCHAR +MSLongText = LONGTEXT +MSMediumText = MEDIUMTEXT +MSTinyText = TINYTEXT +MSText = TEXT +MSYear = YEAR +MSTimeStamp = TIMESTAMP +MSBit = BIT +MSSmallInteger = SMALLINT +MSTinyInteger = TINYINT +MSMediumInteger = MEDIUMINT +MSBigInteger = BIGINT +MSNumeric = NUMERIC +MSDecimal = DECIMAL +MSDouble = DOUBLE +MSReal = REAL +MSFloat = FLOAT +MSInteger = INTEGER +colspecs: Any +ischema_names: Any + +class 
MySQLExecutionContext(default.DefaultExecutionContext): + def should_autocommit_text(self, statement): ... + def create_server_side_cursor(self): ... + def fire_sequence(self, seq, type_): ... + +class MySQLCompiler(compiler.SQLCompiler): + render_table_with_column_in_update_from: bool + extract_map: Any + def default_from(self): ... + def visit_random_func(self, fn, **kw): ... + def visit_sequence(self, seq, **kw): ... + def visit_sysdate_func(self, fn, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_on_duplicate_key_update(self, on_duplicate, **kw): ... + def visit_concat_op_binary(self, binary, operator, **kw): ... + def visit_mysql_match(self, element, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def get_from_hint_text(self, table, text): ... + def visit_typeclause(self, typeclause, type_: Any | None = ..., **kw): ... + def visit_cast(self, cast, **kw): ... + def render_literal_value(self, value, type_): ... + def visit_true(self, element, **kw): ... + def visit_false(self, element, **kw): ... + def get_select_precolumns(self, select, **kw): ... + def visit_join(self, join, asfrom: bool = ..., from_linter: Any | None = ..., **kwargs): ... + def for_update_clause(self, select, **kw): ... + def limit_clause(self, select, **kw): ... + def update_limit_clause(self, update_stmt): ... + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + def visit_empty_set_expr(self, element_types): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... 
+ def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + +class MySQLDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kw): ... + def post_create_table(self, table): ... + def visit_create_index(self, create, **kw): ... + def visit_primary_key_constraint(self, constraint): ... + def visit_drop_index(self, drop): ... + def visit_drop_constraint(self, drop): ... + def define_constraint_match(self, constraint): ... + def visit_set_table_comment(self, create): ... + def visit_drop_table_comment(self, create): ... + def visit_set_column_comment(self, create): ... + +class MySQLTypeCompiler(compiler.GenericTypeCompiler): + def visit_NUMERIC(self, type_, **kw): ... + def visit_DECIMAL(self, type_, **kw): ... + def visit_DOUBLE(self, type_, **kw): ... + def visit_REAL(self, type_, **kw): ... + def visit_FLOAT(self, type_, **kw): ... + def visit_INTEGER(self, type_, **kw): ... + def visit_BIGINT(self, type_, **kw): ... + def visit_MEDIUMINT(self, type_, **kw): ... + def visit_TINYINT(self, type_, **kw): ... + def visit_SMALLINT(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_DATETIME(self, type_, **kw): ... + def visit_DATE(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_YEAR(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_TINYTEXT(self, type_, **kw): ... + def visit_MEDIUMTEXT(self, type_, **kw): ... + def visit_LONGTEXT(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_NVARCHAR(self, type_, **kw): ... + def visit_NCHAR(self, type_, **kw): ... + def visit_VARBINARY(self, type_, **kw): ... 
+ def visit_JSON(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_enum(self, type_, **kw): ... + def visit_BLOB(self, type_, **kw): ... + def visit_TINYBLOB(self, type_, **kw): ... + def visit_MEDIUMBLOB(self, type_, **kw): ... + def visit_LONGBLOB(self, type_, **kw): ... + def visit_ENUM(self, type_, **kw): ... + def visit_SET(self, type_, **kw): ... + def visit_BOOLEAN(self, type_, **kw): ... + +class MySQLIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + def __init__(self, dialect, server_ansiquotes: bool = ..., **kw) -> None: ... + +class MariaDBIdentifierPreparer(MySQLIdentifierPreparer): + reserved_words: Any + +class MySQLDialect(default.DefaultDialect): + logger: Any + name: str + supports_statement_cache: bool + supports_alter: bool + supports_native_boolean: bool + max_identifier_length: int + max_index_name_length: int + max_constraint_name_length: int + supports_native_enum: bool + supports_sequences: bool + sequences_optional: bool + supports_for_update_of: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_multivalues_insert: bool + supports_comments: bool + inline_comments: bool + default_paramstyle: str + colspecs: Any + cte_follows_insert: bool + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + ischema_names: Any + preparer: Any + is_mariadb: bool + construct_arguments: Any + isolation_level: Any + def __init__( + self, + isolation_level: Any | None = ..., + json_serializer: Any | None = ..., + json_deserializer: Any | None = ..., + is_mariadb: Any | None = ..., + **kwargs, + ) -> None: ... + def on_connect(self): ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... 
+ def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection): ... + def is_disconnect(self, e, connection, cursor): ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Any | None = ...): ... # type: ignore[override] + def get_sequence_names(self, connection, schema: Any | None = ..., **kw): ... + identifier_preparer: Any + def initialize(self, connection) -> None: ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_names(self, connection, schema: Any | None = ..., **kw): ... + def get_table_options(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_check_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_table_comment(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Any | None = ..., **kw): ... + +class _DecodingRow: + rowproxy: Any + charset: Any + def __init__(self, rowproxy, charset) -> None: ... + def __getitem__(self, index): ... + def __getattr__(self, attr): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/cymysql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/cymysql.pyi new file mode 100644 index 000000000000..408d931df22d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/cymysql.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from .base import BIT +from .mysqldb import MySQLDialect_mysqldb + +class _cymysqlBIT(BIT): + def result_processor(self, dialect, coltype): ... + +class MySQLDialect_cymysql(MySQLDialect_mysqldb): + driver: str + supports_statement_cache: bool + description_encoding: Any + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = MySQLDialect_cymysql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/dml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/dml.pyi new file mode 100644 index 000000000000..77b095c16b73 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/dml.pyi @@ -0,0 +1,23 @@ +from typing import Any + +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...util import memoized_property + +class Insert(StandardInsert): + stringify_dialect: str + inherit_cache: bool + @property + def inserted(self): ... + @memoized_property + def inserted_alias(self): ... + def on_duplicate_key_update(self, *args, **kw) -> None: ... + +insert: Any + +class OnDuplicateClause(ClauseElement): + __visit_name__: str + stringify_dialect: str + inserted_alias: Any + update: Any + def __init__(self, inserted_alias, update) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/enumerated.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/enumerated.pyi new file mode 100644 index 000000000000..e68dcdfdf032 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/enumerated.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from ...sql import sqltypes +from .types import _StringType + +class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum, _StringType): # type: ignore # incompatible with base class + __visit_name__: str + native_enum: bool + def __init__(self, *enums, **kw) -> None: ... + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): ... + +class SET(_StringType): + __visit_name__: str + retrieve_as_bitwise: Any + values: Any + def __init__(self, *values, **kw) -> None: ... + def column_expression(self, colexpr): ... + def result_processor(self, dialect, coltype): ... + def bind_processor(self, dialect): ... + def adapt(self, impltype, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/expression.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/expression.pyi new file mode 100644 index 000000000000..24d63634ffac --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/expression.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ...sql import elements +from ...sql.base import Generative + +class match(Generative, elements.BinaryExpression): + __visit_name__: str + inherit_cache: bool + def __init__(self, *cols, **kw) -> None: ... + modifiers: Any + def in_boolean_mode(self) -> None: ... + def in_natural_language_mode(self) -> None: ... + def with_query_expansion(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/json.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/json.pyi new file mode 100644 index 000000000000..c35f9c440158 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/json.pyi @@ -0,0 +1,10 @@ +import sqlalchemy.types as sqltypes + +class JSON(sqltypes.JSON): ... + +class _FormatTypeMixin: + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType): ... +class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadb.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadb.pyi new file mode 100644 index 000000000000..db8f5abd5f09 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadb.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from .base import MySQLDialect + +class MariaDBDialect(MySQLDialect): + is_mariadb: bool + supports_statement_cache: bool + name: str + preparer: Any + +def loader(driver): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadbconnector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadbconnector.pyi new file mode 100644 index 000000000000..0735fb75a28e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mariadbconnector.pyi @@ -0,0 +1,36 @@ +from typing import Any + +from .base import MySQLCompiler, MySQLDialect, MySQLExecutionContext + +mariadb_cpy_minimum_version: Any + +class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext): + def create_server_side_cursor(self): ... + def create_default_cursor(self): ... + +class MySQLCompiler_mariadbconnector(MySQLCompiler): ... + +class MySQLDialect_mariadbconnector(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + encoding: str + convert_unicode: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + default_paramstyle: str + statement_compiler: Any + supports_server_side_cursors: bool + paramstyle: str + def __init__(self, **kwargs) -> None: ... + @classmethod + def dbapi(cls): ... + def is_disconnect(self, e, connection, cursor): ... + def create_connect_args(self, url): ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... 
+ +dialect = MySQLDialect_mariadbconnector diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqlconnector.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqlconnector.pyi new file mode 100644 index 000000000000..df0a63589b1c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqlconnector.pyi @@ -0,0 +1,38 @@ +from typing import Any + +from ...util import memoized_property +from .base import BIT, MySQLCompiler, MySQLDialect, MySQLIdentifierPreparer + +class MySQLCompiler_mysqlconnector(MySQLCompiler): + def visit_mod_binary(self, binary, operator, **kw): ... + def post_process_text(self, text): ... + def escape_literal_column(self, text): ... + +class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer): ... + +class _myconnpyBIT(BIT): + def result_processor(self, dialect, coltype) -> None: ... + +class MySQLDialect_mysqlconnector(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_binds: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + default_paramstyle: str + statement_compiler: Any + preparer: Any + colspecs: Any + def __init__(self, *arg, **kw) -> None: ... + @property + def description_encoding(self): ... + @memoized_property + def supports_unicode_statements(self): ... + @classmethod + def dbapi(cls): ... + def do_ping(self, dbapi_connection): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = MySQLDialect_mysqlconnector diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqldb.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqldb.pyi new file mode 100644 index 000000000000..bb41d161da2f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/mysqldb.pyi @@ -0,0 +1,32 @@ +from typing import Any + +from ...util import memoized_property +from .base import MySQLCompiler, MySQLDialect, MySQLExecutionContext + +class MySQLExecutionContext_mysqldb(MySQLExecutionContext): + @property + def rowcount(self): ... + +class MySQLCompiler_mysqldb(MySQLCompiler): ... + +class MySQLDialect_mysqldb(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_native_decimal: bool + default_paramstyle: str + statement_compiler: Any + preparer: Any + def __init__(self, **kwargs) -> None: ... + @memoized_property + def supports_server_side_cursors(self): ... + @classmethod + def dbapi(cls): ... + def on_connect(self): ... + def do_ping(self, dbapi_connection): ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def create_connect_args(self, url, _translate_args: Any | None = ...): ... 
+ +dialect = MySQLDialect_mysqldb diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/oursql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/oursql.pyi new file mode 100644 index 000000000000..40d1d6919ccb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/oursql.pyi @@ -0,0 +1,39 @@ +from typing import Any + +from .base import BIT, MySQLDialect, MySQLExecutionContext + +class _oursqlBIT(BIT): + def result_processor(self, dialect, coltype) -> None: ... + +class MySQLExecutionContext_oursql(MySQLExecutionContext): + @property + def plain_query(self): ... + +class MySQLDialect_oursql(MySQLDialect): + driver: str + supports_statement_cache: bool + supports_unicode_binds: bool + supports_unicode_statements: bool + supports_native_decimal: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + def do_execute(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def do_begin(self, connection) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... # type: ignore[override] + def get_table_options(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_view_names(self, connection, schema: Any | None = ..., **kw): ... + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_schema_names(self, connection, **kw): ... 
+ def initialize(self, connection): ... + def is_disconnect(self, e, connection, cursor): ... + def create_connect_args(self, url): ... + +dialect = MySQLDialect_oursql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/provision.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/provision.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pymysql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pymysql.pyi new file mode 100644 index 000000000000..a4f6cb64fc48 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pymysql.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from ...util import memoized_property +from .mysqldb import MySQLDialect_mysqldb + +class MySQLDialect_pymysql(MySQLDialect_mysqldb): + driver: str + supports_statement_cache: bool + description_encoding: Any + supports_unicode_statements: bool + supports_unicode_binds: bool + @memoized_property + def supports_server_side_cursors(self): ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url, _translate_args: Any | None = ...): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = MySQLDialect_pymysql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pyodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pyodbc.pyi new file mode 100644 index 000000000000..f9363c3cc0b0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/pyodbc.pyi @@ -0,0 +1,20 @@ +from typing import Any + +from ...connectors.pyodbc import PyODBCConnector +from .base import MySQLDialect, MySQLExecutionContext +from .types import TIME + +class _pyodbcTIME(TIME): + def result_processor(self, dialect, coltype): ... + +class MySQLExecutionContext_pyodbc(MySQLExecutionContext): + def get_lastrowid(self): ... + +class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect): + supports_statement_cache: bool + colspecs: Any + supports_unicode_statements: bool + pyodbc_driver_name: str + def on_connect(self): ... + +dialect = MySQLDialect_pyodbc diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reflection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reflection.pyi new file mode 100644 index 000000000000..0df92ff88c10 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reflection.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class ReflectedState: + columns: Any + table_options: Any + table_name: Any + keys: Any + fk_constraints: Any + ck_constraints: Any + def __init__(self) -> None: ... + +class MySQLTableDefinitionParser: + logger: Any + dialect: Any + preparer: Any + def __init__(self, dialect, preparer) -> None: ... + def parse(self, show_create, charset): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reserved_words.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reserved_words.pyi new file mode 100644 index 000000000000..28a741b2b19d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/reserved_words.pyi @@ -0,0 +1,4 @@ +from typing import Any + +RESERVED_WORDS_MARIADB: Any +RESERVED_WORDS_MYSQL: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/types.pyi new file mode 100644 index 000000000000..fecd364f683c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/mysql/types.pyi @@ -0,0 +1,145 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +class _NumericType: + unsigned: Any + zerofill: Any + def __init__(self, unsigned: bool = ..., zerofill: bool = ..., **kw) -> None: ... + +class _FloatType(_NumericType, sqltypes.Float): + scale: Any + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: bool = ..., **kw) -> None: ... + +class _IntegerType(_NumericType, sqltypes.Integer): + display_width: Any + def __init__(self, display_width: Any | None = ..., **kw) -> None: ... + +class _StringType(sqltypes.String): + charset: Any + ascii: Any + unicode: Any + binary: Any + national: Any + def __init__( + self, + charset: Any | None = ..., + collation: Any | None = ..., + ascii: bool = ..., + binary: bool = ..., + unicode: bool = ..., + national: bool = ..., + **kw, + ) -> None: ... + +class _MatchType(sqltypes.Float, sqltypes.MatchType): # type: ignore # incompatible with base class + def __init__(self, **kw) -> None: ... 
+ +class NUMERIC(_NumericType, sqltypes.NUMERIC): + __visit_name__: str + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: bool = ..., **kw) -> None: ... + +class DECIMAL(_NumericType, sqltypes.DECIMAL): + __visit_name__: str + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: bool = ..., **kw) -> None: ... + +class DOUBLE(_FloatType): + __visit_name__: str + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: bool = ..., **kw) -> None: ... + +class REAL(_FloatType, sqltypes.REAL): + __visit_name__: str + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: bool = ..., **kw) -> None: ... + +class FLOAT(_FloatType, sqltypes.FLOAT): + __visit_name__: str + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: bool = ..., **kw) -> None: ... + def bind_processor(self, dialect) -> None: ... + +class INTEGER(_IntegerType, sqltypes.INTEGER): + __visit_name__: str + def __init__(self, display_width: Any | None = ..., **kw) -> None: ... + +class BIGINT(_IntegerType, sqltypes.BIGINT): + __visit_name__: str + def __init__(self, display_width: Any | None = ..., **kw) -> None: ... + +class MEDIUMINT(_IntegerType): + __visit_name__: str + def __init__(self, display_width: Any | None = ..., **kw) -> None: ... + +class TINYINT(_IntegerType): + __visit_name__: str + def __init__(self, display_width: Any | None = ..., **kw) -> None: ... + +class SMALLINT(_IntegerType, sqltypes.SMALLINT): + __visit_name__: str + def __init__(self, display_width: Any | None = ..., **kw) -> None: ... + +class BIT(sqltypes.TypeEngine): + __visit_name__: str + length: Any + def __init__(self, length: Any | None = ...) -> None: ... + def result_processor(self, dialect, coltype): ... + +class TIME(sqltypes.TIME): + __visit_name__: str + fsp: Any + def __init__(self, timezone: bool = ..., fsp: Any | None = ...) -> None: ... 
+ def result_processor(self, dialect, coltype): ... + +class TIMESTAMP(sqltypes.TIMESTAMP): + __visit_name__: str + fsp: Any + def __init__(self, timezone: bool = ..., fsp: Any | None = ...) -> None: ... + +class DATETIME(sqltypes.DATETIME): + __visit_name__: str + fsp: Any + def __init__(self, timezone: bool = ..., fsp: Any | None = ...) -> None: ... + +class YEAR(sqltypes.TypeEngine): + __visit_name__: str + display_width: Any + def __init__(self, display_width: Any | None = ...) -> None: ... + +class TEXT(_StringType, sqltypes.TEXT): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kw) -> None: ... + +class TINYTEXT(_StringType): + __visit_name__: str + def __init__(self, **kwargs) -> None: ... + +class MEDIUMTEXT(_StringType): + __visit_name__: str + def __init__(self, **kwargs) -> None: ... + +class LONGTEXT(_StringType): + __visit_name__: str + def __init__(self, **kwargs) -> None: ... + +class VARCHAR(_StringType, sqltypes.VARCHAR): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... + +class CHAR(_StringType, sqltypes.CHAR): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... + +class NVARCHAR(_StringType, sqltypes.NVARCHAR): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... + +class NCHAR(_StringType, sqltypes.NCHAR): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... 
+ +class TINYBLOB(sqltypes._Binary): + __visit_name__: str + +class MEDIUMBLOB(sqltypes._Binary): + __visit_name__: str + +class LONGBLOB(sqltypes._Binary): + __visit_name__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/__init__.pyi new file mode 100644 index 000000000000..3cc1662ff2f1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/__init__.pyi @@ -0,0 +1,52 @@ +from typing import Any + +from .base import ( + BFILE as BFILE, + BINARY_DOUBLE as BINARY_DOUBLE, + BINARY_FLOAT as BINARY_FLOAT, + BLOB as BLOB, + CHAR as CHAR, + CLOB as CLOB, + DATE as DATE, + DOUBLE_PRECISION as DOUBLE_PRECISION, + FLOAT as FLOAT, + INTERVAL as INTERVAL, + LONG as LONG, + NCHAR as NCHAR, + NCLOB as NCLOB, + NUMBER as NUMBER, + NVARCHAR as NVARCHAR, + NVARCHAR2 as NVARCHAR2, + RAW as RAW, + ROWID as ROWID, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, + VARCHAR2 as VARCHAR2, +) + +__all__ = ( + "VARCHAR", + "NVARCHAR", + "CHAR", + "NCHAR", + "DATE", + "NUMBER", + "BLOB", + "BFILE", + "CLOB", + "NCLOB", + "TIMESTAMP", + "RAW", + "FLOAT", + "DOUBLE_PRECISION", + "BINARY_DOUBLE", + "BINARY_FLOAT", + "LONG", + "dialect", + "INTERVAL", + "VARCHAR2", + "NVARCHAR2", + "ROWID", +) + +dialect: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/base.pyi new file mode 100644 index 000000000000..6b9668957b2b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/base.pyi @@ -0,0 +1,219 @@ +from typing import Any + +from sqlalchemy.sql import ClauseElement + +from ...engine import default +from ...sql import compiler, sqltypes +from ...types import ( + BLOB as BLOB, + CHAR as 
CHAR, + CLOB as CLOB, + FLOAT as FLOAT, + INTEGER as INTEGER, + NCHAR as NCHAR, + NVARCHAR as NVARCHAR, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) + +RESERVED_WORDS: Any +NO_ARG_FNS: Any + +class RAW(sqltypes._Binary): + __visit_name__: str + +OracleRaw = RAW + +class NCLOB(sqltypes.Text): + __visit_name__: str + +class VARCHAR2(VARCHAR): + __visit_name__: str + +NVARCHAR2 = NVARCHAR + +class NUMBER(sqltypes.Numeric, sqltypes.Integer): + __visit_name__: str + def __init__(self, precision: Any | None = ..., scale: Any | None = ..., asdecimal: Any | None = ...) -> None: ... + def adapt(self, impltype): ... + +class DOUBLE_PRECISION(sqltypes.Float): + __visit_name__: str + +class BINARY_DOUBLE(sqltypes.Float): + __visit_name__: str + +class BINARY_FLOAT(sqltypes.Float): + __visit_name__: str + +class BFILE(sqltypes.LargeBinary): + __visit_name__: str + +class LONG(sqltypes.Text): + __visit_name__: str + +class DATE(sqltypes.DateTime): + __visit_name__: str + +class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval): + __visit_name__: str + day_precision: Any + second_precision: Any + def __init__(self, day_precision: Any | None = ..., second_precision: Any | None = ...) -> None: ... + def as_generic(self, allow_nulltype: bool = ...): ... + def coerce_compared_value(self, op, value): ... + +class ROWID(sqltypes.TypeEngine): + __visit_name__: str + +class _OracleBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): ... + +colspecs: Any +ischema_names: Any + +class OracleTypeCompiler(compiler.GenericTypeCompiler): + def visit_datetime(self, type_, **kw): ... + def visit_float(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_INTERVAL(self, type_, **kw): ... + def visit_LONG(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_DOUBLE_PRECISION(self, type_, **kw): ... + def visit_BINARY_DOUBLE(self, type_, **kw): ... + def visit_BINARY_FLOAT(self, type_, **kw): ... 
+ def visit_FLOAT(self, type_, **kw): ... + def visit_NUMBER(self, type_, **kw): ... + def visit_string(self, type_, **kw): ... + def visit_VARCHAR2(self, type_, **kw): ... + def visit_NVARCHAR2(self, type_, **kw): ... + visit_NVARCHAR: Any + def visit_VARCHAR(self, type_, **kw): ... + def visit_text(self, type_, **kw): ... + def visit_unicode_text(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_big_integer(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_RAW(self, type_, **kw): ... + def visit_ROWID(self, type_, **kw): ... + +class OracleCompiler(compiler.SQLCompiler): + compound_keywords: Any + def __init__(self, *args, **kwargs) -> None: ... + def visit_mod_binary(self, binary, operator, **kw): ... + def visit_now_func(self, fn, **kw): ... + def visit_char_length_func(self, fn, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def get_cte_preamble(self, recursive): ... + def get_select_hint_text(self, byfroms): ... + def function_argspec(self, fn, **kw): ... + def visit_function(self, func, **kw): ... + def visit_table_valued_column(self, element, **kw): ... + def default_from(self): ... + def visit_join(self, join, from_linter: Any | None = ..., **kwargs): ... # type: ignore[override] + def visit_outer_join_column(self, vc, **kw): ... + def visit_sequence(self, seq, **kw): ... + def get_render_as_alias_suffix(self, alias_name_text): ... + has_out_parameters: bool + def returning_clause(self, stmt, returning_cols): ... + def translate_select_structure(self, select_stmt, **kwargs): ... + def limit_clause(self, select, **kw): ... + def visit_empty_set_expr(self, type_): ... + def for_update_clause(self, select, **kw): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... 
+ def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + +class OracleDDLCompiler(compiler.DDLCompiler): + def define_constraint_cascades(self, constraint): ... + def visit_drop_table_comment(self, drop): ... + def visit_create_index(self, create): ... + def post_create_table(self, table): ... + def get_identity_options(self, identity_options): ... + def visit_computed_column(self, generated): ... + def visit_identity_column(self, identity, **kw): ... + +class OracleIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + illegal_initial_characters: Any + def format_savepoint(self, savepoint): ... + +class OracleExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): ... + +class OracleDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + supports_alter: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + max_identifier_length: int + supports_simple_order_by_label: bool + cte_follows_insert: bool + supports_sequences: bool + sequences_optional: bool + postfetch_lastrowid: bool + default_paramstyle: str + colspecs: Any + ischema_names: Any + requires_name_normalize: bool + supports_comments: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_identity_columns: bool + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + reflection_options: Any + construct_arguments: Any + use_ansi: Any + optimize_limits: Any + exclude_tablespaces: Any + def __init__( + self, + use_ansi: bool = ..., + optimize_limits: bool = ..., + use_binds_for_limits: Any | None = ..., + use_nchar_for_unicode: bool = ..., + exclude_tablespaces=..., + **kwargs, + ) -> None: ... + implicit_returning: Any + def initialize(self, connection) -> None: ... 
+ def do_release_savepoint(self, connection, name) -> None: ... + def get_isolation_level(self, connection) -> None: ... + def get_default_isolation_level(self, dbapi_conn): ... + def set_isolation_level(self, connection, level) -> None: ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... # type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Any | None = ...): ... # type: ignore[override] + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_temp_table_names(self, connection, **kw): ... + def get_view_names(self, connection, schema: Any | None = ..., **kw): ... + def get_sequence_names(self, connection, schema: Any | None = ..., **kw): ... + def get_table_options(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_table_comment( + self, connection, table_name, schema: Any | None = ..., resolve_synonyms: bool = ..., dblink: str = ..., **kw + ): ... + def get_indexes( + self, connection, table_name, schema: Any | None = ..., resolve_synonyms: bool = ..., dblink: str = ..., **kw + ): ... + def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_view_definition( + self, connection, view_name, schema: Any | None = ..., resolve_synonyms: bool = ..., dblink: str = ..., **kw + ): ... + def get_check_constraints(self, connection, table_name, schema: Any | None = ..., include_all: bool = ..., **kw): ... + +class _OuterJoinColumn(ClauseElement): + __visit_name__: str + column: Any + def __init__(self, column) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/cx_oracle.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/cx_oracle.pyi new file mode 100644 index 000000000000..05f26b87b3ee --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/cx_oracle.pyi @@ -0,0 +1,127 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from . import base as oracle +from .base import OracleCompiler, OracleDialect, OracleExecutionContext + +class _OracleInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): ... + +class _OracleNumeric(sqltypes.Numeric): + is_number: bool + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype) -> None: ... + +class _OracleBinaryFloat(_OracleNumeric): + def get_dbapi_type(self, dbapi): ... + +class _OracleBINARY_FLOAT(_OracleBinaryFloat, oracle.BINARY_FLOAT): ... +class _OracleBINARY_DOUBLE(_OracleBinaryFloat, oracle.BINARY_DOUBLE): ... + +class _OracleNUMBER(_OracleNumeric): + is_number: bool + +class _OracleDate(sqltypes.Date): + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _OracleChar(sqltypes.CHAR): + def get_dbapi_type(self, dbapi): ... + +class _OracleNChar(sqltypes.NCHAR): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeStringNCHAR(oracle.NVARCHAR2): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeStringCHAR(sqltypes.Unicode): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeTextNCLOB(oracle.NCLOB): + def get_dbapi_type(self, dbapi): ... + +class _OracleUnicodeTextCLOB(sqltypes.UnicodeText): + def get_dbapi_type(self, dbapi): ... + +class _OracleText(sqltypes.Text): + def get_dbapi_type(self, dbapi): ... + +class _OracleLong(oracle.LONG): + def get_dbapi_type(self, dbapi): ... + +class _OracleString(sqltypes.String): ... 
+ +class _OracleEnum(sqltypes.Enum): + def bind_processor(self, dialect): ... + +class _OracleBinary(sqltypes.LargeBinary): + def get_dbapi_type(self, dbapi): ... + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _OracleInterval(oracle.INTERVAL): + def get_dbapi_type(self, dbapi): ... + +class _OracleRaw(oracle.RAW): ... + +class _OracleRowid(oracle.ROWID): + def get_dbapi_type(self, dbapi): ... + +class OracleCompiler_cx_oracle(OracleCompiler): + def bindparam_string(self, name, **kw): ... + +class OracleExecutionContext_cx_oracle(OracleExecutionContext): + out_parameters: Any + include_set_input_sizes: Any + def pre_exec(self) -> None: ... + cursor_fetch_strategy: Any + def post_exec(self) -> None: ... + def create_cursor(self): ... + def get_out_parameter_values(self, out_param_names): ... + +class OracleDialect_cx_oracle(OracleDialect): + supports_statement_cache: bool + statement_compiler: Any + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + use_setinputsizes: bool + driver: str + colspecs: Any + execute_sequence_format: Any + arraysize: Any + encoding_errors: Any + auto_convert_lobs: Any + coerce_to_unicode: Any + coerce_to_decimal: Any + cx_oracle_ver: Any + def __init__( + self, + auto_convert_lobs: bool = ..., + coerce_to_unicode: bool = ..., + coerce_to_decimal: bool = ..., + arraysize: int = ..., + encoding_errors: Any | None = ..., + threaded: Any | None = ..., + **kwargs, + ): ... + @classmethod + def dbapi(cls): ... + def initialize(self, connection) -> None: ... + def get_isolation_level(self, connection): ... + def set_isolation_level(self, connection, level) -> None: ... + def on_connect(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def create_xid(self): ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) 
-> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def do_recover_twophase(self, connection) -> None: ... + +dialect = OracleDialect_cx_oracle diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/provision.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/oracle/provision.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/__init__.pyi new file mode 100644 index 000000000000..67a7995dd236 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/__init__.pyi @@ -0,0 +1,98 @@ +import typing + +from .array import ARRAY as ARRAY, All as All, Any as Any, array as array +from .base import ( + BIGINT as BIGINT, + BIT as BIT, + BOOLEAN as BOOLEAN, + BYTEA as BYTEA, + CHAR as CHAR, + CIDR as CIDR, + DATE as DATE, + DOUBLE_PRECISION as DOUBLE_PRECISION, + ENUM as ENUM, + FLOAT as FLOAT, + INET as INET, + INTEGER as INTEGER, + INTERVAL as INTERVAL, + MACADDR as MACADDR, + MONEY as MONEY, + NUMERIC as NUMERIC, + OID as OID, + REAL as REAL, + REGCLASS as REGCLASS, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + TSVECTOR as TSVECTOR, + UUID as UUID, 
+ VARCHAR as VARCHAR, + CreateEnumType as CreateEnumType, + DropEnumType as DropEnumType, +) +from .dml import Insert as Insert, insert as insert +from .ext import ExcludeConstraint as ExcludeConstraint, aggregate_order_by as aggregate_order_by, array_agg as array_agg +from .hstore import HSTORE as HSTORE, hstore as hstore +from .json import JSON as JSON, JSONB as JSONB +from .ranges import ( + DATERANGE as DATERANGE, + INT4RANGE as INT4RANGE, + INT8RANGE as INT8RANGE, + NUMRANGE as NUMRANGE, + TSRANGE as TSRANGE, + TSTZRANGE as TSTZRANGE, +) + +__all__ = ( + "INTEGER", + "BIGINT", + "SMALLINT", + "VARCHAR", + "CHAR", + "TEXT", + "NUMERIC", + "FLOAT", + "REAL", + "INET", + "CIDR", + "UUID", + "BIT", + "MACADDR", + "MONEY", + "OID", + "REGCLASS", + "DOUBLE_PRECISION", + "TIMESTAMP", + "TIME", + "DATE", + "BYTEA", + "BOOLEAN", + "INTERVAL", + "ARRAY", + "ENUM", + "dialect", + "array", + "HSTORE", + "hstore", + "INT4RANGE", + "INT8RANGE", + "NUMRANGE", + "DATERANGE", + "TSVECTOR", + "TSRANGE", + "TSTZRANGE", + "JSON", + "JSONB", + "Any", + "All", + "DropEnumType", + "CreateEnumType", + "ExcludeConstraint", + "aggregate_order_by", + "array_agg", + "insert", + "Insert", +) + +dialect: typing.Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/array.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/array.pyi new file mode 100644 index 000000000000..ff186142a04e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/array.pyi @@ -0,0 +1,40 @@ +from typing import Any as _Any + +import sqlalchemy.types as sqltypes + +from ...sql import expression + +def Any(other, arrexpr, operator=...): ... +def All(other, arrexpr, operator=...): ... 
+ +class array(expression.ClauseList, expression.ColumnElement[_Any]): + __visit_name__: str + stringify_dialect: str + inherit_cache: bool + type: _Any + def __init__(self, clauses, **kw) -> None: ... + def self_group(self, against: _Any | None = ...): ... + +CONTAINS: _Any +CONTAINED_BY: _Any +OVERLAP: _Any + +class ARRAY(sqltypes.ARRAY): + class Comparator(sqltypes.ARRAY.Comparator): + def contains(self, other, **kwargs): ... + def contained_by(self, other): ... + def overlap(self, other): ... + comparator_factory: _Any + item_type: _Any + as_tuple: _Any + dimensions: _Any + zero_indexes: _Any + def __init__(self, item_type, as_tuple: bool = ..., dimensions: _Any | None = ..., zero_indexes: bool = ...) -> None: ... + @property + def hashable(self): ... + @property + def python_type(self): ... + def compare_values(self, x, y): ... + def bind_expression(self, bindvalue): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/asyncpg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/asyncpg.pyi new file mode 100644 index 000000000000..0645aa783294 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/asyncpg.pyi @@ -0,0 +1,202 @@ +from typing import Any + +from ...engine import AdaptedConnection +from ...sql import sqltypes +from . import json +from .base import ENUM, INTERVAL, OID, REGCLASS, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer + +class AsyncpgTime(sqltypes.Time): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgDate(sqltypes.Date): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgDateTime(sqltypes.DateTime): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): ... 
+ +class AsyncPgInterval(INTERVAL): + def get_dbapi_type(self, dbapi): ... + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): ... + +class AsyncPgEnum(ENUM): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgBigInteger(sqltypes.BigInteger): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgJSON(json.JSON): + def get_dbapi_type(self, dbapi): ... + def result_processor(self, dialect, coltype) -> None: ... + +class AsyncpgJSONB(json.JSONB): + def get_dbapi_type(self, dbapi): ... + def result_processor(self, dialect, coltype) -> None: ... + +class AsyncpgJSONIndexType(sqltypes.JSON.JSONIndexType): + def get_dbapi_type(self, dbapi) -> None: ... + +class AsyncpgJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgJSONPathType(json.JSONPathType): + def bind_processor(self, dialect): ... + +class AsyncpgUUID(UUID): + def get_dbapi_type(self, dbapi): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class AsyncpgNumeric(sqltypes.Numeric): + def get_dbapi_type(self, dbapi): ... + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class AsyncpgFloat(AsyncpgNumeric): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgREGCLASS(REGCLASS): + def get_dbapi_type(self, dbapi): ... + +class AsyncpgOID(OID): + def get_dbapi_type(self, dbapi): ... + +class PGExecutionContext_asyncpg(PGExecutionContext): + def handle_dbapi_exception(self, e) -> None: ... + exclude_set_input_sizes: Any + def pre_exec(self) -> None: ... + def create_server_side_cursor(self): ... + +class PGCompiler_asyncpg(PGCompiler): ... +class PGIdentifierPreparer_asyncpg(PGIdentifierPreparer): ... 
+ +class AsyncAdapt_asyncpg_cursor: + server_side: bool + description: Any + arraysize: int + rowcount: int + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def execute(self, operation, parameters: Any | None = ...) -> None: ... + def executemany(self, operation, seq_of_parameters): ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_asyncpg_ss_cursor(AsyncAdapt_asyncpg_cursor): + server_side: bool + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + def __aiter__(self): ... + async def __anext__(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + def executemany(self, operation, seq_of_parameters) -> None: ... + +class AsyncAdapt_asyncpg_connection(AdaptedConnection): + await_: Any + dbapi: Any + isolation_level: str + readonly: bool + deferrable: bool + def __init__(self, dbapi, connection, prepared_statement_cache_size: int = ...) -> None: ... + @property + def autocommit(self): ... + @autocommit.setter + def autocommit(self, value) -> None: ... + def set_isolation_level(self, level) -> None: ... + def cursor(self, server_side: bool = ...): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... + +class AsyncAdaptFallback_asyncpg_connection(AsyncAdapt_asyncpg_connection): + await_: Any + +class AsyncAdapt_asyncpg_dbapi: + asyncpg: Any + paramstyle: str + def __init__(self, asyncpg) -> None: ... + def connect(self, *arg, **kw): ... + + class Error(Exception): ... + class Warning(Exception): ... + class InterfaceError(Error): ... + class DatabaseError(Error): ... + class InternalError(DatabaseError): ... + class OperationalError(DatabaseError): ... + class ProgrammingError(DatabaseError): ... 
+ class IntegrityError(DatabaseError): ... + class DataError(DatabaseError): ... + class NotSupportedError(DatabaseError): ... + class InternalServerError(InternalError): ... + + class InvalidCachedStatementError(NotSupportedError): + def __init__(self, message) -> None: ... + + def Binary(self, value): ... + STRING: Any + TIMESTAMP: Any + TIMESTAMP_W_TZ: Any + TIME: Any + DATE: Any + INTERVAL: Any + NUMBER: Any + FLOAT: Any + BOOLEAN: Any + INTEGER: Any + BIGINTEGER: Any + BYTES: Any + DECIMAL: Any + JSON: Any + JSONB: Any + ENUM: Any + UUID: Any + BYTEA: Any + DATETIME: Any + BINARY: Any + +class PGDialect_asyncpg(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_server_side_cursors: bool + supports_unicode_binds: bool + default_paramstyle: str + supports_sane_multi_rowcount: bool + statement_compiler: Any + preparer: Any + use_setinputsizes: bool + use_native_uuid: bool + colspecs: Any + is_async: bool + @classmethod + def dbapi(cls): ... + def set_isolation_level(self, connection, level) -> None: ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection): ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection): ... + def create_connect_args(self, url): ... + @classmethod + def get_pool_class(cls, url): ... + def is_disconnect(self, e, connection, cursor): ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + async def setup_asyncpg_json_codec(self, conn): ... + async def setup_asyncpg_jsonb_codec(self, conn): ... + def on_connect(self): ... + def get_driver_connection(self, connection): ... 
+ +dialect = PGDialect_asyncpg diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/base.pyi new file mode 100644 index 000000000000..a2b167315ae9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/base.pyi @@ -0,0 +1,302 @@ +from typing import Any + +from ...engine import characteristics, default, reflection +from ...schema import _CreateDropBase +from ...sql import compiler, elements, sqltypes +from ...sql.ddl import DDLBase +from ...types import ( + BIGINT as BIGINT, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DATE as DATE, + FLOAT as FLOAT, + INTEGER as INTEGER, + NUMERIC as NUMERIC, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + VARCHAR as VARCHAR, +) + +IDX_USING: Any +AUTOCOMMIT_REGEXP: Any +RESERVED_WORDS: Any + +class BYTEA(sqltypes.LargeBinary): + __visit_name__: str + +class DOUBLE_PRECISION(sqltypes.Float): + __visit_name__: str + +class INET(sqltypes.TypeEngine): + __visit_name__: str + +PGInet = INET + +class CIDR(sqltypes.TypeEngine): + __visit_name__: str + +PGCidr = CIDR + +class MACADDR(sqltypes.TypeEngine): + __visit_name__: str + +PGMacAddr = MACADDR + +class MONEY(sqltypes.TypeEngine): + __visit_name__: str + +class OID(sqltypes.TypeEngine): + __visit_name__: str + +class REGCLASS(sqltypes.TypeEngine): + __visit_name__: str + +class TIMESTAMP(sqltypes.TIMESTAMP): + precision: Any + def __init__(self, timezone: bool = ..., precision: Any | None = ...) -> None: ... + +class TIME(sqltypes.TIME): + precision: Any + def __init__(self, timezone: bool = ..., precision: Any | None = ...) -> None: ... + +class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval): + __visit_name__: str + native: bool + precision: Any + fields: Any + def __init__(self, precision: Any | None = ..., fields: Any | None = ...) -> None: ... 
+ @classmethod + def adapt_emulated_to_native(cls, interval, **kw): ... + def as_generic(self, allow_nulltype: bool = ...): ... + @property + def python_type(self): ... + def coerce_compared_value(self, op, value): ... + +PGInterval = INTERVAL + +class BIT(sqltypes.TypeEngine): + __visit_name__: str + length: Any + varying: Any + def __init__(self, length: Any | None = ..., varying: bool = ...) -> None: ... + +PGBit = BIT + +class UUID(sqltypes.TypeEngine): + __visit_name__: str + as_uuid: Any + def __init__(self, as_uuid: bool = ...) -> None: ... + def coerce_compared_value(self, op, value): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +PGUuid = UUID + +class TSVECTOR(sqltypes.TypeEngine): + __visit_name__: str + +class ENUM(sqltypes.NativeForEmulated, sqltypes.Enum): # type: ignore # base classes incompatible + native_enum: bool + create_type: Any + def __init__(self, *enums, **kw) -> None: ... + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): ... + def create(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + + class EnumGenerator(DDLBase): + checkfirst: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., **kwargs) -> None: ... + def visit_enum(self, enum) -> None: ... + + class EnumDropper(DDLBase): + checkfirst: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., **kwargs) -> None: ... + def visit_enum(self, enum) -> None: ... + +class _ColonCast(elements.Cast): + __visit_name__: str + type: Any + clause: Any + typeclause: Any + def __init__(self, expression, type_) -> None: ... + +colspecs: Any +ischema_names: Any + +class PGCompiler(compiler.SQLCompiler): + def visit_colon_cast(self, element, **kw): ... + def visit_array(self, element, **kw): ... + def visit_slice(self, element, **kw): ... 
+ def visit_json_getitem_op_binary(self, binary, operator, _cast_applied: bool = ..., **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, _cast_applied: bool = ..., **kw): ... + def visit_getitem_binary(self, binary, operator, **kw): ... + def visit_aggregate_order_by(self, element, **kw): ... + def visit_match_op_binary(self, binary, operator, **kw): ... + def visit_ilike_op_binary(self, binary, operator, **kw): ... + def visit_not_ilike_op_binary(self, binary, operator, **kw): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + def visit_empty_set_expr(self, element_types): ... + def render_literal_value(self, value, type_): ... + def visit_sequence(self, seq, **kw): ... + def limit_clause(self, select, **kw): ... + def format_from_hint_text(self, sqltext, table, hint, iscrud): ... + def get_select_precolumns(self, select, **kw): ... + def for_update_clause(self, select, **kw): ... + def returning_clause(self, stmt, returning_cols): ... + def visit_substring_func(self, func, **kw): ... + def visit_on_conflict_do_nothing(self, on_conflict, **kw): ... + def visit_on_conflict_do_update(self, on_conflict, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + def fetch_clause(self, select, **kw): ... + +class PGDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_check_constraint(self, constraint): ... + def visit_drop_table_comment(self, drop): ... + def visit_create_enum_type(self, create): ... + def visit_drop_enum_type(self, drop): ... + def visit_create_index(self, create): ... + def visit_drop_index(self, drop): ... 
+ def visit_exclude_constraint(self, constraint, **kw): ... + def post_create_table(self, table): ... + def visit_computed_column(self, generated): ... + def visit_create_sequence(self, create, **kw): ... + +class PGTypeCompiler(compiler.GenericTypeCompiler): + def visit_TSVECTOR(self, type_, **kw): ... + def visit_INET(self, type_, **kw): ... + def visit_CIDR(self, type_, **kw): ... + def visit_MACADDR(self, type_, **kw): ... + def visit_MONEY(self, type_, **kw): ... + def visit_OID(self, type_, **kw): ... + def visit_REGCLASS(self, type_, **kw): ... + def visit_FLOAT(self, type_, **kw): ... + def visit_DOUBLE_PRECISION(self, type_, **kw): ... + def visit_BIGINT(self, type_, **kw): ... + def visit_HSTORE(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + def visit_JSONB(self, type_, **kw): ... + def visit_INT4RANGE(self, type_, **kw): ... + def visit_INT8RANGE(self, type_, **kw): ... + def visit_NUMRANGE(self, type_, **kw): ... + def visit_DATERANGE(self, type_, **kw): ... + def visit_TSRANGE(self, type_, **kw): ... + def visit_TSTZRANGE(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_enum(self, type_, **kw): ... + def visit_ENUM(self, type_, identifier_preparer: Any | None = ..., **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_INTERVAL(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_UUID(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_BYTEA(self, type_, **kw): ... + def visit_ARRAY(self, type_, **kw): ... + +class PGIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + def format_type(self, type_, use_schema: bool = ...): ... + +class PGInspector(reflection.Inspector): + def get_table_oid(self, table_name, schema: Any | None = ...): ... + def get_enums(self, schema: Any | None = ...): ... + def get_foreign_table_names(self, schema: Any | None = ...): ... 
+ def get_view_names(self, schema: Any | None = ..., include=...): ... + +class CreateEnumType(_CreateDropBase): + __visit_name__: str + +class DropEnumType(_CreateDropBase): + __visit_name__: str + +class PGExecutionContext(default.DefaultExecutionContext): + def fire_sequence(self, seq, type_): ... + def get_insert_default(self, column): ... + def should_autocommit_text(self, statement): ... + +class PGReadOnlyConnectionCharacteristic(characteristics.ConnectionCharacteristic): + transactional: bool + def reset_characteristic(self, dialect, dbapi_conn) -> None: ... + def set_characteristic(self, dialect, dbapi_conn, value) -> None: ... + def get_characteristic(self, dialect, dbapi_conn): ... + +class PGDeferrableConnectionCharacteristic(characteristics.ConnectionCharacteristic): + transactional: bool + def reset_characteristic(self, dialect, dbapi_conn) -> None: ... + def set_characteristic(self, dialect, dbapi_conn, value) -> None: ... + def get_characteristic(self, dialect, dbapi_conn): ... 
+ +class PGDialect(default.DefaultDialect): + name: str + supports_statement_cache: bool + supports_alter: bool + max_identifier_length: int + supports_sane_rowcount: bool + supports_native_enum: bool + supports_native_boolean: bool + supports_smallserial: bool + supports_sequences: bool + sequences_optional: bool + preexecute_autoincrement_sequences: bool + postfetch_lastrowid: bool + supports_comments: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_multivalues_insert: bool + supports_identity_columns: bool + default_paramstyle: str + ischema_names: Any + colspecs: Any + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + inspector: Any + isolation_level: Any + implicit_returning: bool + full_returning: bool + connection_characteristics: Any + construct_arguments: Any + reflection_options: Any + def __init__( + self, isolation_level: Any | None = ..., json_serializer: Any | None = ..., json_deserializer: Any | None = ..., **kwargs + ) -> None: ... + def initialize(self, connection) -> None: ... + def on_connect(self): ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection) -> None: ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection): ... + def has_schema(self, connection, schema): ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... 
# type: ignore[override] + def has_sequence(self, connection, sequence_name, schema: Any | None = ...): ... # type: ignore[override] + def has_type(self, connection, type_name, schema: Any | None = ...): ... + def get_table_oid(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_names(self, connection, schema: Any | None = ..., include=..., **kw): ... + def get_sequence_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Any | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_foreign_keys( + self, connection, table_name, schema: Any | None = ..., postgresql_ignore_search_path: bool = ..., **kw + ): ... + def get_indexes(self, connection, table_name, schema, **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_table_comment(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_check_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/dml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/dml.pyi new file mode 100644 index 000000000000..cfa9b38b3dad --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/dml.pyi @@ -0,0 +1,47 @@ +from typing import Any + +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...util import memoized_property + +class Insert(StandardInsert): + stringify_dialect: str + inherit_cache: bool + @memoized_property + def excluded(self): ... + def on_conflict_do_update( + self, + constraint: Any | None = ..., + index_elements: Any | None = ..., + index_where: Any | None = ..., + set_: Any | None = ..., + where: Any | None = ..., + ) -> None: ... + def on_conflict_do_nothing( + self, constraint: Any | None = ..., index_elements: Any | None = ..., index_where: Any | None = ... + ) -> None: ... + +insert: Any + +class OnConflictClause(ClauseElement): + stringify_dialect: str + constraint_target: Any + inferred_target_elements: Any + inferred_target_whereclause: Any + def __init__(self, constraint: Any | None = ..., index_elements: Any | None = ..., index_where: Any | None = ...) -> None: ... + +class OnConflictDoNothing(OnConflictClause): + __visit_name__: str + +class OnConflictDoUpdate(OnConflictClause): + __visit_name__: str + update_values_to_set: Any + update_whereclause: Any + def __init__( + self, + constraint: Any | None = ..., + index_elements: Any | None = ..., + index_where: Any | None = ..., + set_: Any | None = ..., + where: Any | None = ..., + ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ext.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ext.pyi new file mode 100644 index 000000000000..66fd97542c54 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ext.pyi @@ -0,0 +1,27 @@ +from typing import Any + +from ...sql import expression +from ...sql.schema import ColumnCollectionConstraint + +class aggregate_order_by(expression.ColumnElement[Any]): + __visit_name__: str + stringify_dialect: str + inherit_cache: bool + target: Any + type: Any + order_by: Any + def __init__(self, target, *order_by) -> None: ... + def self_group(self, against: Any | None = ...): ... + def get_children(self, **kwargs): ... + +class ExcludeConstraint(ColumnCollectionConstraint): + __visit_name__: str + where: Any + inherit_cache: bool + create_drop_stringify_dialect: str + operators: Any + using: Any + ops: Any + def __init__(self, *elements, **kw) -> None: ... + +def array_agg(*arg, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/hstore.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/hstore.pyi new file mode 100644 index 000000000000..df92db4558c1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/hstore.pyi @@ -0,0 +1,68 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from ...sql import functions as sqlfunc + +class HSTORE(sqltypes.Indexable, sqltypes.Concatenable, sqltypes.TypeEngine): + __visit_name__: str + hashable: bool + text_type: Any + def __init__(self, text_type: Any | None = ...) -> None: ... + + class Comparator(sqltypes.Indexable.Comparator, sqltypes.Concatenable.Comparator): + def has_key(self, other): ... + def has_all(self, other): ... 
+ def has_any(self, other): ... + def contains(self, other, **kwargs): ... + def contained_by(self, other): ... + def defined(self, key): ... + def delete(self, key): ... + def slice(self, array): ... + def keys(self): ... + def vals(self): ... + def array(self): ... + def matrix(self): ... + comparator_factory: Any + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class hstore(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreDefinedFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreDeleteFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreSliceFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreKeysFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreValsFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreArrayFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool + +class _HStoreMatrixFunction(sqlfunc.GenericFunction): + type: Any + name: str + inherit_cache: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/json.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/json.pyi new file mode 100644 index 000000000000..fe4c63d39d34 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/json.pyi @@ -0,0 +1,27 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +class JSONPathType(sqltypes.JSON.JSONPathType): + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSON(sqltypes.JSON): + astext_type: Any + def __init__(self, none_as_null: bool = ..., astext_type: Any | None = ...) -> None: ... 
+ + class Comparator(sqltypes.JSON.Comparator): + @property + def astext(self): ... + comparator_factory: Any + +class JSONB(JSON): + __visit_name__: str + + class Comparator(JSON.Comparator): + def has_key(self, other): ... + def has_all(self, other): ... + def has_any(self, other): ... + def contains(self, other, **kwargs): ... + def contained_by(self, other): ... + comparator_factory: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pg8000.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pg8000.pyi new file mode 100644 index 000000000000..fc60c10383d7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pg8000.pyi @@ -0,0 +1,134 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from .array import ARRAY as PGARRAY +from .base import ENUM, INTERVAL, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer +from .json import JSON, JSONB, JSONPathType + +class _PGNumeric(sqltypes.Numeric): + def result_processor(self, dialect, coltype): ... + +class _PGNumericNoBind(_PGNumeric): + def bind_processor(self, dialect) -> None: ... + +class _PGJSON(JSON): + def result_processor(self, dialect, coltype) -> None: ... + def get_dbapi_type(self, dbapi): ... + +class _PGJSONB(JSONB): + def result_processor(self, dialect, coltype) -> None: ... + def get_dbapi_type(self, dbapi): ... + +class _PGJSONIndexType(sqltypes.JSON.JSONIndexType): + def get_dbapi_type(self, dbapi) -> None: ... + +class _PGJSONIntIndexType(sqltypes.JSON.JSONIntIndexType): + def get_dbapi_type(self, dbapi): ... + +class _PGJSONStrIndexType(sqltypes.JSON.JSONStrIndexType): + def get_dbapi_type(self, dbapi): ... + +class _PGJSONPathType(JSONPathType): + def get_dbapi_type(self, dbapi): ... + +class _PGUUID(UUID): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... 
+ +class _PGEnum(ENUM): + def get_dbapi_type(self, dbapi): ... + +class _PGInterval(INTERVAL): + def get_dbapi_type(self, dbapi): ... + @classmethod + def adapt_emulated_to_native(cls, interval, **kw): ... + +class _PGTimeStamp(sqltypes.DateTime): + def get_dbapi_type(self, dbapi): ... + +class _PGTime(sqltypes.Time): + def get_dbapi_type(self, dbapi): ... + +class _PGInteger(sqltypes.Integer): + def get_dbapi_type(self, dbapi): ... + +class _PGSmallInteger(sqltypes.SmallInteger): + def get_dbapi_type(self, dbapi): ... + +class _PGNullType(sqltypes.NullType): + def get_dbapi_type(self, dbapi): ... + +class _PGBigInteger(sqltypes.BigInteger): + def get_dbapi_type(self, dbapi): ... + +class _PGBoolean(sqltypes.Boolean): + def get_dbapi_type(self, dbapi): ... + +class _PGARRAY(PGARRAY): + def bind_expression(self, bindvalue): ... + +class PGExecutionContext_pg8000(PGExecutionContext): + def create_server_side_cursor(self): ... + def pre_exec(self) -> None: ... + +class ServerSideCursor: + server_side: bool + ident: Any + cursor: Any + def __init__(self, cursor, ident) -> None: ... + @property + def connection(self): ... + @property + def rowcount(self): ... + @property + def description(self): ... + def execute(self, operation, args=..., stream: Any | None = ...): ... + def executemany(self, operation, param_sets): ... + def fetchone(self): ... + def fetchmany(self, num: Any | None = ...): ... + def fetchall(self): ... + def close(self) -> None: ... + def setinputsizes(self, *sizes) -> None: ... + def setoutputsize(self, size, column: Any | None = ...) -> None: ... + +class PGCompiler_pg8000(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): ... + +class PGIdentifierPreparer_pg8000(PGIdentifierPreparer): + def __init__(self, *args, **kwargs) -> None: ... 
+ +class PGDialect_pg8000(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + default_paramstyle: str + supports_sane_multi_rowcount: bool + statement_compiler: Any + preparer: Any + supports_server_side_cursors: bool + use_setinputsizes: bool + description_encoding: Any + colspecs: Any + client_encoding: Any + def __init__(self, client_encoding: Any | None = ..., **kwargs) -> None: ... + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + def set_isolation_level(self, connection, level) -> None: ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection): ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection): ... + def set_client_encoding(self, connection, client_encoding) -> None: ... + def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ... + def do_begin_twophase(self, connection, xid) -> None: ... + def do_prepare_twophase(self, connection, xid) -> None: ... + def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ... + def do_recover_twophase(self, connection): ... + def on_connect(self): ... 
+ +dialect = PGDialect_pg8000 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/provision.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/compat.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/provision.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2.pyi new file mode 100644 index 000000000000..7a5fa9941133 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2.pyi @@ -0,0 +1,95 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from .array import ARRAY as PGARRAY +from .base import ENUM, UUID, PGCompiler, PGDialect, PGExecutionContext, PGIdentifierPreparer +from .hstore import HSTORE +from .json import JSON, JSONB + +logger: Any + +class _PGNumeric(sqltypes.Numeric): + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _PGEnum(ENUM): + def result_processor(self, dialect, coltype): ... + +class _PGHStore(HSTORE): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGARRAY(PGARRAY): + def bind_expression(self, bindvalue): ... + +class _PGJSON(JSON): + def result_processor(self, dialect, coltype) -> None: ... + +class _PGJSONB(JSONB): + def result_processor(self, dialect, coltype) -> None: ... + +class _PGUUID(UUID): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class PGExecutionContext_psycopg2(PGExecutionContext): + def create_server_side_cursor(self): ... 
+ cursor_fetch_strategy: Any + def post_exec(self) -> None: ... + +class PGCompiler_psycopg2(PGCompiler): ... +class PGIdentifierPreparer_psycopg2(PGIdentifierPreparer): ... + +EXECUTEMANY_PLAIN: Any +EXECUTEMANY_BATCH: Any +EXECUTEMANY_VALUES: Any +EXECUTEMANY_VALUES_PLUS_BATCH: Any + +class PGDialect_psycopg2(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_server_side_cursors: bool + default_paramstyle: str + supports_sane_multi_rowcount: bool + statement_compiler: Any + preparer: Any + psycopg2_version: Any + engine_config_types: Any + colspecs: Any + use_native_unicode: Any + use_native_hstore: Any + use_native_uuid: Any + supports_unicode_binds: Any + client_encoding: Any + executemany_mode: Any + insert_executemany_returning: bool + executemany_batch_page_size: Any + executemany_values_page_size: Any + def __init__( + self, + use_native_unicode: bool = ..., + client_encoding: Any | None = ..., + use_native_hstore: bool = ..., + use_native_uuid: bool = ..., + executemany_mode: str = ..., + executemany_batch_page_size: int = ..., + executemany_values_page_size: int = ..., + **kwargs, + ) -> None: ... + def initialize(self, connection) -> None: ... + @classmethod + def dbapi(cls): ... + def set_isolation_level(self, connection, level) -> None: ... + def set_readonly(self, connection, value) -> None: ... + def get_readonly(self, connection): ... + def set_deferrable(self, connection, value) -> None: ... + def get_deferrable(self, connection): ... + def do_ping(self, dbapi_connection): ... + def on_connect(self): ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = PGDialect_psycopg2 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2cffi.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2cffi.pyi new file mode 100644 index 000000000000..4456b3294a70 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/psycopg2cffi.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from .psycopg2 import PGDialect_psycopg2 + +class PGDialect_psycopg2cffi(PGDialect_psycopg2): + driver: str + supports_unicode_statements: bool + supports_statement_cache: bool + FEATURE_VERSION_MAP: Any + @classmethod + def dbapi(cls): ... + +dialect = PGDialect_psycopg2cffi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pygresql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pygresql.pyi new file mode 100644 index 000000000000..a6f0d861b361 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pygresql.pyi @@ -0,0 +1,52 @@ +from typing import Any + +from ...types import Numeric +from .base import UUID, PGCompiler, PGDialect, PGIdentifierPreparer +from .hstore import HSTORE +from .json import JSON, JSONB + +class _PGNumeric(Numeric): + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype): ... + +class _PGHStore(HSTORE): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGJSON(JSON): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGJSONB(JSONB): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _PGUUID(UUID): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... 
+ +class _PGCompiler(PGCompiler): + def visit_mod_binary(self, binary, operator, **kw): ... + def post_process_text(self, text): ... + +class _PGIdentifierPreparer(PGIdentifierPreparer): ... + +class PGDialect_pygresql(PGDialect): + driver: str + supports_statement_cache: bool + statement_compiler: Any + preparer: Any + @classmethod + def dbapi(cls): ... + colspecs: Any + dbapi_version: Any + supports_unicode_statements: bool + supports_unicode_binds: bool + has_native_hstore: Any + has_native_json: Any + has_native_uuid: Any + def __init__(self, **kwargs) -> None: ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = PGDialect_pygresql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pypostgresql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pypostgresql.pyi new file mode 100644 index 000000000000..1b5bed220c5c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/pypostgresql.pyi @@ -0,0 +1,31 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from ...util import memoized_property +from .base import PGDialect, PGExecutionContext + +class PGNumeric(sqltypes.Numeric): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class PGExecutionContext_pypostgresql(PGExecutionContext): ... + +class PGDialect_pypostgresql(PGDialect): + driver: str + supports_statement_cache: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + description_encoding: Any + default_paramstyle: str + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + @memoized_property + def dbapi_exception_translation_map(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... 
+ +dialect = PGDialect_pypostgresql diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ranges.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ranges.pyi new file mode 100644 index 000000000000..35146308928b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/postgresql/ranges.pyi @@ -0,0 +1,36 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +class RangeOperators: + class comparator_factory(sqltypes.Concatenable.Comparator): + def __ne__(self, other): ... + def contains(self, other, **kw): ... + def contained_by(self, other): ... + def overlaps(self, other): ... + def strictly_left_of(self, other): ... + __lshift__: Any + def strictly_right_of(self, other): ... + __rshift__: Any + def not_extend_right_of(self, other): ... + def not_extend_left_of(self, other): ... + def adjacent_to(self, other): ... + def __add__(self, other): ... 
+ +class INT4RANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class INT8RANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class NUMRANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class DATERANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class TSRANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str + +class TSTZRANGE(RangeOperators, sqltypes.TypeEngine): + __visit_name__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/__init__.pyi new file mode 100644 index 000000000000..7dcb0cd7607e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/__init__.pyi @@ -0,0 +1,45 @@ +from typing import Any + +from .base import ( + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INTEGER as INTEGER, + JSON as JSON, + NUMERIC as NUMERIC, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) +from .dml import Insert as Insert, insert as insert + +__all__ = ( + "BLOB", + "BOOLEAN", + "CHAR", + "DATE", + "DATETIME", + "DECIMAL", + "FLOAT", + "INTEGER", + "JSON", + "NUMERIC", + "SMALLINT", + "TEXT", + "TIME", + "TIMESTAMP", + "VARCHAR", + "REAL", + "Insert", + "insert", + "dialect", +) + +dialect: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/aiosqlite.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/aiosqlite.pyi new file mode 100644 index 000000000000..dfc3247d5ea1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/aiosqlite.pyi @@ -0,0 +1,72 @@ +from typing import Any + 
+from ...engine import AdaptedConnection +from .base import SQLiteExecutionContext +from .pysqlite import SQLiteDialect_pysqlite + +class AsyncAdapt_aiosqlite_cursor: + server_side: bool + await_: Any + arraysize: int + rowcount: int + description: Any + def __init__(self, adapt_connection) -> None: ... + def close(self) -> None: ... + lastrowid: int + def execute(self, operation, parameters: Any | None = ...) -> None: ... + def executemany(self, operation, seq_of_parameters) -> None: ... + def setinputsizes(self, *inputsizes) -> None: ... + def __iter__(self): ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiosqlite_ss_cursor(AsyncAdapt_aiosqlite_cursor): + server_side: bool + def __init__(self, *arg, **kw) -> None: ... + def close(self) -> None: ... + def fetchone(self): ... + def fetchmany(self, size: Any | None = ...): ... + def fetchall(self): ... + +class AsyncAdapt_aiosqlite_connection(AdaptedConnection): + await_: Any + dbapi: Any + def __init__(self, dbapi, connection) -> None: ... + @property + def isolation_level(self): ... + @isolation_level.setter + def isolation_level(self, value) -> None: ... + def create_function(self, *args, **kw) -> None: ... + def cursor(self, server_side: bool = ...): ... + def execute(self, *args, **kw): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def close(self) -> None: ... + +class AsyncAdaptFallback_aiosqlite_connection(AsyncAdapt_aiosqlite_connection): + await_: Any + +class AsyncAdapt_aiosqlite_dbapi: + aiosqlite: Any + sqlite: Any + paramstyle: str + def __init__(self, aiosqlite, sqlite) -> None: ... + def connect(self, *arg, **kw): ... + +class SQLiteExecutionContext_aiosqlite(SQLiteExecutionContext): + def create_server_side_cursor(self): ... 
+ +class SQLiteDialect_aiosqlite(SQLiteDialect_pysqlite): + driver: str + supports_statement_cache: bool + is_async: bool + supports_server_side_cursors: bool + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def is_disconnect(self, e, connection, cursor): ... + def get_driver_connection(self, connection): ... + +dialect = SQLiteDialect_aiosqlite diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/base.pyi new file mode 100644 index 000000000000..31efadb6c920 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/base.pyi @@ -0,0 +1,142 @@ +from typing import Any + +import sqlalchemy.types as sqltypes + +from ...engine import default +from ...sql import compiler +from ...types import ( + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INTEGER as INTEGER, + NUMERIC as NUMERIC, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIMESTAMP as TIMESTAMP, + VARCHAR as VARCHAR, +) +from .json import JSON as JSON + +class _SQliteJson(JSON): + def result_processor(self, dialect, coltype): ... + +class _DateTimeMixin: + def __init__(self, storage_format: Any | None = ..., regexp: Any | None = ..., **kw) -> None: ... + @property + def format_is_text_affinity(self): ... + def adapt(self, cls, **kw): ... + def literal_processor(self, dialect): ... + +class DATETIME(_DateTimeMixin, sqltypes.DateTime): + def __init__(self, *args, **kwargs) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class DATE(_DateTimeMixin, sqltypes.Date): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class TIME(_DateTimeMixin, sqltypes.Time): + def __init__(self, *args, **kwargs) -> None: ... 
+ def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +colspecs: Any +ischema_names: Any + +class SQLiteCompiler(compiler.SQLCompiler): + extract_map: Any + def visit_now_func(self, fn, **kw): ... + def visit_localtimestamp_func(self, func, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def visit_char_length_func(self, fn, **kw): ... + def visit_cast(self, cast, **kwargs): ... + def visit_extract(self, extract, **kw): ... + def limit_clause(self, select, **kw): ... + def for_update_clause(self, select, **kw): ... + def visit_is_distinct_from_binary(self, binary, operator, **kw): ... + def visit_is_not_distinct_from_binary(self, binary, operator, **kw): ... + def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_empty_set_op_expr(self, type_, expand_op): ... + def visit_empty_set_expr(self, element_types): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_on_conflict_do_nothing(self, on_conflict, **kw): ... + def visit_on_conflict_do_update(self, on_conflict, **kw): ... + +class SQLiteDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_primary_key_constraint(self, constraint): ... + def visit_unique_constraint(self, constraint): ... + def visit_check_constraint(self, constraint): ... + def visit_column_check_constraint(self, constraint): ... + def visit_foreign_key_constraint(self, constraint): ... + def define_constraint_remote_table(self, constraint, table, preparer): ... + def visit_create_index(self, create, include_schema: bool = ..., include_table_schema: bool = ...): ... # type: ignore[override] + def post_create_table(self, table): ... 
+ +class SQLiteTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): ... + def visit_DATETIME(self, type_, **kw): ... + def visit_DATE(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_JSON(self, type_, **kw): ... + +class SQLiteIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + +class SQLiteExecutionContext(default.DefaultExecutionContext): ... + +class SQLiteDialect(default.DefaultDialect): + name: str + supports_alter: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_cast: bool + supports_multivalues_insert: bool + tuple_in_values: bool + supports_statement_cache: bool + default_paramstyle: str + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + ischema_names: Any + colspecs: Any + isolation_level: Any + construct_arguments: Any + native_datetime: Any + def __init__( + self, + isolation_level: Any | None = ..., + native_datetime: bool = ..., + json_serializer: Any | None = ..., + json_deserializer: Any | None = ..., + _json_serializer: Any | None = ..., + _json_deserializer: Any | None = ..., + **kwargs, + ) -> None: ... + def set_isolation_level(self, connection, level) -> None: ... + def get_isolation_level(self, connection): ... + def on_connect(self): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_temp_table_names(self, connection, **kw): ... + def get_temp_view_names(self, connection, **kw): ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... # type: ignore[override] + def get_view_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Any | None = ..., **kw): ... 
+ def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_unique_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_check_constraints(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Any | None = ..., **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/dml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/dml.pyi new file mode 100644 index 000000000000..208ceca97e86 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/dml.pyi @@ -0,0 +1,35 @@ +from typing import Any + +from ...sql.dml import Insert as StandardInsert +from ...sql.elements import ClauseElement +from ...util import memoized_property + +class Insert(StandardInsert): + stringify_dialect: str + inherit_cache: bool + @memoized_property + def excluded(self): ... + def on_conflict_do_update( + self, index_elements: Any | None = ..., index_where: Any | None = ..., set_: Any | None = ..., where: Any | None = ... + ) -> None: ... + def on_conflict_do_nothing(self, index_elements: Any | None = ..., index_where: Any | None = ...) -> None: ... + +insert: Any + +class OnConflictClause(ClauseElement): + stringify_dialect: str + constraint_target: Any + inferred_target_elements: Any + inferred_target_whereclause: Any + def __init__(self, index_elements: Any | None = ..., index_where: Any | None = ...) -> None: ... 
+ +class OnConflictDoNothing(OnConflictClause): + __visit_name__: str + +class OnConflictDoUpdate(OnConflictClause): + __visit_name__: str + update_values_to_set: Any + update_whereclause: Any + def __init__( + self, index_elements: Any | None = ..., index_where: Any | None = ..., set_: Any | None = ..., where: Any | None = ... + ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/json.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/json.pyi new file mode 100644 index 000000000000..2ced3beece0d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/json.pyi @@ -0,0 +1,10 @@ +from ...types import JSON as _JSON + +class JSON(_JSON): ... + +class _FormatTypeMixin: + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + +class JSONIndexType(_FormatTypeMixin, _JSON.JSONIndexType): ... +class JSONPathType(_FormatTypeMixin, _JSON.JSONPathType): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/contrib/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/provision.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/contrib/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/provision.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlcipher.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlcipher.pyi new file mode 100644 index 000000000000..cf2d8738ed15 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlcipher.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from .pysqlite import SQLiteDialect_pysqlite + +class SQLiteDialect_pysqlcipher(SQLiteDialect_pysqlite): + driver: str + supports_statement_cache: bool + pragmas: Any + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def on_connect_url(self, url): ... + def create_connect_args(self, url): ... + +dialect = SQLiteDialect_pysqlcipher diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlite.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlite.pyi new file mode 100644 index 000000000000..5703abbdf573 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sqlite/pysqlite.pyi @@ -0,0 +1,28 @@ +from typing import Any + +from .base import DATE, DATETIME, SQLiteDialect + +class _SQLite_pysqliteTimeStamp(DATETIME): + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _SQLite_pysqliteDate(DATE): + def bind_processor(self, dialect): ... 
+ def result_processor(self, dialect, coltype): ... + +class SQLiteDialect_pysqlite(SQLiteDialect): + default_paramstyle: str + supports_statement_cache: bool + colspecs: Any + description_encoding: Any + driver: str + @classmethod + def dbapi(cls): ... + @classmethod + def get_pool_class(cls, url): ... + def set_isolation_level(self, connection, level): ... + def on_connect(self): ... + def create_connect_args(self, url): ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = SQLiteDialect_pysqlite diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/__init__.pyi new file mode 100644 index 000000000000..3b97262f2a6a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/__init__.pyi @@ -0,0 +1,58 @@ +from typing import Any + +from .base import ( + BIGINT as BIGINT, + BINARY as BINARY, + BIT as BIT, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + FLOAT as FLOAT, + IMAGE as IMAGE, + INT as INT, + INTEGER as INTEGER, + MONEY as MONEY, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + SMALLINT as SMALLINT, + SMALLMONEY as SMALLMONEY, + TEXT as TEXT, + TIME as TIME, + TINYINT as TINYINT, + UNICHAR as UNICHAR, + UNITEXT as UNITEXT, + UNIVARCHAR as UNIVARCHAR, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, +) + +__all__ = ( + "CHAR", + "VARCHAR", + "TIME", + "NCHAR", + "NVARCHAR", + "TEXT", + "DATE", + "DATETIME", + "FLOAT", + "NUMERIC", + "BIGINT", + "INT", + "INTEGER", + "SMALLINT", + "BINARY", + "VARBINARY", + "UNITEXT", + "UNICHAR", + "UNIVARCHAR", + "IMAGE", + "BIT", + "MONEY", + "SMALLMONEY", + "TINYINT", + "dialect", +) + +dialect: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/base.pyi 
b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/base.pyi new file mode 100644 index 000000000000..40d48d646b76 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/base.pyi @@ -0,0 +1,135 @@ +from typing import Any + +from sqlalchemy import types as sqltypes +from sqlalchemy.engine import default, reflection +from sqlalchemy.sql import compiler +from sqlalchemy.types import ( + BIGINT as BIGINT, + BINARY as BINARY, + CHAR as CHAR, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INT as INT, + INTEGER as INTEGER, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + Unicode as Unicode, +) + +RESERVED_WORDS: Any + +class _SybaseUnitypeMixin: + def result_processor(self, dialect, coltype): ... + +class UNICHAR(_SybaseUnitypeMixin, sqltypes.Unicode): + __visit_name__: str + +class UNIVARCHAR(_SybaseUnitypeMixin, sqltypes.Unicode): + __visit_name__: str + +class UNITEXT(_SybaseUnitypeMixin, sqltypes.UnicodeText): + __visit_name__: str + +class TINYINT(sqltypes.Integer): + __visit_name__: str + +class BIT(sqltypes.TypeEngine): + __visit_name__: str + +class MONEY(sqltypes.TypeEngine): + __visit_name__: str + +class SMALLMONEY(sqltypes.TypeEngine): + __visit_name__: str + +class UNIQUEIDENTIFIER(sqltypes.TypeEngine): + __visit_name__: str + +class IMAGE(sqltypes.LargeBinary): + __visit_name__: str + +class SybaseTypeCompiler(compiler.GenericTypeCompiler): + def visit_large_binary(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_UNICHAR(self, type_, **kw): ... + def visit_UNIVARCHAR(self, type_, **kw): ... + def visit_UNITEXT(self, type_, **kw): ... + def visit_TINYINT(self, type_, **kw): ... 
+ def visit_IMAGE(self, type_, **kw): ... + def visit_BIT(self, type_, **kw): ... + def visit_MONEY(self, type_, **kw): ... + def visit_SMALLMONEY(self, type_, **kw): ... + def visit_UNIQUEIDENTIFIER(self, type_, **kw): ... + +ischema_names: Any + +class SybaseInspector(reflection.Inspector): + def __init__(self, conn) -> None: ... + def get_table_id(self, table_name, schema: Any | None = ...): ... + +class SybaseExecutionContext(default.DefaultExecutionContext): + def set_ddl_autocommit(self, connection, value) -> None: ... + def pre_exec(self) -> None: ... + def post_exec(self) -> None: ... + def get_lastrowid(self): ... + +class SybaseSQLCompiler(compiler.SQLCompiler): + ansi_bind_rules: bool + extract_map: Any + def get_from_hint_text(self, table, text): ... + def limit_clause(self, select, **kw): ... + def visit_extract(self, extract, **kw): ... + def visit_now_func(self, fn, **kw): ... + def for_update_clause(self, select): ... + def order_by_clause(self, select, **kw): ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def delete_extra_from_clause(self, delete_stmt, from_table, extra_froms, from_hints, **kw): ... + +class SybaseDDLCompiler(compiler.DDLCompiler): + def get_column_specification(self, column, **kwargs): ... + def visit_drop_index(self, drop): ... + +class SybaseIdentifierPreparer(compiler.IdentifierPreparer): + reserved_words: Any + +class SybaseDialect(default.DefaultDialect): + name: str + supports_unicode_statements: bool + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + supports_statement_cache: bool + supports_native_boolean: bool + supports_unicode_binds: bool + postfetch_lastrowid: bool + colspecs: Any + ischema_names: Any + type_compiler: Any + statement_compiler: Any + ddl_compiler: Any + preparer: Any + inspector: Any + construct_arguments: Any + def __init__(self, *args, **kwargs) -> None: ... + max_identifier_length: int + def initialize(self, connection) -> None: ... 
+ def get_table_id(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_columns(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_foreign_keys(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_indexes(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw): ... + def get_schema_names(self, connection, **kw): ... + def get_table_names(self, connection, schema: Any | None = ..., **kw): ... + def get_view_definition(self, connection, view_name, schema: Any | None = ..., **kw): ... + def get_view_names(self, connection, schema: Any | None = ..., **kw): ... + def has_table(self, connection, table_name, schema: Any | None = ...): ... # type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/mxodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/mxodbc.pyi new file mode 100644 index 000000000000..596496ea18f5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/mxodbc.pyi @@ -0,0 +1,9 @@ +from sqlalchemy.connectors.mxodbc import MxODBCConnector +from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext + +class SybaseExecutionContext_mxodbc(SybaseExecutionContext): ... 
+ +class SybaseDialect_mxodbc(MxODBCConnector, SybaseDialect): + supports_statement_cache: bool + +dialect = SybaseDialect_mxodbc diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pyodbc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pyodbc.pyi new file mode 100644 index 000000000000..2c870d14cafd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pyodbc.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from sqlalchemy import types as sqltypes +from sqlalchemy.connectors.pyodbc import PyODBCConnector +from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext + +class _SybNumeric_pyodbc(sqltypes.Numeric): + def bind_processor(self, dialect): ... + +class SybaseExecutionContext_pyodbc(SybaseExecutionContext): + def set_ddl_autocommit(self, connection, value) -> None: ... + +class SybaseDialect_pyodbc(PyODBCConnector, SybaseDialect): # type: ignore # argument disparities between base classes + supports_statement_cache: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + +dialect = SybaseDialect_pyodbc diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pysybase.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pysybase.pyi new file mode 100644 index 000000000000..ae8c0591f00e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/dialects/sybase/pysybase.pyi @@ -0,0 +1,27 @@ +from typing import Any + +from sqlalchemy import types as sqltypes +from sqlalchemy.dialects.sybase.base import SybaseDialect, SybaseExecutionContext, SybaseSQLCompiler + +class _SybNumeric(sqltypes.Numeric): + def result_processor(self, dialect, type_): ... 
+ +class SybaseExecutionContext_pysybase(SybaseExecutionContext): + def set_ddl_autocommit(self, dbapi_connection, value) -> None: ... + def pre_exec(self) -> None: ... + +class SybaseSQLCompiler_pysybase(SybaseSQLCompiler): + def bindparam_string(self, name, **kw): ... + +class SybaseDialect_pysybase(SybaseDialect): + driver: str + statement_compiler: Any + supports_statement_cache: bool + colspecs: Any + @classmethod + def dbapi(cls): ... + def create_connect_args(self, url): ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def is_disconnect(self, e, connection, cursor): ... + +dialect = SybaseDialect_pysybase diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/__init__.pyi new file mode 100644 index 000000000000..48d019b651ac --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/__init__.pyi @@ -0,0 +1,46 @@ +from ..sql import ddl as ddl +from . 
import events as events, util as util +from .base import ( + Connection as Connection, + Engine as Engine, + NestedTransaction as NestedTransaction, + RootTransaction as RootTransaction, + Transaction as Transaction, + TwoPhaseTransaction as TwoPhaseTransaction, +) +from .create import create_engine as create_engine, engine_from_config as engine_from_config +from .cursor import ( + BaseCursorResult as BaseCursorResult, + BufferedColumnResultProxy as BufferedColumnResultProxy, + BufferedColumnRow as BufferedColumnRow, + BufferedRowResultProxy as BufferedRowResultProxy, + CursorResult as CursorResult, + FullyBufferedResultProxy as FullyBufferedResultProxy, + LegacyCursorResult as LegacyCursorResult, + ResultProxy as ResultProxy, +) +from .interfaces import ( + AdaptedConnection as AdaptedConnection, + Compiled as Compiled, + Connectable as Connectable, + CreateEnginePlugin as CreateEnginePlugin, + Dialect as Dialect, + ExceptionContext as ExceptionContext, + ExecutionContext as ExecutionContext, + TypeCompiler as TypeCompiler, +) +from .mock import create_mock_engine as create_mock_engine +from .reflection import Inspector as Inspector +from .result import ( + ChunkedIteratorResult as ChunkedIteratorResult, + FrozenResult as FrozenResult, + IteratorResult as IteratorResult, + MappingResult as MappingResult, + MergedResult as MergedResult, + Result as Result, + ScalarResult as ScalarResult, + result_tuple as result_tuple, +) +from .row import BaseRow as BaseRow, LegacyRow as LegacyRow, Row as Row, RowMapping as RowMapping +from .url import URL as URL, make_url as make_url +from .util import connection_memoize as connection_memoize diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/base.pyi new file mode 100644 index 000000000000..824499d311c3 --- /dev/null +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/base.pyi @@ -0,0 +1,217 @@ +from _typeshed import Self +from abc import abstractmethod +from collections.abc import Mapping +from types import TracebackType +from typing import Any, Callable, TypeVar, overload + +from ..dbapi import DBAPIConnection +from ..log import Identified, _EchoFlag, echo_property +from ..pool import Pool +from ..sql.compiler import Compiled +from ..sql.ddl import DDLElement +from ..sql.elements import ClauseElement +from ..sql.functions import FunctionElement +from ..sql.schema import DefaultGenerator +from .cursor import CursorResult +from .interfaces import Connectable as Connectable, Dialect, ExceptionContext +from .url import URL +from .util import TransactionalContext + +_T = TypeVar("_T") + +_Executable = ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled + +class Connection(Connectable): + engine: Engine + dialect: Dialect + should_close_with_result: bool + dispatch: Any + def __init__( + self, + engine: Engine, + connection: DBAPIConnection | None = ..., + close_with_result: bool = ..., + _branch_from: Any | None = ..., + _execution_options: Any | None = ..., + _dispatch: Any | None = ..., + _has_events: Any | None = ..., + _allow_revalidate: bool = ..., + ) -> None: ... + def schema_for_object(self, obj) -> str | None: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + def execution_options(self, **opt): ... + def get_execution_options(self): ... + @property + def closed(self) -> bool: ... + @property + def invalidated(self) -> bool: ... + @property + def connection(self) -> DBAPIConnection: ... + def get_isolation_level(self): ... + @property + def default_isolation_level(self): ... + @property + def info(self): ... + def connect(self, close_with_result: bool = ...): ... 
# type: ignore[override] + def invalidate(self, exception: Exception | None = ...) -> None: ... + def detach(self) -> None: ... + def begin(self) -> Transaction: ... + def begin_nested(self) -> Transaction | None: ... + def begin_twophase(self, xid: Any | None = ...) -> TwoPhaseTransaction: ... + def recover_twophase(self): ... + def rollback_prepared(self, xid, recover: bool = ...) -> None: ... + def commit_prepared(self, xid, recover: bool = ...) -> None: ... + def in_transaction(self) -> bool: ... + def in_nested_transaction(self) -> bool: ... + def get_transaction(self) -> Transaction | None: ... + def get_nested_transaction(self) -> Transaction | None: ... + def close(self) -> None: ... + @overload + def scalar(self, object_: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> Any: ... + @overload + def scalar(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ... + def scalars(self, object_, *multiparams, **params): ... + @overload # type: ignore[override] + def execute(self, statement: _Executable, *multiparams: Mapping[str, Any], **params) -> CursorResult: ... + @overload + def execute(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params) -> CursorResult: ... + def exec_driver_sql(self, statement: str, parameters: Any | None = ..., execution_options: Any | None = ...): ... + # TODO: + # def transaction(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def transaction(self, callable_: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... + # TODO: + # def run_callable(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def run_callable(self, callable_: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... 
+ +class ExceptionContextImpl(ExceptionContext): + engine: Any + connection: Any + sqlalchemy_exception: Any + original_exception: Any + execution_context: Any + statement: Any + parameters: Any + is_disconnect: Any + invalidate_pool_on_disconnect: Any + def __init__( + self, + exception, + sqlalchemy_exception, + engine, + connection, + cursor, + statement, + parameters, + context, + is_disconnect, + invalidate_pool_on_disconnect, + ) -> None: ... + +class Transaction(TransactionalContext): + def __init__(self, connection: Connection) -> None: ... + @property + def is_valid(self) -> bool: ... + def close(self) -> None: ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + # The following field are technically not defined on Transaction, but on + # all sub-classes. + @property + @abstractmethod + def connection(self) -> Connection: ... + @property + @abstractmethod + def is_active(self) -> bool: ... + +class MarkerTransaction(Transaction): + connection: Connection + @property + def is_active(self) -> bool: ... + +class RootTransaction(Transaction): + connection: Connection + is_active: bool + +class NestedTransaction(Transaction): + connection: Connection + is_active: bool + +class TwoPhaseTransaction(RootTransaction): + xid: Any + def __init__(self, connection: Connection, xid) -> None: ... + def prepare(self) -> None: ... + +class Engine(Connectable, Identified): + pool: Pool + url: URL + dialect: Dialect + logging_name: str # only exists if not None during initialization + echo: echo_property + hide_parameters: bool + def __init__( + self, + pool: Pool, + dialect: Dialect, + url: str | URL, + logging_name: str | None = ..., + echo: _EchoFlag = ..., + query_cache_size: int = ..., + execution_options: Mapping[str, Any] | None = ..., + hide_parameters: bool = ..., + ) -> None: ... + @property + def engine(self) -> Engine: ... + def clear_compiled_cache(self) -> None: ... + def update_execution_options(self, **opt) -> None: ... 
+ def execution_options(self, **opt): ... + def get_execution_options(self): ... + @property + def name(self) -> str: ... + @property + def driver(self): ... + def dispose(self) -> None: ... + + class _trans_ctx: + conn: Connection + transaction: Transaction + close_with_result: bool + def __init__(self, conn: Connection, transaction: Transaction, close_with_result: bool) -> None: ... + def __enter__(self) -> Connection: ... + def __exit__( + self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: ... + + def begin(self, close_with_result: bool = ...) -> _trans_ctx: ... + # TODO: + # def transaction(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T | None: ... + def transaction(self, callable_: Callable[..., _T], *args: Any, **kwargs: Any) -> _T | None: ... + # TODO: + # def run_callable(self, callable_: Callable[Concatenate[Connection, _P], _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def run_callable(self, callable_: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... + @overload # type: ignore[override] + def execute(self, statement: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> CursorResult: ... + @overload + def execute(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ... + @overload # type: ignore[override] + def scalar(self, statement: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> Any: ... + @overload + def scalar(self, statement: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ... + def connect(self, close_with_result: bool = ...) -> Connection: ... # type: ignore[override] + def table_names(self, schema: Any | None = ..., connection: Connection | None = ...): ... + def has_table(self, table_name: str, schema: Any | None = ...) -> bool: ... 
+ def raw_connection(self, _connection: Connection | None = ...) -> DBAPIConnection: ... + +class OptionEngineMixin: + url: URL + dialect: Dialect + logging_name: str + echo: bool + hide_parameters: bool + dispatch: Any + def __init__(self, proxied, execution_options) -> None: ... + pool: Pool + +class OptionEngine(OptionEngineMixin, Engine): ... # type: ignore[misc] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/characteristics.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/characteristics.pyi new file mode 100644 index 000000000000..ab5b5f63cddc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/characteristics.pyi @@ -0,0 +1,18 @@ +import abc + +from ..util import ABC + +class ConnectionCharacteristic(ABC, metaclass=abc.ABCMeta): + transactional: bool + @abc.abstractmethod + def reset_characteristic(self, dialect, dbapi_conn): ... + @abc.abstractmethod + def set_characteristic(self, dialect, dbapi_conn, value): ... + @abc.abstractmethod + def get_characteristic(self, dialect, dbapi_conn): ... + +class IsolationLevelCharacteristic(ConnectionCharacteristic): + transactional: bool + def reset_characteristic(self, dialect, dbapi_conn) -> None: ... + def set_characteristic(self, dialect, dbapi_conn, value) -> None: ... + def get_characteristic(self, dialect, dbapi_conn): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/create.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/create.pyi new file mode 100644 index 000000000000..40c6b29fd63a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/create.pyi @@ -0,0 +1,21 @@ +from collections.abc import Mapping +from typing import Any, overload +from typing_extensions import Literal + +from ..future.engine import Engine as FutureEngine +from .base import Engine +from .mock import MockConnection +from .url import URL + +# Further kwargs are forwarded to the engine, dialect, or pool. +@overload +def create_engine(url: URL | str, *, strategy: Literal["mock"], **kwargs) -> MockConnection: ... # type: ignore[misc] +@overload +def create_engine( + url: URL | str, *, module: Any | None = ..., enable_from_linting: bool = ..., future: Literal[True], **kwargs +) -> FutureEngine: ... +@overload +def create_engine( + url: URL | str, *, module: Any | None = ..., enable_from_linting: bool = ..., future: Literal[False] = ..., **kwargs +) -> Engine: ... +def engine_from_config(configuration: Mapping[str, Any], prefix: str = ..., **kwargs) -> Engine: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/cursor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/cursor.pyi new file mode 100644 index 000000000000..980290818edc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/cursor.pyi @@ -0,0 +1,127 @@ +import abc +from typing import Any + +from ..util import memoized_property +from .result import Result, ResultMetaData +from .row import LegacyRow + +MD_INDEX: int +MD_RESULT_MAP_INDEX: int +MD_OBJECTS: int +MD_LOOKUP_KEY: int +MD_RENDERED_NAME: int +MD_PROCESSOR: int +MD_UNTRANSLATED: int + +class CursorResultMetaData(ResultMetaData): + returns_rows: bool + case_sensitive: Any + def __init__(self, parent, cursor_description) -> None: ... + +class LegacyCursorResultMetaData(CursorResultMetaData): ... + +class ResultFetchStrategy: + alternate_cursor_description: Any + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...) -> None: ... + def fetchmany(self, result, dbapi_cursor, size: Any | None = ...) -> None: ... + def fetchall(self, result) -> None: ... + def handle_exception(self, result, dbapi_cursor, err) -> None: ... + +class NoCursorFetchStrategy(ResultFetchStrategy): + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Any | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class NoCursorDQLFetchStrategy(NoCursorFetchStrategy): ... +class NoCursorDMLFetchStrategy(NoCursorFetchStrategy): ... + +class CursorFetchStrategy(ResultFetchStrategy): + def soft_close(self, result, dbapi_cursor) -> None: ... 
+ def hard_close(self, result, dbapi_cursor) -> None: ... + def handle_exception(self, result, dbapi_cursor, err) -> None: ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Any | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class BufferedRowCursorFetchStrategy(CursorFetchStrategy): + def __init__(self, dbapi_cursor, execution_options, growth_factor: int = ..., initial_buffer: Any | None = ...) -> None: ... + @classmethod + def create(cls, result): ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Any | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class FullyBufferedCursorFetchStrategy(CursorFetchStrategy): + alternate_cursor_description: Any + def __init__(self, dbapi_cursor, alternate_description: Any | None = ..., initial_buffer: Any | None = ...) -> None: ... + def yield_per(self, result, dbapi_cursor, num) -> None: ... + def soft_close(self, result, dbapi_cursor) -> None: ... + def hard_close(self, result, dbapi_cursor) -> None: ... + def fetchone(self, result, dbapi_cursor, hard_close: bool = ...): ... + def fetchmany(self, result, dbapi_cursor, size: Any | None = ...): ... + def fetchall(self, result, dbapi_cursor): ... + +class _NoResultMetaData(ResultMetaData): + returns_rows: bool + @property + def keys(self) -> None: ... + +class _LegacyNoResultMetaData(_NoResultMetaData): + @property + def keys(self): ... + +class BaseCursorResult: + out_parameters: Any + closed: bool + context: Any + dialect: Any + cursor: Any + cursor_strategy: Any + connection: Any + def __init__(self, context, cursor_strategy, cursor_description): ... 
+ @property + def inserted_primary_key_rows(self): ... + @property + def inserted_primary_key(self): ... + def last_updated_params(self): ... + def last_inserted_params(self): ... + @property + def returned_defaults_rows(self): ... + @property + def returned_defaults(self): ... + def lastrow_has_defaults(self): ... + def postfetch_cols(self): ... + def prefetch_cols(self): ... + def supports_sane_rowcount(self): ... + def supports_sane_multi_rowcount(self): ... + @memoized_property + def rowcount(self): ... + @property + def lastrowid(self): ... + @property + def returns_rows(self): ... + @property + def is_insert(self): ... + +class CursorResult(BaseCursorResult, Result): + def merge(self, *others): ... + def close(self) -> None: ... + +class LegacyCursorResult(CursorResult): + def close(self) -> None: ... + +ResultProxy = LegacyCursorResult + +class BufferedRowResultProxy(ResultProxy): ... +class FullyBufferedResultProxy(ResultProxy): ... +class BufferedColumnRow(LegacyRow, metaclass=abc.ABCMeta): ... +class BufferedColumnResultProxy(ResultProxy): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/default.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/default.pyi new file mode 100644 index 000000000000..9e521bbb6b16 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/default.pyi @@ -0,0 +1,220 @@ +from typing import Any, ClassVar + +from .. import types as sqltypes +from ..util import memoized_property +from . 
import interfaces + +AUTOCOMMIT_REGEXP: Any +SERVER_SIDE_CURSOR_RE: Any +CACHE_HIT: Any +CACHE_MISS: Any +CACHING_DISABLED: Any +NO_CACHE_KEY: Any +NO_DIALECT_SUPPORT: Any + +class DefaultDialect(interfaces.Dialect): # type: ignore[misc] + execution_ctx_cls: ClassVar[type[interfaces.ExecutionContext]] + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + supports_alter: bool + supports_comments: bool + inline_comments: bool + use_setinputsizes: bool + supports_statement_cache: bool + default_sequence_base: int + execute_sequence_format: Any + supports_schemas: bool + supports_views: bool + supports_sequences: bool + sequences_optional: bool + preexecute_autoincrement_sequences: bool + supports_identity_columns: bool + postfetch_lastrowid: bool + implicit_returning: bool + full_returning: bool + insert_executemany_returning: bool + cte_follows_insert: bool + supports_native_enum: bool + supports_native_boolean: bool + non_native_boolean_check_constraint: bool + supports_simple_order_by_label: bool + tuple_in_values: bool + connection_characteristics: Any + engine_config_types: Any + supports_native_decimal: bool + supports_unicode_statements: bool + supports_unicode_binds: bool + returns_unicode_strings: Any + description_encoding: Any + name: str + max_identifier_length: int + isolation_level: Any + max_index_name_length: Any + max_constraint_name_length: Any + supports_sane_rowcount: bool + supports_sane_multi_rowcount: bool + colspecs: Any + default_paramstyle: str + supports_default_values: bool + supports_default_metavalue: bool + supports_empty_insert: bool + supports_multivalues_insert: bool + supports_is_distinct_from: bool + supports_server_side_cursors: bool + server_side_cursors: bool + supports_for_update_of: bool + server_version_info: Any + default_schema_name: Any + construct_arguments: Any + requires_name_normalize: bool + reflection_options: Any + dbapi_exception_translation_map: Any + is_async: bool + CACHE_HIT: Any + 
CACHE_MISS: Any + CACHING_DISABLED: Any + NO_CACHE_KEY: Any + NO_DIALECT_SUPPORT: Any + convert_unicode: Any + encoding: Any + positional: bool + dbapi: Any + paramstyle: Any + identifier_preparer: Any + case_sensitive: Any + label_length: Any + compiler_linting: Any + def __init__( + self, + convert_unicode: bool = ..., + encoding: str = ..., + paramstyle: Any | None = ..., + dbapi: Any | None = ..., + implicit_returning: Any | None = ..., + case_sensitive: bool = ..., + supports_native_boolean: Any | None = ..., + max_identifier_length: Any | None = ..., + label_length: Any | None = ..., + compiler_linting=..., + server_side_cursors: bool = ..., + **kwargs, + ) -> None: ... + @property + def dialect_description(self): ... + @property + def supports_sane_rowcount_returning(self): ... + @classmethod + def get_pool_class(cls, url): ... + def get_dialect_pool_class(self, url): ... + @classmethod + def load_provisioning(cls) -> None: ... + default_isolation_level: Any + def initialize(self, connection) -> None: ... + def on_connect(self) -> None: ... + def get_default_isolation_level(self, dbapi_conn): ... + def type_descriptor(self, typeobj): ... + def has_index(self, connection, table_name, index_name, schema: Any | None = ...): ... + def validate_identifier(self, ident) -> None: ... + def connect(self, *cargs, **cparams): ... + def create_connect_args(self, url): ... + def set_engine_execution_options(self, engine, opts) -> None: ... + def set_connection_execution_options(self, connection, opts) -> None: ... + def do_begin(self, dbapi_connection) -> None: ... + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def do_close(self, dbapi_connection) -> None: ... + def do_ping(self, dbapi_connection): ... + def create_xid(self): ... + def do_savepoint(self, connection, name) -> None: ... + def do_rollback_to_savepoint(self, connection, name) -> None: ... 
+ def do_release_savepoint(self, connection, name) -> None: ... + def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def do_execute(self, cursor, statement, parameters, context: Any | None = ...) -> None: ... + def do_execute_no_params(self, cursor, statement, context: Any | None = ...) -> None: ... # type: ignore[override] + def is_disconnect(self, e, connection, cursor): ... + def reset_isolation_level(self, dbapi_conn) -> None: ... + def normalize_name(self, name): ... + def denormalize_name(self, name): ... + def get_driver_connection(self, connection): ... + +class _RendersLiteral: + def literal_processor(self, dialect): ... + +class _StrDateTime(_RendersLiteral, sqltypes.DateTime): ... +class _StrDate(_RendersLiteral, sqltypes.Date): ... +class _StrTime(_RendersLiteral, sqltypes.Time): ... + +class StrCompileDialect(DefaultDialect): # type: ignore[misc] + statement_compiler: Any + ddl_compiler: Any + type_compiler: Any + preparer: Any + supports_statement_cache: bool + supports_identity_columns: bool + supports_sequences: bool + sequences_optional: bool + preexecute_autoincrement_sequences: bool + implicit_returning: bool + supports_native_boolean: bool + supports_multivalues_insert: bool + supports_simple_order_by_label: bool + colspecs: Any + +class DefaultExecutionContext(interfaces.ExecutionContext): + isinsert: bool + isupdate: bool + isdelete: bool + is_crud: bool + is_text: bool + isddl: bool + executemany: bool + compiled: Any + statement: Any + result_column_struct: Any + returned_default_rows: Any + execution_options: Any + include_set_input_sizes: Any + exclude_set_input_sizes: Any + cursor_fetch_strategy: Any + cache_stats: Any + invoked_statement: Any + cache_hit: Any + @memoized_property + def identifier_preparer(self): ... + @memoized_property + def engine(self): ... + @memoized_property + def postfetch_cols(self): ... + @memoized_property + def prefetch_cols(self): ... 
+ @memoized_property + def returning_cols(self) -> None: ... + @memoized_property + def no_parameters(self): ... + @memoized_property + def should_autocommit(self): ... + @property + def connection(self): ... + def should_autocommit_text(self, statement): ... + def create_cursor(self): ... + def create_default_cursor(self): ... + def create_server_side_cursor(self) -> None: ... + def pre_exec(self) -> None: ... + def get_out_parameter_values(self, names) -> None: ... + def post_exec(self) -> None: ... + def get_result_processor(self, type_, colname, coltype): ... + def get_lastrowid(self): ... + def handle_dbapi_exception(self, e) -> None: ... + @property + def rowcount(self): ... + def supports_sane_rowcount(self): ... + def supports_sane_multi_rowcount(self): ... + @memoized_property + def inserted_primary_key_rows(self): ... + def lastrow_has_defaults(self): ... + current_parameters: Any + def get_current_parameters(self, isolate_multiinsert_groups: bool = ...): ... + def get_insert_default(self, column): ... + def get_update_default(self, column): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/events.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/events.pyi new file mode 100644 index 000000000000..7cca8b27ce2b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/engine/events.pyi @@ -0,0 +1,29 @@ +from .. import event as event + +class ConnectionEvents(event.Events): + def before_execute(self, conn, clauseelement, multiparams, params, execution_options) -> None: ... + def after_execute(self, conn, clauseelement, multiparams, params, execution_options, result) -> None: ... + def before_cursor_execute(self, conn, cursor, statement, parameters, context, executemany) -> None: ... + def after_cursor_execute(self, conn, cursor, statement, parameters, context, executemany) -> None: ... + def handle_error(self, exception_context) -> None: ... 
+ def engine_connect(self, conn, branch) -> None: ... + def set_connection_execution_options(self, conn, opts) -> None: ... + def set_engine_execution_options(self, engine, opts) -> None: ... + def engine_disposed(self, engine) -> None: ... + def begin(self, conn) -> None: ... + def rollback(self, conn) -> None: ... + def commit(self, conn) -> None: ... + def savepoint(self, conn, name) -> None: ... + def rollback_savepoint(self, conn, name, context) -> None: ... + def release_savepoint(self, conn, name, context) -> None: ... + def begin_twophase(self, conn, xid) -> None: ... + def prepare_twophase(self, conn, xid) -> None: ... + def rollback_twophase(self, conn, xid, is_prepared) -> None: ... + def commit_twophase(self, conn, xid, is_prepared) -> None: ... + +class DialectEvents(event.Events): + def do_connect(self, dialect, conn_rec, cargs, cparams) -> None: ... + def do_executemany(self, cursor, statement, parameters, context) -> None: ... + def do_execute_no_params(self, cursor, statement, context) -> None: ... + def do_execute(self, cursor, statement, parameters, context) -> None: ... + def do_setinputsizes(self, inputsizes, cursor, statement, parameters, context) -> None: ... 
# --- new file: sqlalchemy/engine/interfaces.pyi ---
# Stubs for the core engine interfaces: Dialect, CreateEnginePlugin, etc.
from abc import abstractmethod
from collections.abc import Callable, Collection, Mapping
from typing import Any, ClassVar, overload

from ..dbapi import DBAPIConnection, DBAPICursor
from ..exc import StatementError
from ..sql.compiler import Compiled as Compiled, IdentifierPreparer, TypeCompiler as TypeCompiler
from ..sql.ddl import DDLElement
from ..sql.elements import ClauseElement
from ..sql.functions import FunctionElement
from ..sql.schema import DefaultGenerator
from .base import Connection, Engine
from .cursor import CursorResult
from .url import URL

class Dialect:
    # Sub-classes are required to have the following attributes:
    name: str
    driver: str
    positional: bool
    paramstyle: str
    encoding: str
    # NOTE(review): at runtime these hold compiler *classes*, so
    # `type[Compiled]` may be more precise than `Compiled` — verify upstream.
    statement_compiler: Compiled
    ddl_compiler: Compiled
    server_version_info: tuple[Any, ...]
    # Only available on supporting dialects:
    # default_schema_name: str
    execution_ctx_cls: ClassVar[type[ExecutionContext]]
    execute_sequence_format: type[tuple[Any] | list[Any]]
    preparer: IdentifierPreparer
    supports_alter: bool
    max_identifier_length: int
    supports_sane_rowcount: bool
    supports_sane_multi_rowcount: bool
    preexecute_autoincrement_sequences: bool
    implicit_returning: bool
    colspecs: dict[Any, Any]
    supports_default_values: bool
    supports_sequences: bool
    sequences_optional: bool
    supports_native_enum: bool
    supports_native_boolean: bool
    dbapi_exception_translation_map: dict[Any, Any]

    supports_statement_cache: bool
    @abstractmethod
    def create_connect_args(self, url: URL) -> None: ...
    def initialize(self, connection) -> None: ...
    def on_connect_url(self, url) -> Callable[[DBAPIConnection], object] | None: ...
    def on_connect(self) -> Callable[[DBAPIConnection], object] | None: ...
    # The following methods all raise NotImplementedError, but not all
    # dialects implement all methods, which is why they can't be marked
    # as abstract.
    @classmethod
    def type_descriptor(cls, typeobj) -> None: ...
    # Reflection API: inspect database metadata for the given connection.
    def get_columns(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def get_pk_constraint(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def get_foreign_keys(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def get_table_names(self, connection, schema: Any | None = ..., **kw) -> None: ...
    def get_temp_table_names(self, connection, schema: Any | None = ..., **kw) -> None: ...
    def get_view_names(self, connection, schema: Any | None = ..., **kw) -> None: ...
    def get_sequence_names(self, connection, schema: Any | None = ..., **kw) -> None: ...
    def get_temp_view_names(self, connection, schema: Any | None = ..., **kw) -> None: ...
    def get_view_definition(self, connection, view_name, schema: Any | None = ..., **kw) -> None: ...
    def get_indexes(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def get_unique_constraints(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def get_check_constraints(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def get_table_comment(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def normalize_name(self, name) -> None: ...
    def denormalize_name(self, name) -> None: ...
    def has_table(self, connection, table_name, schema: Any | None = ..., **kw) -> None: ...
    def has_index(self, connection, table_name, index_name, schema: Any | None = ...) -> None: ...
    def has_sequence(self, connection, sequence_name, schema: Any | None = ..., **kw) -> None: ...
    # DBAPI pass-through hooks.
    def do_begin(self, dbapi_connection) -> None: ...
    def do_rollback(self, dbapi_connection) -> None: ...
    def do_commit(self, dbapi_connection) -> None: ...
    def do_close(self, dbapi_connection) -> None: ...
    def do_set_input_sizes(self, cursor, list_of_tuples, context) -> None: ...
    def create_xid(self) -> None: ...
    def do_savepoint(self, connection, name) -> None: ...
    def do_rollback_to_savepoint(self, connection, name) -> None: ...
    def do_release_savepoint(self, connection, name) -> None: ...
    def do_begin_twophase(self, connection, xid) -> None: ...
    def do_prepare_twophase(self, connection, xid) -> None: ...
    def do_rollback_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ...
    def do_commit_twophase(self, connection, xid, is_prepared: bool = ..., recover: bool = ...) -> None: ...
    def do_recover_twophase(self, connection) -> None: ...
    def do_executemany(self, cursor, statement, parameters, context: Any | None = ...) -> None: ...
    def do_execute(self, cursor, statement, parameters, context: Any | None = ...) -> None: ...
    # FIX: dropped the stray `parameters` argument.  This hook is invoked as
    # do_execute_no_params(cursor, statement, context) — the statement carries
    # no parameters — matching DialectEvents.do_execute_no_params and
    # DefaultDialect.do_execute_no_params in SQLAlchemy itself.
    def do_execute_no_params(self, cursor, statement, context: Any | None = ...) -> None: ...
    def is_disconnect(self, e, connection, cursor) -> None: ...
    def connect(self, *cargs, **cparams) -> DBAPIConnection: ...
    def reset_isolation_level(self, dbapi_conn) -> None: ...
    def set_isolation_level(self, dbapi_conn, level) -> None: ...
    def get_isolation_level(self, dbapi_conn) -> None: ...
    def get_default_isolation_level(self, dbapi_conn) -> None: ...
    @classmethod
    def get_dialect_cls(cls, url): ...
    @classmethod
    def load_provisioning(cls) -> None: ...
    @classmethod
    def engine_created(cls, engine) -> None: ...
    def get_driver_connection(self, connection) -> None: ...

class CreateEnginePlugin:
    # NOTE(review): class continues past this chunk boundary.
    url: URL
    def __init__(self, url: URL, kwargs) -> None: ...
    # Tail of CreateEnginePlugin (header lies in the previous chunk):
    # hooks invoked during create_engine() to adjust URL/dialect/pool kwargs.
    def update_url(self, url) -> None: ...
    def handle_dialect_kwargs(self, dialect_cls, dialect_args) -> None: ...
    def handle_pool_kwargs(self, pool_cls, pool_args) -> None: ...
    def engine_created(self, engine) -> None: ...

class ExecutionContext:
    # Per-statement execution state; all methods here are declared without
    # useful return types (stubgen output).
    def create_cursor(self) -> None: ...
    def pre_exec(self) -> None: ...
    def get_out_parameter_values(self, out_param_names) -> None: ...
    def post_exec(self) -> None: ...
    def get_result_cursor_strategy(self, result) -> None: ...
    def handle_dbapi_exception(self, e) -> None: ...
    def should_autocommit_text(self, statement) -> None: ...
    def lastrow_has_defaults(self) -> None: ...
    def get_rowcount(self) -> None: ...

class Connectable:
    # Common interface of Engine and Connection.  execute()/scalar() are
    # overloaded: executable ClauseElement-like objects vs. raw SQL strings.
    @abstractmethod
    def connect(self, **kwargs) -> Connection: ...
    @property
    def engine(self) -> Engine | None: ...
    @abstractmethod
    @overload
    def execute(
        self,
        object_: ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled,
        *multiparams: Mapping[str, Any],
        **params: Any,
    ) -> CursorResult: ...
    @abstractmethod
    @overload
    def execute(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ...
    @abstractmethod
    @overload
    def scalar(
        self,
        object_: ClauseElement | FunctionElement | DDLElement | DefaultGenerator | Compiled,
        *multiparams: Mapping[str, Any],
        **params: Any,
    ) -> Any: ...
    @abstractmethod
    @overload
    def scalar(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> Any: ...
class ExceptionContext:
    # Context object handed to error-handling events; all fields optional.
    connection: Connection | None
    engine: Engine | None
    cursor: DBAPICursor | None
    statement: str | None
    parameters: Collection[Any] | None
    original_exception: BaseException | None
    sqlalchemy_exception: StatementError | None
    chained_exception: BaseException | None
    execution_context: ExecutionContext | None
    is_disconnect: bool | None
    invalidate_pool_on_disconnect: bool

class AdaptedConnection:
    @property
    def driver_connection(self): ...

# --- new file: sqlalchemy/engine/mock.pyi ---
from _typeshed import Self
from abc import abstractmethod
from collections.abc import Mapping
from typing import Any, overload

from .base import _Executable
from .cursor import CursorResult
from .interfaces import Connectable, Dialect
from .url import URL

class MockConnection(Connectable):
    # A Connectable that hands statements to a user-supplied executor
    # instead of a real DBAPI; `engine` returns the mock itself.
    def __init__(self, dialect: Dialect, execute) -> None: ...
    @property
    def engine(self: Self) -> Self: ...  # type: ignore[override]
    @property
    def dialect(self) -> Dialect: ...
    @property
    def name(self) -> str: ...
    def schema_for_object(self, obj): ...
    def connect(self, **kwargs): ...
    def execution_options(self, **kw): ...
    def compiler(self, statement, parameters, **kwargs): ...
    def create(self, entity, **kwargs) -> None: ...
    def drop(self, entity, **kwargs) -> None: ...
    @abstractmethod
    @overload
    def execute(self, object_: _Executable, *multiparams: Mapping[str, Any], **params: Any) -> CursorResult: ...
    @abstractmethod
    @overload
    def execute(self, object_: str, *multiparams: Any | tuple[Any, ...] | Mapping[str, Any], **params: Any) -> CursorResult: ...
def create_mock_engine(url: URL | str, executor, **kw) -> MockConnection: ...

# --- new file: sqlalchemy/engine/reflection.pyi ---
from typing import Any

def cache(fn, self, con, *args, **kw): ...

class Inspector:
    # Facade over Dialect reflection methods, bound to an engine/connection.
    def __init__(self, bind): ...
    @classmethod
    def from_engine(cls, bind): ...
    @property
    def default_schema_name(self): ...
    def get_schema_names(self): ...
    def get_table_names(self, schema: Any | None = ...): ...
    def has_table(self, table_name, schema: Any | None = ...): ...
    def has_sequence(self, sequence_name, schema: Any | None = ...): ...
    def get_sorted_table_and_fkc_names(self, schema: Any | None = ...): ...
    def get_temp_table_names(self): ...
    def get_temp_view_names(self): ...
    def get_table_options(self, table_name, schema: Any | None = ..., **kw): ...
    def get_view_names(self, schema: Any | None = ...): ...
    def get_sequence_names(self, schema: Any | None = ...): ...
    def get_view_definition(self, view_name, schema: Any | None = ...): ...
    def get_columns(self, table_name, schema: Any | None = ..., **kw): ...
    def get_pk_constraint(self, table_name, schema: Any | None = ..., **kw): ...
    def get_foreign_keys(self, table_name, schema: Any | None = ..., **kw): ...
    def get_indexes(self, table_name, schema: Any | None = ..., **kw): ...
    def get_unique_constraints(self, table_name, schema: Any | None = ..., **kw): ...
    def get_table_comment(self, table_name, schema: Any | None = ..., **kw): ...
    def get_check_constraints(self, table_name, schema: Any | None = ..., **kw): ...
    def reflecttable(self, *args, **kwargs): ...
    # Tail of Inspector (header in previous chunk).
    def reflect_table(
        self, table, include_columns, exclude_columns=..., resolve_fks: bool = ..., _extend_on: Any | None = ...
    ) -> None: ...

# --- new file: sqlalchemy/engine/result.pyi ---
from _typeshed import Self
from collections.abc import Generator, KeysView
from typing import Any

from ..sql.base import InPlaceGenerative
from .row import Row

class ResultMetaData:
    @property
    def keys(self): ...

class RMKeyView(KeysView[Any]):
    def __init__(self, parent) -> None: ...
    def __len__(self): ...
    def __iter__(self): ...
    def __contains__(self, item): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...

class SimpleResultMetaData(ResultMetaData):
    def __init__(
        self,
        keys,
        extra: Any | None = ...,
        _processors: Any | None = ...,
        _tuplefilter: Any | None = ...,
        _translated_indexes: Any | None = ...,
        _unique_filters: Any | None = ...,
    ) -> None: ...

def result_tuple(fields, extra: Any | None = ...): ...

class ResultInternal(InPlaceGenerative): ...

class _WithKeys:
    def keys(self): ...

class Result(_WithKeys, ResultInternal):
    # Iterable cursor-result facade; yields Row objects.
    def __init__(self, cursor_metadata) -> None: ...
    def close(self) -> None: ...
    def yield_per(self: Self, num: int) -> Self: ...
    def unique(self: Self, strategy: Any | None = ...) -> Self: ...
    def columns(self, *col_expressions): ...
    def scalars(self, index: int = ...) -> ScalarResult: ...
    def mappings(self) -> MappingResult: ...
    def __iter__(self): ...
    def __next__(self): ...
    def partitions(self, size: int | None = ...) -> Generator[list[Row], None, None]: ...
    def fetchall(self) -> list[Row]: ...
    def fetchone(self) -> Row | None: ...
    def fetchmany(self, size: int | None = ...) -> list[Row]: ...
    def all(self) -> list[Row]: ...
    def first(self) -> Row | None: ...
    def one_or_none(self) -> Row | None: ...
    def scalar_one(self) -> Any: ...
    def scalar_one_or_none(self) -> Any | None: ...
    def one(self) -> Row: ...
    def scalar(self) -> Any | None: ...
    def freeze(self) -> FrozenResult: ...
    def merge(self, *others) -> MergedResult: ...

class FilterResult(ResultInternal): ...

class ScalarResult(FilterResult):
    # View of a Result yielding a single column per row.
    def __init__(self, real_result, index) -> None: ...
    def unique(self, strategy: Any | None = ...): ...
    def partitions(self, size: Any | None = ...) -> None: ...
    def fetchall(self): ...
    def fetchmany(self, size: Any | None = ...): ...
    def all(self): ...
    def __iter__(self): ...
    def __next__(self): ...
    def first(self): ...
    def one_or_none(self): ...
    def one(self): ...

class MappingResult(_WithKeys, FilterResult):
    # View of a Result yielding mapping objects per row.
    def __init__(self, result) -> None: ...
    def unique(self, strategy: Any | None = ...): ...
    def columns(self, *col_expressions): ...
    def partitions(self, size: Any | None = ...) -> None: ...
    def fetchall(self): ...
    def fetchone(self): ...
    def fetchmany(self, size: Any | None = ...): ...
    def all(self): ...
    def __iter__(self): ...
    def __next__(self): ...
    def first(self): ...
    def one_or_none(self): ...
    def one(self): ...

class FrozenResult:
    metadata: Any
    data: Any
    def __init__(self, result) -> None: ...
    def rewrite_rows(self): ...
    def with_new_rows(self, tuple_data): ...
    def __call__(self): ...

class IteratorResult(Result):
    iterator: Any
    raw: Any
    def __init__(self, cursor_metadata, iterator, raw: Any | None = ..., _source_supports_scalars: bool = ...) -> None: ...

def null_result() -> IteratorResult: ...

class ChunkedIteratorResult(IteratorResult):
    chunks: Any
    raw: Any
    iterator: Any
    dynamic_yield_per: Any
    def __init__(
        self, cursor_metadata, chunks, source_supports_scalars: bool = ..., raw: Any | None = ..., dynamic_yield_per: bool = ...
    ) -> None: ...

class MergedResult(IteratorResult):
    closed: bool
    def __init__(self, cursor_metadata, results) -> None: ...

# --- new file: sqlalchemy/engine/row.pyi ---
import abc
from collections.abc import ItemsView, KeysView, Mapping, Sequence, ValuesView
from typing import Any

from ..cresultproxy import BaseRow as BaseRow

MD_INDEX: int

def rowproxy_reconstructor(cls, state): ...

KEY_INTEGER_ONLY: int
KEY_OBJECTS_ONLY: int
KEY_OBJECTS_BUT_WARN: int
KEY_OBJECTS_NO_WARN: int

class Row(BaseRow, Sequence[Any], metaclass=abc.ABCMeta):
    count: Any
    index: Any
    def __contains__(self, key): ...
    __hash__: Any
    def __lt__(self, other): ...
    def __le__(self, other): ...
    def __ge__(self, other): ...
    def __gt__(self, other): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
    def keys(self): ...

class LegacyRow(Row, metaclass=abc.ABCMeta):
    # 1.x-style row with dict-like access helpers.
    def __contains__(self, key): ...
    def has_key(self, key): ...
    def items(self): ...
    def iterkeys(self): ...
    def itervalues(self): ...
    def values(self): ...

# Backwards-compatibility aliases for pre-1.4 names.
BaseRowProxy = BaseRow
RowProxy = Row

class ROMappingView(KeysView[Any], ValuesView[Any], ItemsView[Any, Any]):
    def __init__(self, mapping, items) -> None: ...
    def __len__(self): ...
    def __iter__(self): ...
    def __contains__(self, item): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
class RowMapping(BaseRow, Mapping[Any, Any]):
    __getitem__: Any
    def __iter__(self): ...
    def __len__(self): ...
    def __contains__(self, key): ...
    def items(self): ...
    def keys(self): ...
    def values(self): ...

# --- new file: sqlalchemy/engine/strategies.pyi ---
from typing import Any

class MockEngineStrategy:
    MockConnection: Any

# --- new file: sqlalchemy/engine/url.pyi ---
from _typeshed import Self, SupportsItems
from collections.abc import Iterable, Mapping, Sequence
from typing import Any, NamedTuple

from ..util import immutabledict
from .interfaces import Dialect

# stub-only helper class
class _URLTuple(NamedTuple):
    drivername: str
    username: str | None
    password: str | object | None  # object that produces a password when called with str()
    host: str | None
    port: int | None
    database: str | None
    query: immutabledict[str, str | tuple[str, ...]]

_Query = Mapping[str, str | Sequence[str]] | Sequence[tuple[str, str | Sequence[str]]]

class URL(_URLTuple):
    # Immutable database URL; constructed via create(), never directly.
    @classmethod
    def create(
        cls,
        drivername: str,
        username: str | None = ...,
        password: str | object | None = ...,  # object that produces a password when called with str()
        host: str | None = ...,
        port: int | None = ...,
        database: str | None = ...,
        query: _Query | None = ...,
    ) -> URL: ...
    # Tail of class URL (header in previous chunk).  All "update"/"set"
    # methods return a new URL; the tuple itself is immutable.
    def set(
        self: Self,
        drivername: str | None = ...,
        username: str | None = ...,
        password: str | object | None = ...,
        host: str | None = ...,
        port: int | None = ...,
        database: str | None = ...,
        query: _Query | None = ...,
    ) -> Self: ...
    def update_query_string(self: Self, query_string: str, append: bool = ...) -> Self: ...
    def update_query_pairs(self: Self, key_value_pairs: Iterable[tuple[str, str]], append: bool = ...) -> Self: ...
    def update_query_dict(self: Self, query_parameters: SupportsItems[str, str | Sequence[str]], append: bool = ...) -> Self: ...
    def difference_update_query(self, names: Iterable[str]) -> URL: ...
    @property
    def normalized_query(self) -> immutabledict[str, tuple[str, ...]]: ...
    def __to_string__(self, hide_password: bool = ...) -> str: ...
    def render_as_string(self, hide_password: bool = ...) -> str: ...
    def __copy__(self: Self) -> Self: ...
    def __deepcopy__(self: Self, memo: object) -> Self: ...
    def __hash__(self) -> int: ...
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def get_backend_name(self) -> str: ...
    def get_driver_name(self) -> str: ...
    def get_dialect(self) -> type[Dialect]: ...
    def translate_connect_args(self, names: list[str] | None = ..., **kw: str) -> dict[str, Any]: ...

def make_url(name_or_url: str | URL) -> URL: ...

# --- new file: sqlalchemy/engine/util.pyi ---
from _typeshed import Self
from collections.abc import Callable
from types import TracebackType
from typing import Any

def connection_memoize(key: str) -> Callable[..., Any]: ...
class TransactionalContext:
    # Context-manager protocol shared by transaction objects.
    def __enter__(self: Self) -> Self: ...
    def __exit__(
        self, type_: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None
    ) -> None: ...

# --- new file: sqlalchemy/event/__init__.pyi ---
# Public re-exports of the event package.
from .api import (
    CANCEL as CANCEL,
    NO_RETVAL as NO_RETVAL,
    contains as contains,
    listen as listen,
    listens_for as listens_for,
    remove as remove,
)
from .attr import RefCollection as RefCollection
from .base import Events as Events, dispatcher as dispatcher

# --- new file: sqlalchemy/event/api.pyi ---
from typing import Any

CANCEL: Any
NO_RETVAL: Any

def listen(target, identifier, fn, *args, **kw) -> None: ...
def listens_for(target, identifier, *args, **kw): ...
def remove(target, identifier, fn) -> None: ...
def contains(target, identifier, fn): ...

# --- new file: sqlalchemy/event/attr.pyi ---
from typing import Any

from .. import util

class RefCollection(util.MemoizedSlots):
    ref: Any

class _empty_collection:
    def append(self, element) -> None: ...
    def extend(self, other) -> None: ...
    def remove(self, element) -> None: ...
    def __iter__(self): ...
    def clear(self) -> None: ...

class _ClsLevelDispatch(RefCollection):
    # Class-level dispatch container for one event name.
    name: Any
    clsname: Any
    arg_names: Any
    has_kw: Any
    legacy_signatures: Any
    def __init__(self, parent_dispatch_cls, fn): ...
    def insert(self, event_key, propagate) -> None: ...
    def append(self, event_key, propagate) -> None: ...
    def update_subclass(self, target) -> None: ...
    def remove(self, event_key) -> None: ...
    def clear(self) -> None: ...
    def for_modify(self, obj): ...

class _InstanceLevelDispatch(RefCollection): ...

class _EmptyListener(_InstanceLevelDispatch):
    propagate: Any
    listeners: Any
    parent: Any
    parent_listeners: Any
    name: Any
    def __init__(self, parent, target_cls) -> None: ...
    def for_modify(self, obj): ...
    exec_once: Any
    exec_once_unless_exception: Any
    insert: Any
    append: Any
    remove: Any
    clear: Any
    def __call__(self, *args, **kw) -> None: ...
    def __len__(self): ...
    def __iter__(self): ...
    def __bool__(self): ...
    __nonzero__: Any

class _CompoundListener(_InstanceLevelDispatch):
    def exec_once(self, *args, **kw) -> None: ...
    def exec_once_unless_exception(self, *args, **kw) -> None: ...
    def __call__(self, *args, **kw) -> None: ...
    def __len__(self): ...
    def __iter__(self): ...
    def __bool__(self): ...
    __nonzero__: Any

class _ListenerCollection(_CompoundListener):
    parent_listeners: Any
    parent: Any
    name: Any
    listeners: Any
    propagate: Any
    def __init__(self, parent, target_cls) -> None: ...
    def for_modify(self, obj): ...
    def insert(self, event_key, propagate) -> None: ...
    def append(self, event_key, propagate) -> None: ...
    def remove(self, event_key) -> None: ...
    def clear(self) -> None: ...

class _JoinedListener(_CompoundListener):
    parent: Any
    name: Any
    local: Any
    parent_listeners: Any
    def __init__(self, parent, name, local) -> None: ...
    @property
    def listeners(self): ...
    def for_modify(self, obj): ...
    def insert(self, event_key, propagate) -> None: ...
    def append(self, event_key, propagate) -> None: ...
    def remove(self, event_key) -> None: ...
    def clear(self) -> None: ...

# --- new file: sqlalchemy/event/base.pyi ---
from typing import Any

class _UnpickleDispatch:
    def __call__(self, _instance_cls): ...

class _Dispatch:
    def __init__(self, parent, instance_cls: Any | None = ...) -> None: ...
    def __getattr__(self, name): ...
    def __reduce__(self): ...

class _EventMeta(type):
    def __init__(cls, classname, bases, dict_) -> None: ...

class Events:
    dispatch: Any

class _JoinedDispatcher:
    local: Any
    parent: Any
    def __init__(self, local, parent) -> None: ...
    def __getattr__(self, name): ...

class dispatcher:
    # Descriptor that exposes the per-class event dispatch namespace.
    dispatch: Any
    events: Any
    def __init__(self, events) -> None: ...
    def __get__(self, obj, cls): ...

class slots_dispatcher(dispatcher):
    def __get__(self, obj, cls): ...
# --- sqlalchemy/event/legacy.pyi: created as an empty stub (the diff renames
# an empty requests/urllib3 stub file into place; no content). ---

# --- new file: sqlalchemy/event/registry.pyi ---
from typing import Any

class _EventKey:
    # Immutable identity of one (target, identifier, fn) listener registration.
    target: Any
    identifier: Any
    fn: Any
    fn_key: Any
    fn_wrap: Any
    dispatch_target: Any
    def __init__(self, target, identifier, fn, dispatch_target, _fn_wrap: Any | None = ...) -> None: ...
    def with_wrapper(self, fn_wrap): ...
    def with_dispatch_target(self, dispatch_target): ...
    def listen(self, *args, **kw) -> None: ...
    def remove(self) -> None: ...
    def contains(self): ...
    def base_listen(
        self, propagate: bool = ..., insert: bool = ..., named: bool = ..., retval: Any | None = ..., asyncio: bool = ...
    ) -> None: ...
    def append_to_list(self, owner, list_): ...
    def remove_from_list(self, owner, list_) -> None: ...
    def prepend_to_list(self, owner, list_): ...
# --- new file: sqlalchemy/events.pyi ---
# Aggregated re-exports of all event classes.
from .engine.events import ConnectionEvents as ConnectionEvents, DialectEvents as DialectEvents
from .pool.events import PoolEvents as PoolEvents
from .sql.base import SchemaEventTarget as SchemaEventTarget
from .sql.events import DDLEvents as DDLEvents

# --- new file: sqlalchemy/exc.pyi ---
from typing import Any, ClassVar

class HasDescriptionCode:
    # Mixin adding the `code` documentation-link attribute to exceptions.
    code: str | None
    def __init__(self, *arg: Any, code: str | None = ..., **kw: Any) -> None: ...

class SQLAlchemyError(HasDescriptionCode, Exception):
    def __unicode__(self) -> str: ...

class ArgumentError(SQLAlchemyError): ...

class ObjectNotExecutableError(ArgumentError):
    target: Any
    def __init__(self, target) -> None: ...
    def __reduce__(self): ...

class NoSuchModuleError(ArgumentError): ...
class NoForeignKeysError(ArgumentError): ...
class AmbiguousForeignKeysError(ArgumentError): ...

class CircularDependencyError(SQLAlchemyError):
    cycles: Any
    edges: Any
    def __init__(self, message, cycles, edges, msg: Any | None = ..., code: Any | None = ...) -> None: ...
    def __reduce__(self): ...

class CompileError(SQLAlchemyError): ...

class UnsupportedCompilationError(CompileError):
    code: str
    compiler: Any
    element_type: Any
    message: str | None
    def __init__(self, compiler, element_type, message: str | None = ...) -> None: ...
    def __reduce__(self): ...

class IdentifierError(SQLAlchemyError): ...

class DisconnectionError(SQLAlchemyError):
    invalidate_pool: bool

class InvalidatePoolError(DisconnectionError):
    invalidate_pool: bool

# NOTE: intentionally shadows the builtin TimeoutError name, matching the
# runtime library.
class TimeoutError(SQLAlchemyError): ...
class InvalidRequestError(SQLAlchemyError): ...
class NoInspectionAvailable(InvalidRequestError): ...
class PendingRollbackError(InvalidRequestError): ...
class ResourceClosedError(InvalidRequestError): ...
class NoSuchColumnError(InvalidRequestError, KeyError): ...
class NoResultFound(InvalidRequestError): ...
class MultipleResultsFound(InvalidRequestError): ...
class NoReferenceError(InvalidRequestError): ...

class AwaitRequired(InvalidRequestError):
    code: str

class MissingGreenlet(InvalidRequestError):
    code: str

class NoReferencedTableError(NoReferenceError):
    table_name: Any
    def __init__(self, message, tname) -> None: ...
    def __reduce__(self): ...

class NoReferencedColumnError(NoReferenceError):
    table_name: Any
    column_name: Any
    def __init__(self, message, tname, cname) -> None: ...
    def __reduce__(self): ...

class NoSuchTableError(InvalidRequestError): ...
class UnreflectableTableError(InvalidRequestError): ...
class UnboundExecutionError(InvalidRequestError): ...
class DontWrapMixin: ...

class StatementError(SQLAlchemyError):
    # Error raised during statement execution; carries the statement/params.
    statement: Any
    params: Any
    orig: Any
    ismulti: Any
    hide_parameters: Any
    detail: Any
    def __init__(
        self, message, statement, params, orig, hide_parameters: bool = ..., code: Any | None = ..., ismulti: Any | None = ...
    ) -> None: ...
    def add_detail(self, msg) -> None: ...
    def __reduce__(self): ...

class DBAPIError(StatementError):
    # Wraps a DBAPI-level exception; instance() picks the matching subclass.
    code: str
    @classmethod
    def instance(
        cls,
        statement,
        params,
        orig,
        dbapi_base_err,
        hide_parameters: bool = ...,
        connection_invalidated: bool = ...,
        dialect: Any | None = ...,
        ismulti: Any | None = ...,
    ): ...
    def __reduce__(self): ...
    connection_invalidated: Any
    def __init__(
        self,
        statement,
        params,
        orig,
        hide_parameters: bool = ...,
        connection_invalidated: bool = ...,
        code: Any | None = ...,
        ismulti: Any | None = ...,
    ) -> None: ...

class InterfaceError(DBAPIError): ...
class DatabaseError(DBAPIError): ...
class DataError(DatabaseError): ...
class OperationalError(DatabaseError): ...
class IntegrityError(DatabaseError): ...
class InternalError(DatabaseError): ...
class ProgrammingError(DatabaseError): ...
class NotSupportedError(DatabaseError): ...

class SADeprecationWarning(HasDescriptionCode, DeprecationWarning):
    deprecated_since: ClassVar[str | None]

class Base20DeprecationWarning(SADeprecationWarning):
    deprecated_since: ClassVar[str]

class LegacyAPIWarning(Base20DeprecationWarning): ...
class RemovedIn20Warning(Base20DeprecationWarning): ...
class MovedIn20Warning(RemovedIn20Warning): ...

class SAPendingDeprecationWarning(PendingDeprecationWarning):
    deprecated_since: ClassVar[str | None]

class SAWarning(HasDescriptionCode, RuntimeWarning): ...

# --- new file: sqlalchemy/ext/__init__.pyi (empty) ---

# --- new file: sqlalchemy/ext/associationproxy.pyi ---
from typing import Any

from ..orm import interfaces
from ..sql.operators import ColumnOperators
from ..util import memoized_property

def association_proxy(target_collection, attr, **kw): ...
ASSOCIATION_PROXY: Any

class AssociationProxy(interfaces.InspectionAttrInfo):
    # Descriptor configured by association_proxy(); proxies an attribute of a
    # related object through a relationship's collection.
    is_attribute: bool
    extension_type: Any
    target_collection: Any
    value_attr: Any
    creator: Any
    getset_factory: Any
    proxy_factory: Any
    proxy_bulk_set: Any
    cascade_scalar_deletes: Any
    key: Any
    info: Any
    def __init__(
        self,
        target_collection,
        attr,
        creator: Any | None = ...,
        getset_factory: Any | None = ...,
        proxy_factory: Any | None = ...,
        proxy_bulk_set: Any | None = ...,
        info: Any | None = ...,
        cascade_scalar_deletes: bool = ...,
    ) -> None: ...
    def __get__(self, obj, class_): ...
    def __set__(self, obj, values): ...
    def __delete__(self, obj): ...
    def for_class(self, class_, obj: Any | None = ...): ...

class AssociationProxyInstance:
    # Per-class instantiation of an AssociationProxy.
    parent: Any
    key: Any
    owning_class: Any
    target_collection: Any
    collection_class: Any
    target_class: Any
    value_attr: Any
    def __init__(self, parent, owning_class, target_class, value_attr) -> None: ...
    @classmethod
    def for_proxy(cls, parent, owning_class, parent_instance): ...
    def __clause_element__(self) -> None: ...
    @property
    def remote_attr(self): ...
    @property
    def local_attr(self): ...
    @property
    def attr(self): ...
    @memoized_property
    def scalar(self): ...
    @property
    def info(self): ...
    def get(self, obj): ...
    def set(self, obj, values) -> None: ...
    def delete(self, obj) -> None: ...
    def any(self, criterion: Any | None = ..., **kwargs): ...
    def has(self, criterion: Any | None = ..., **kwargs): ...

class AmbiguousAssociationProxyInstance(AssociationProxyInstance):
    def get(self, obj): ...
    def __eq__(self, obj): ...
    def __ne__(self, obj): ...
    def any(self, criterion: Any | None = ..., **kwargs) -> None: ...
    def has(self, criterion: Any | None = ..., **kwargs) -> None: ...

class ObjectAssociationProxyInstance(AssociationProxyInstance):
    def contains(self, obj): ...
    def __eq__(self, obj): ...
    def __ne__(self, obj): ...

class ColumnAssociationProxyInstance(ColumnOperators[Any], AssociationProxyInstance):
    def __eq__(self, other): ...
    def operate(self, op, *other, **kwargs): ...

class _lazy_collection:
    parent: Any
    target: Any
    def __init__(self, obj, target) -> None: ...
    def __call__(self): ...

class _AssociationCollection:
    # Base for the list/dict/set proxy collections below.
    lazy_collection: Any
    creator: Any
    getter: Any
    setter: Any
    parent: Any
    def __init__(self, lazy_collection, creator, getter, setter, parent) -> None: ...
    col: Any
    def __len__(self): ...
    def __bool__(self): ...
    __nonzero__: Any

class _AssociationList(_AssociationCollection):
    def __getitem__(self, index): ...
    def __setitem__(self, index, value) -> None: ...
    def __delitem__(self, index) -> None: ...
    def __contains__(self, value): ...
    def __getslice__(self, start, end): ...
    def __setslice__(self, start, end, values) -> None: ...
    def __delslice__(self, start, end) -> None: ...
    def __iter__(self): ...
    def append(self, value) -> None: ...
    def count(self, value): ...
    def extend(self, values) -> None: ...
    def insert(self, index, value) -> None: ...
    def pop(self, index: int = ...): ...
    def remove(self, value) -> None: ...
    def reverse(self) -> None: ...
    def sort(self) -> None: ...
    def clear(self) -> None: ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
    def __lt__(self, other): ...
    def __le__(self, other): ...
    def __gt__(self, other): ...
    def __ge__(self, other): ...
    def __cmp__(self, other): ...
    def __add__(self, iterable): ...
    def __radd__(self, iterable): ...
    def __mul__(self, n): ...
    __rmul__: Any
    def __iadd__(self, iterable): ...
    def __imul__(self, n): ...
    def index(self, item, *args): ...
    def copy(self): ...
    def __hash__(self): ...

class _AssociationDict(_AssociationCollection):
    def __getitem__(self, key): ...
    def __setitem__(self, key, value) -> None: ...
    def __delitem__(self, key) -> None: ...
    def __contains__(self, key): ...
    def has_key(self, key): ...
    def __iter__(self): ...
    def clear(self) -> None: ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
    def __lt__(self, other): ...
    def __le__(self, other): ...
    def __gt__(self, other): ...
    def __ge__(self, other): ...
    def __cmp__(self, other): ...
    def get(self, key, default: Any | None = ...): ...
    def setdefault(self, key, default: Any | None = ...): ...
    def keys(self): ...
    def items(self): ...
    def values(self): ...
    def pop(self, key, default=...): ...
    def popitem(self): ...
    def update(self, *a, **kw) -> None: ...
    def copy(self): ...
    def __hash__(self): ...

class _AssociationSet(_AssociationCollection):
    def __len__(self): ...
    def __bool__(self): ...
    __nonzero__: Any
    def __contains__(self, value): ...
    def __iter__(self): ...
    def add(self, value) -> None: ...
    def discard(self, value) -> None: ...
    def remove(self, value) -> None: ...
    def pop(self): ...
    def update(self, other) -> None: ...
    def __ior__(self, other): ...  # type: ignore[misc]
    def union(self, other): ...
    __or__: Any
    def difference(self, other): ...
    __sub__: Any
    def difference_update(self, other) -> None: ...
    def __isub__(self, other): ...  # type: ignore[misc]
    def intersection(self, other): ...
    __and__: Any
    def intersection_update(self, other) -> None: ...
    def __iand__(self, other): ...  # type: ignore[misc]
    def symmetric_difference(self, other): ...
    __xor__: Any
    def symmetric_difference_update(self, other) -> None: ...
    def __ixor__(self, other): ...  # type: ignore[misc]
    def issubset(self, other): ...
    def issuperset(self, other): ...
    def clear(self) -> None: ...
    def copy(self): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
    def __lt__(self, other): ...
    def __le__(self, other): ...
    def __gt__(self, other): ...
    def __ge__(self, other): ...
    def __hash__(self): ...
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/__init__.pyi new file mode 100644 index 000000000000..e065d748f177 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/__init__.pyi @@ -0,0 +1,15 @@ +from .engine import ( + AsyncConnection as AsyncConnection, + AsyncEngine as AsyncEngine, + AsyncTransaction as AsyncTransaction, + create_async_engine as create_async_engine, +) +from .events import AsyncConnectionEvents as AsyncConnectionEvents, AsyncSessionEvents as AsyncSessionEvents +from .result import AsyncMappingResult as AsyncMappingResult, AsyncResult as AsyncResult, AsyncScalarResult as AsyncScalarResult +from .scoping import async_scoped_session as async_scoped_session +from .session import ( + AsyncSession as AsyncSession, + AsyncSessionTransaction as AsyncSessionTransaction, + async_object_session as async_object_session, + async_session as async_session, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/base.pyi new file mode 100644 index 000000000000..8c88946875de --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/base.pyi @@ -0,0 +1,16 @@ +import abc + +class ReversibleProxy: ... + +class StartableContext(abc.ABC, metaclass=abc.ABCMeta): + @abc.abstractmethod + async def start(self, is_ctxmanager: bool = ...): ... + def __await__(self): ... + async def __aenter__(self): ... + @abc.abstractmethod + async def __aexit__(self, type_, value, traceback): ... + +class ProxyComparable(ReversibleProxy): + def __hash__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/engine.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/engine.pyi new file mode 100644 index 000000000000..5bb62f98b3fa --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/engine.pyi @@ -0,0 +1,93 @@ +from typing import Any + +from .base import ProxyComparable, StartableContext + +def create_async_engine(*arg, **kw): ... + +class AsyncConnectable: ... + +class AsyncConnection(ProxyComparable, StartableContext, AsyncConnectable): + engine: Any + sync_engine: Any + sync_connection: Any + def __init__(self, async_engine, sync_connection: Any | None = ...) -> None: ... + async def start(self, is_ctxmanager: bool = ...): ... + @property + def connection(self) -> None: ... + async def get_raw_connection(self): ... + @property + def info(self): ... + def begin(self): ... + def begin_nested(self): ... + async def invalidate(self, exception: Any | None = ...): ... + async def get_isolation_level(self): ... + async def set_isolation_level(self): ... + def in_transaction(self): ... + def in_nested_transaction(self): ... + def get_transaction(self): ... + def get_nested_transaction(self): ... + async def execution_options(self, **opt): ... + async def commit(self) -> None: ... + async def rollback(self) -> None: ... + async def close(self) -> None: ... + async def exec_driver_sql(self, statement, parameters: Any | None = ..., execution_options=...): ... + async def stream(self, statement, parameters: Any | None = ..., execution_options=...): ... + async def execute(self, statement, parameters: Any | None = ..., execution_options=...): ... + async def scalar(self, statement, parameters: Any | None = ..., execution_options=...): ... + async def scalars(self, statement, parameters: Any | None = ..., execution_options=...): ... 
+ async def stream_scalars(self, statement, parameters: Any | None = ..., execution_options=...): ... + async def run_sync(self, fn, *arg, **kw): ... + def __await__(self): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + # proxied from Connection + dialect: Any + @property + def closed(self): ... + @property + def invalidated(self): ... + @property + def default_isolation_level(self): ... + +class AsyncEngine(ProxyComparable, AsyncConnectable): + class _trans_ctx(StartableContext): + conn: Any + def __init__(self, conn) -> None: ... + transaction: Any + async def start(self, is_ctxmanager: bool = ...): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + sync_engine: Any + def __init__(self, sync_engine) -> None: ... + def begin(self): ... + def connect(self): ... + async def raw_connection(self): ... + def execution_options(self, **opt): ... + async def dispose(self): ... + # proxied from Engine + url: Any + pool: Any + dialect: Any + echo: Any + @property + def engine(self): ... + @property + def name(self): ... + @property + def driver(self): ... + def clear_compiled_cache(self) -> None: ... + def update_execution_options(self, **opt) -> None: ... + def get_execution_options(self): ... + +class AsyncTransaction(ProxyComparable, StartableContext): + connection: Any + sync_transaction: Any + nested: Any + def __init__(self, connection, nested: bool = ...) -> None: ... + @property + def is_valid(self): ... + @property + def is_active(self): ... + async def close(self) -> None: ... + async def rollback(self) -> None: ... + async def commit(self) -> None: ... + async def start(self, is_ctxmanager: bool = ...): ... + async def __aexit__(self, type_, value, traceback) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/events.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/events.pyi new file mode 100644 index 000000000000..e9a8bf1a5916 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/events.pyi @@ -0,0 +1,5 @@ +from ...engine import events as engine_event +from ...orm import events as orm_event + +class AsyncConnectionEvents(engine_event.ConnectionEvents): ... +class AsyncSessionEvents(orm_event.SessionEvents): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/exc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/exc.pyi new file mode 100644 index 000000000000..56f3b638ab1b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/exc.pyi @@ -0,0 +1,5 @@ +from ...exc import InvalidRequestError + +class AsyncMethodRequired(InvalidRequestError): ... +class AsyncContextNotStarted(InvalidRequestError): ... +class AsyncContextAlreadyStarted(InvalidRequestError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/result.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/result.pyi new file mode 100644 index 000000000000..c04258ab192f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/result.pyi @@ -0,0 +1,57 @@ +from typing import Any + +from ...engine.result import FilterResult + +class AsyncCommon(FilterResult): + async def close(self) -> None: ... + +class AsyncResult(AsyncCommon): + def __init__(self, real_result) -> None: ... + def keys(self): ... + def unique(self, strategy: Any | None = ...): ... + def columns(self, *col_expressions): ... + async def partitions(self, size: Any | None = ...) -> None: ... 
+ async def fetchone(self): ... + async def fetchmany(self, size: Any | None = ...): ... + async def all(self): ... + def __aiter__(self): ... + async def __anext__(self): ... + async def first(self): ... + async def one_or_none(self): ... + async def scalar_one(self): ... + async def scalar_one_or_none(self): ... + async def one(self): ... + async def scalar(self): ... + async def freeze(self): ... + def merge(self, *others): ... + def scalars(self, index: int = ...): ... + def mappings(self): ... + +class AsyncScalarResult(AsyncCommon): + def __init__(self, real_result, index) -> None: ... + def unique(self, strategy: Any | None = ...): ... + async def partitions(self, size: Any | None = ...) -> None: ... + async def fetchall(self): ... + async def fetchmany(self, size: Any | None = ...): ... + async def all(self): ... + def __aiter__(self): ... + async def __anext__(self): ... + async def first(self): ... + async def one_or_none(self): ... + async def one(self): ... + +class AsyncMappingResult(AsyncCommon): + def __init__(self, result) -> None: ... + def keys(self): ... + def unique(self, strategy: Any | None = ...): ... + def columns(self, *col_expressions): ... + async def partitions(self, size: Any | None = ...) -> None: ... + async def fetchall(self): ... + async def fetchone(self): ... + async def fetchmany(self, size: Any | None = ...): ... + async def all(self): ... + def __aiter__(self): ... + async def __anext__(self): ... + async def first(self): ... + async def one_or_none(self): ... + async def one(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/scoping.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/scoping.pyi new file mode 100644 index 000000000000..90e44bc0de74 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/scoping.pyi @@ -0,0 +1,76 @@ +from typing import Any + +from ...orm.scoping import ScopedSessionMixin +from ...util import memoized_property + +class async_scoped_session(ScopedSessionMixin): + session_factory: Any + registry: Any + def __init__(self, session_factory, scopefunc) -> None: ... + async def remove(self) -> None: ... + # proxied from Session + @classmethod + async def close_all(cls): ... + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + bind: Any + identity_map: Any + autoflush: Any + def __contains__(self, instance): ... + def __iter__(self): ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def begin(self, **kw): ... + def begin_nested(self, **kw): ... + async def close(self): ... + async def commit(self): ... + async def connection(self, **kw): ... + async def delete(self, instance): ... + async def execute( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + def expire(self, instance, attribute_names: Any | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expunge(self, instance) -> None: ... + def expunge_all(self) -> None: ... + async def flush(self, objects: Any | None = ...) -> None: ... + async def get( + self, + entity, + ident, + options: Any | None = ..., + populate_existing: bool = ..., + with_for_update: Any | None = ..., + identity_token: Any | None = ..., + ): ... + def get_bind(self, mapper: Any | None = ..., clause: Any | None = ..., bind: Any | None = ..., **kw): ... 
+ def is_modified(self, instance, include_collections: bool = ...): ... + async def merge(self, instance, load: bool = ..., options: Any | None = ...): ... + async def refresh(self, instance, attribute_names: Any | None = ..., with_for_update: Any | None = ...): ... + async def rollback(self): ... + async def scalar( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def scalars( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def stream( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def stream_scalars( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + @property + def is_active(self): ... + @property + def no_autoflush(self) -> None: ... + @memoized_property + def info(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/session.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/session.pyi new file mode 100644 index 000000000000..94b3d1bde3ac --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/asyncio/session.pyi @@ -0,0 +1,106 @@ +from typing import Any + +from ...util import memoized_property +from .base import ReversibleProxy, StartableContext + +class AsyncSession(ReversibleProxy): + dispatch: Any + bind: Any + binds: Any + sync_session_class: Any + sync_session: Any + def __init__(self, bind: Any | None = ..., binds: Any | None = ..., sync_session_class: Any | None = ..., **kw) -> None: ... + async def refresh(self, instance, attribute_names: Any | None = ..., with_for_update: Any | None = ...): ... 
+ async def run_sync(self, fn, *arg, **kw): ... + async def execute( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def scalar( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def scalars( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def get( + self, + entity, + ident, + options: Any | None = ..., + populate_existing: bool = ..., + with_for_update: Any | None = ..., + identity_token: Any | None = ..., + ): ... + async def stream( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def stream_scalars( + self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw + ): ... + async def delete(self, instance): ... + async def merge(self, instance, load: bool = ..., options: Any | None = ...): ... + async def flush(self, objects: Any | None = ...) -> None: ... + def get_transaction(self): ... + def get_nested_transaction(self): ... + def get_bind(self, mapper: Any | None = ..., clause: Any | None = ..., bind: Any | None = ..., **kw): ... + async def connection(self, **kw): ... + def begin(self, **kw): ... + def begin_nested(self, **kw): ... + async def rollback(self): ... + async def commit(self): ... + async def close(self): ... + @classmethod + async def close_all(cls): ... + async def __aenter__(self): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + # proxied from Session + identity_map: Any + autoflush: Any + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + def __contains__(self, instance): ... + def __iter__(self): ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... 
+ def expire(self, instance, attribute_names: Any | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expunge(self, instance) -> None: ... + def expunge_all(self) -> None: ... + def is_modified(self, instance, include_collections: bool = ...): ... + def in_transaction(self): ... + def in_nested_transaction(self): ... + @property + def no_autoflush(self) -> None: ... + @property + def is_active(self): ... + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + @memoized_property + def info(self): ... + +class _AsyncSessionContextManager: + async_session: Any + def __init__(self, async_session) -> None: ... + trans: Any + async def __aenter__(self): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + +class AsyncSessionTransaction(ReversibleProxy, StartableContext): + session: Any + nested: Any + sync_transaction: Any + def __init__(self, session, nested: bool = ...) -> None: ... + @property + def is_active(self): ... + async def rollback(self) -> None: ... + async def commit(self) -> None: ... + async def start(self, is_ctxmanager: bool = ...): ... + async def __aexit__(self, type_, value, traceback) -> None: ... + +def async_object_session(instance): ... +def async_session(session): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/automap.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/automap.pyi new file mode 100644 index 000000000000..13b70990366c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/automap.pyi @@ -0,0 +1,26 @@ +from typing import Any + +def classname_for_table(base, tablename, table): ... +def name_for_scalar_relationship(base, local_cls, referred_cls, constraint): ... +def name_for_collection_relationship(base, local_cls, referred_cls, constraint): ... 
+def generate_relationship(base, direction, return_fn, attrname, local_cls, referred_cls, **kw): ... + +class AutomapBase: + __abstract__: bool + classes: Any + @classmethod + def prepare( + cls, + autoload_with: Any | None = ..., + engine: Any | None = ..., + reflect: bool = ..., + schema: Any | None = ..., + classname_for_table: Any | None = ..., + collection_class: Any | None = ..., + name_for_scalar_relationship: Any | None = ..., + name_for_collection_relationship: Any | None = ..., + generate_relationship: Any | None = ..., + reflection_options=..., + ) -> None: ... + +def automap_base(declarative_base: Any | None = ..., **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/baked.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/baked.pyi new file mode 100644 index 000000000000..664c226aca12 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/baked.pyi @@ -0,0 +1,45 @@ +from typing import Any + +log: Any + +class Bakery: + cls: Any + cache: Any + def __init__(self, cls_, cache) -> None: ... + def __call__(self, initial_fn, *args): ... + +class BakedQuery: + steps: Any + def __init__(self, bakery, initial_fn, args=...) -> None: ... + @classmethod + def bakery(cls, size: int = ..., _size_alert: Any | None = ...): ... + def __iadd__(self, other): ... + def __add__(self, other): ... + def add_criteria(self, fn, *args): ... + def with_criteria(self, fn, *args): ... + def for_session(self, session): ... + def __call__(self, session): ... + def spoil(self, full: bool = ...): ... + def to_query(self, query_or_session): ... + +class Result: + bq: Any + session: Any + def __init__(self, bq, session) -> None: ... + def params(self, *args, **kw): ... + def with_post_criteria(self, fn): ... + def __iter__(self): ... + def count(self): ... + def scalar(self): ... + def first(self): ... + def one(self): ... + def one_or_none(self): ... 
+ def all(self): ... + def get(self, ident): ... + +def bake_lazy_loaders() -> None: ... +def unbake_lazy_loaders() -> None: ... + +baked_lazyload: Any +baked_lazyload_all: Any +bakery: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/compiler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/compiler.pyi new file mode 100644 index 000000000000..cc2363f1ae59 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/compiler.pyi @@ -0,0 +1,9 @@ +from typing import Any + +def compiles(class_, *specs): ... +def deregister(class_) -> None: ... + +class _dispatcher: + specs: Any + def __init__(self) -> None: ... + def __call__(self, element, compiler, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/declarative/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/declarative/__init__.pyi new file mode 100644 index 000000000000..7eddf8603c37 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/declarative/__init__.pyi @@ -0,0 +1,27 @@ +from ...orm.decl_api import ( + DeclarativeMeta as DeclarativeMeta, + as_declarative as as_declarative, + declarative_base as declarative_base, + declared_attr as declared_attr, + has_inherited_table as has_inherited_table, + synonym_for as synonym_for, +) +from .extensions import ( + AbstractConcreteBase as AbstractConcreteBase, + ConcreteBase as ConcreteBase, + DeferredReflection as DeferredReflection, + instrument_declarative as instrument_declarative, +) + +__all__ = [ + "declarative_base", + "synonym_for", + "has_inherited_table", + "instrument_declarative", + "declared_attr", + "as_declarative", + "ConcreteBase", + "AbstractConcreteBase", + "DeclarativeMeta", + "DeferredReflection", +] diff --git 
a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/declarative/extensions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/declarative/extensions.pyi new file mode 100644 index 000000000000..c4fbf0ff5444 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/declarative/extensions.pyi @@ -0,0 +1,14 @@ +def instrument_declarative(cls, cls_registry, metadata) -> None: ... + +class ConcreteBase: + @classmethod + def __declare_first__(cls) -> None: ... + +class AbstractConcreteBase(ConcreteBase): + __no_table__: bool + @classmethod + def __declare_first__(cls) -> None: ... + +class DeferredReflection: + @classmethod + def prepare(cls, engine) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/horizontal_shard.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/horizontal_shard.pyi new file mode 100644 index 000000000000..b217b31b9e0a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/horizontal_shard.pyi @@ -0,0 +1,25 @@ +from typing import Any, Generic, TypeVar + +from ..orm.query import Query +from ..orm.session import Session + +_T = TypeVar("_T") + +class ShardedQuery(Query[_T], Generic[_T]): + id_chooser: Any + query_chooser: Any + execute_chooser: Any + def __init__(self, *args, **kwargs) -> None: ... + def set_shard(self, shard_id): ... + +class ShardedSession(Session): + shard_chooser: Any + id_chooser: Any + execute_chooser: Any + query_chooser: Any + def __init__( + self, shard_chooser, id_chooser, execute_chooser: Any | None = ..., shards: Any | None = ..., query_cls=..., **kwargs + ): ... + def connection_callable(self, mapper: Any | None = ..., instance: Any | None = ..., shard_id: Any | None = ..., **kwargs): ... 
+ def get_bind(self, mapper: Any | None = ..., shard_id: Any | None = ..., instance: Any | None = ..., clause: Any | None = ..., **kw): ... # type: ignore[override] + def bind_shard(self, shard_id, bind) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/hybrid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/hybrid.pyi new file mode 100644 index 000000000000..84cc1f5b465b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/hybrid.pyi @@ -0,0 +1,67 @@ +from typing import Any + +from ..orm import interfaces + +HYBRID_METHOD: Any +HYBRID_PROPERTY: Any + +class hybrid_method(interfaces.InspectionAttrInfo): + is_attribute: bool + extension_type: Any + func: Any + def __init__(self, func, expr: Any | None = ...) -> None: ... + def __get__(self, instance, owner): ... + expr: Any + def expression(self, expr): ... + +class hybrid_property(interfaces.InspectionAttrInfo): + is_attribute: bool + extension_type: Any + fget: Any + fset: Any + fdel: Any + expr: Any + custom_comparator: Any + update_expr: Any + def __init__( + self, + fget, + fset: Any | None = ..., + fdel: Any | None = ..., + expr: Any | None = ..., + custom_comparator: Any | None = ..., + update_expr: Any | None = ..., + ) -> None: ... + def __get__(self, instance, owner): ... + def __set__(self, instance, value) -> None: ... + def __delete__(self, instance) -> None: ... + @property + def overrides(self): ... + def getter(self, fget): ... + def setter(self, fset): ... + def deleter(self, fdel): ... + def expression(self, expr): ... + def comparator(self, comparator): ... + def update_expression(self, meth): ... + +class Comparator(interfaces.PropComparator): + property: Any + expression: Any + def __init__(self, expression) -> None: ... + def __clause_element__(self): ... + def adapt_to_entity(self, adapt_to_entity): ... 
+ +_property = property + +class ExprComparator(Comparator): + cls: Any + expression: Any + hybrid: Any + def __init__(self, cls, expression, hybrid) -> None: ... + def __getattr__(self, key): ... + @_property + def info(self): ... + @_property + def property(self): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/indexable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/indexable.pyi new file mode 100644 index 000000000000..0150ec714b66 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/indexable.pyi @@ -0,0 +1,15 @@ +from typing import Any + +from ..ext.hybrid import hybrid_property + +class index_property(hybrid_property): + attr_name: Any + index: Any + default: Any + datatype: Any + onebased: Any + def __init__(self, attr_name, index, default=..., datatype: Any | None = ..., mutable: bool = ..., onebased: bool = ...): ... + def fget(self, instance): ... + def fset(self, instance, value) -> None: ... + def fdel(self, instance) -> None: ... + def expr(self, model): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/instrumentation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/instrumentation.pyi new file mode 100644 index 000000000000..6420c913f7dc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/instrumentation.pyi @@ -0,0 +1,54 @@ +from typing import Any + +from ..orm.instrumentation import ClassManager, InstrumentationFactory + +INSTRUMENTATION_MANAGER: str + +def find_native_user_instrumentation_hook(cls): ... + +instrumentation_finders: Any + +class ExtendedInstrumentationRegistry(InstrumentationFactory): + def unregister(self, class_) -> None: ... + def manager_of_class(self, cls): ... 
+ def state_of(self, instance): ... + def dict_of(self, instance): ... + +class InstrumentationManager: + def __init__(self, class_) -> None: ... + def manage(self, class_, manager) -> None: ... + def unregister(self, class_, manager) -> None: ... + def manager_getter(self, class_): ... + def instrument_attribute(self, class_, key, inst) -> None: ... + def post_configure_attribute(self, class_, key, inst) -> None: ... + def install_descriptor(self, class_, key, inst) -> None: ... + def uninstall_descriptor(self, class_, key) -> None: ... + def install_member(self, class_, key, implementation) -> None: ... + def uninstall_member(self, class_, key) -> None: ... + def instrument_collection_class(self, class_, key, collection_class): ... + def get_instance_dict(self, class_, instance): ... + def initialize_instance_dict(self, class_, instance) -> None: ... + def install_state(self, class_, instance, state) -> None: ... + def remove_state(self, class_, instance) -> None: ... + def state_getter(self, class_): ... + def dict_getter(self, class_): ... + +class _ClassInstrumentationAdapter(ClassManager): + def __init__(self, class_, override) -> None: ... + def manage(self) -> None: ... + def unregister(self) -> None: ... + def manager_getter(self): ... + def instrument_attribute(self, key, inst, propagated: bool = ...) -> None: ... + def post_configure_attribute(self, key) -> None: ... + def install_descriptor(self, key, inst) -> None: ... + def uninstall_descriptor(self, key) -> None: ... + def install_member(self, key, implementation) -> None: ... + def uninstall_member(self, key) -> None: ... + def instrument_collection_class(self, key, collection_class): ... + def initialize_collection(self, key, state, factory): ... + def new_instance(self, state: Any | None = ...): ... + def setup_instance(self, instance, state: Any | None = ...): ... + def teardown_instance(self, instance) -> None: ... + def has_state(self, instance): ... + def state_getter(self): ... 
+ def dict_getter(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mutable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mutable.pyi new file mode 100644 index 000000000000..c1e4be44b324 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mutable.pyi @@ -0,0 +1,64 @@ +from typing import Any + +class MutableBase: + @classmethod + def coerce(cls, key, value) -> None: ... + +class Mutable(MutableBase): + def changed(self) -> None: ... + @classmethod + def associate_with_attribute(cls, attribute) -> None: ... + @classmethod + def associate_with(cls, sqltype) -> None: ... + @classmethod + def as_mutable(cls, sqltype): ... + +class MutableComposite(MutableBase): + def changed(self) -> None: ... + +class MutableDict(Mutable, dict[Any, Any]): + def __setitem__(self, key, value) -> None: ... + def setdefault(self, key, value): ... + def __delitem__(self, key) -> None: ... + def update(self, *a, **kw) -> None: ... + def pop(self, *arg): ... + def popitem(self): ... + def clear(self) -> None: ... + @classmethod + def coerce(cls, key, value): ... + +class MutableList(Mutable, list[Any]): + def __reduce_ex__(self, proto): ... + def __setitem__(self, index, value) -> None: ... + def __setslice__(self, start, end, value) -> None: ... + def __delitem__(self, index) -> None: ... + def __delslice__(self, start, end) -> None: ... + def pop(self, *arg): ... + def append(self, x) -> None: ... + def extend(self, x) -> None: ... + def __iadd__(self, x): ... # type: ignore[misc] + def insert(self, i, x) -> None: ... + def remove(self, i) -> None: ... + def clear(self) -> None: ... + def sort(self, **kw) -> None: ... + def reverse(self) -> None: ... + @classmethod + def coerce(cls, index, value): ... + +class MutableSet(Mutable, set[Any]): + def update(self, *arg) -> None: ... + def intersection_update(self, *arg) -> None: ... 
+ def difference_update(self, *arg) -> None: ... + def symmetric_difference_update(self, *arg) -> None: ... + def __ior__(self, other): ... # type: ignore[misc] + def __iand__(self, other): ... # type: ignore[misc] + def __ixor__(self, other): ... # type: ignore[misc] + def __isub__(self, other): ... # type: ignore[misc] + def add(self, elem) -> None: ... + def remove(self, elem) -> None: ... + def discard(self, elem) -> None: ... + def pop(self, *arg): ... + def clear(self) -> None: ... + @classmethod + def coerce(cls, index, value): ... + def __reduce_ex__(self, proto): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/apply.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/apply.pyi new file mode 100644 index 000000000000..44aacc72f47f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/apply.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from . import util + +AssignmentStmt = Any # from mypy.nodes +NameExpr = Any # from mypy.nodes +StrExpr = Any # from mypy.nodes +SemanticAnalyzerPluginInterface = Any # from mypy.plugin +ProperType = Any # from mypy.types + +def apply_mypy_mapped_attr( + cls, api: SemanticAnalyzerPluginInterface, item: NameExpr | StrExpr, attributes: list[util.SQLAlchemyAttribute] +) -> None: ... +def re_apply_declarative_assignments( + cls, api: SemanticAnalyzerPluginInterface, attributes: list[util.SQLAlchemyAttribute] +) -> None: ... +def apply_type_to_mapped_statement( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + lvalue: NameExpr, + left_hand_explicit_type: ProperType | None, + python_type_for_type: ProperType | None, +) -> None: ... 
+def add_additional_orm_attributes( + cls, api: SemanticAnalyzerPluginInterface, attributes: list[util.SQLAlchemyAttribute] +) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/decl_class.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/decl_class.pyi new file mode 100644 index 000000000000..0a417cb58a8f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/decl_class.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from . import util + +SemanticAnalyzerPluginInterface = Any # from mypy.plugin + +def scan_declarative_assignments_and_apply_types( + cls, api: SemanticAnalyzerPluginInterface, is_mixin_scan: bool = ... +) -> list[util.SQLAlchemyAttribute] | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/infer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/infer.pyi new file mode 100644 index 000000000000..7faabc366261 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/infer.pyi @@ -0,0 +1,25 @@ +from collections.abc import Sequence +from typing import Any + +AssignmentStmt = Any # from mypy.nodes +Expression = Any # from mypy.nodes +RefExpr = Any # from mypy.nodes +TypeInfo = Any # from mypy.nodes +Var = Any # from mypy.nodes +StrExpr = Any # from mypy.nodes +SemanticAnalyzerPluginInterface = Any # from mypy.plugin +ProperType = Any # from mypy.types + +def infer_type_from_right_hand_nameexpr( + api: SemanticAnalyzerPluginInterface, + stmt: AssignmentStmt, + node: Var, + left_hand_explicit_type: ProperType | None, + infer_from_right_side: RefExpr, +) -> ProperType | None: ... +def infer_type_from_left_hand_type_only( + api: SemanticAnalyzerPluginInterface, node: Var, left_hand_explicit_type: ProperType | None +) -> ProperType | None: ... 
+def extract_python_type_from_typeengine( + api: SemanticAnalyzerPluginInterface, node: TypeInfo, type_args: Sequence[Expression] +) -> ProperType: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/names.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/names.pyi new file mode 100644 index 000000000000..f3c4b45c4e48 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/names.pyi @@ -0,0 +1,38 @@ +from typing import Any + +from ...util import symbol + +ClassDef = Any # from mypy.nodes +Expression = Any # from mypy.nodes +MemberExpr = Any # from mypy.nodes +NameExpr = Any # from mypy.nodes +SymbolNode = Any # from mypy.nodes +TypeInfo = Any # from mypy.nodes +StrExpr = Any # from mypy.nodes +SemanticAnalyzerPluginInterface = Any # from mypy.plugin +UnboundType = Any # from mypy.types + +COLUMN: symbol +RELATIONSHIP: symbol +REGISTRY: symbol +COLUMN_PROPERTY: symbol +TYPEENGINE: symbol +MAPPED: symbol +DECLARATIVE_BASE: symbol +DECLARATIVE_META: symbol +MAPPED_DECORATOR: symbol +SYNONYM_PROPERTY: symbol +COMPOSITE_PROPERTY: symbol +DECLARED_ATTR: symbol +MAPPER_PROPERTY: symbol +AS_DECLARATIVE: symbol +AS_DECLARATIVE_BASE: symbol +DECLARATIVE_MIXIN: symbol +QUERY_EXPRESSION: symbol + +def has_base_type_id(info: TypeInfo, type_id: int) -> bool: ... +def mro_has_id(mro: list[TypeInfo], type_id: int) -> bool: ... +def type_id_for_unbound_type(type_: UnboundType, cls: ClassDef, api: SemanticAnalyzerPluginInterface) -> int | None: ... +def type_id_for_callee(callee: Expression) -> int | None: ... +def type_id_for_named_node(node: NameExpr | MemberExpr | SymbolNode) -> int | None: ... +def type_id_for_fullname(fullname: str) -> int | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/plugin.pyi new file mode 100644 index 000000000000..8ec411585dcf --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/plugin.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable +from typing import Any + +MypyFile = Any # from mypy.nodes +AttributeContext = Any # from mypy.plugin +ClassDefContext = Any # from mypy.plugin +DynamicClassDefContext = Any # from mypy.plugin +Plugin = Any # from mypy.plugin +Type = Any # from mypy.types + +class SQLAlchemyPlugin(Plugin): + def get_dynamic_class_hook(self, fullname: str) -> Callable[[DynamicClassDefContext], None] | None: ... + def get_customize_class_mro_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: ... + def get_class_decorator_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: ... + def get_metaclass_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: ... + def get_base_class_hook(self, fullname: str) -> Callable[[ClassDefContext], None] | None: ... + def get_attribute_hook(self, fullname: str) -> Callable[[AttributeContext], Type] | None: ... + def get_additional_deps(self, file: MypyFile) -> list[tuple[int, str, int]]: ... + +def plugin(version: str) -> type[SQLAlchemyPlugin]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/util.pyi new file mode 100644 index 000000000000..73f16b6b265f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/mypy/util.pyi @@ -0,0 +1,49 @@ +from collections.abc import Iterable, Iterator +from typing import Any, TypeVar, overload + +CallExpr = Any # from mypy.nodes +Context = Any # from mypy.nodes +Expression = Any # from mypy.nodes +JsonDict = Any # from mypy.nodes +NameExpr = Any # from mypy.nodes +Statement = Any # from mypy.nodes +TypeInfo = Any # from mypy.nodes +AttributeContext = Any # from mypy.plugin +ClassDefContext = Any # from mypy.plugin +DynamicClassDefContext = Any # from mypy.plugin +SemanticAnalyzerPluginInterface = Any # from mypy.plugin +Type = Any # from mypy.types + +_TArgType = TypeVar("_TArgType", bound=CallExpr | NameExpr) + +class SQLAlchemyAttribute: + name: Any + line: Any + column: Any + type: Any + info: Any + def __init__(self, name: str, line: int, column: int, typ: Type | None, info: TypeInfo) -> None: ... + def serialize(self) -> JsonDict: ... + def expand_typevar_from_subtype(self, sub_type: TypeInfo) -> None: ... + @classmethod + def deserialize(cls, info: TypeInfo, data: JsonDict, api: SemanticAnalyzerPluginInterface) -> SQLAlchemyAttribute: ... + +def name_is_dunder(name): ... +def establish_as_sqlalchemy(info: TypeInfo) -> None: ... +def set_is_base(info: TypeInfo) -> None: ... +def get_is_base(info: TypeInfo) -> bool: ... +def has_declarative_base(info: TypeInfo) -> bool: ... +def set_has_table(info: TypeInfo) -> None: ... +def get_has_table(info: TypeInfo) -> bool: ... +def get_mapped_attributes(info: TypeInfo, api: SemanticAnalyzerPluginInterface) -> list[SQLAlchemyAttribute] | None: ... +def set_mapped_attributes(info: TypeInfo, attributes: list[SQLAlchemyAttribute]) -> None: ... 
+def fail(api: SemanticAnalyzerPluginInterface, msg: str, ctx: Context) -> None: ... +def add_global(ctx: ClassDefContext | DynamicClassDefContext, module: str, symbol_name: str, asname: str) -> None: ... +@overload +def get_callexpr_kwarg(callexpr: CallExpr, name: str, *, expr_types: None = ...) -> CallExpr | NameExpr | None: ... +@overload +def get_callexpr_kwarg(callexpr: CallExpr, name: str, *, expr_types: tuple[type[_TArgType], ...]) -> _TArgType | None: ... +def flatten_typechecking(stmts: Iterable[Statement]) -> Iterator[Statement]: ... +def unbound_to_instance(api: SemanticAnalyzerPluginInterface, typ: Type) -> Type: ... +def info_for_cls(cls, api: SemanticAnalyzerPluginInterface) -> TypeInfo | None: ... +def expr_to_mapped_constructor(expr: Expression) -> CallExpr: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/orderinglist.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/orderinglist.pyi new file mode 100644 index 000000000000..a4fe8305f455 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/orderinglist.pyi @@ -0,0 +1,21 @@ +from typing import Any + +def ordering_list(attr, count_from: Any | None = ..., **kw): ... + +class OrderingList(list[Any]): + ordering_attr: Any + ordering_func: Any + reorder_on_append: Any + def __init__( + self, ordering_attr: Any | None = ..., ordering_func: Any | None = ..., reorder_on_append: bool = ... + ) -> None: ... + def reorder(self) -> None: ... + def append(self, entity) -> None: ... + def insert(self, index, entity) -> None: ... + def remove(self, entity) -> None: ... + def pop(self, index: int = ...): ... # type: ignore[override] + def __setitem__(self, index, entity) -> None: ... + def __delitem__(self, index) -> None: ... + def __setslice__(self, start, end, values) -> None: ... + def __delslice__(self, start, end) -> None: ... + def __reduce__(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/serializer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/serializer.pyi new file mode 100644 index 000000000000..d4a4a2cad506 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/ext/serializer.pyi @@ -0,0 +1,6 @@ +from typing import Any + +def Serializer(*args, **kw): ... +def Deserializer(file, metadata: Any | None = ..., scoped_session: Any | None = ..., engine: Any | None = ...): ... +def dumps(obj, protocol=...): ... +def loads(data, metadata: Any | None = ..., scoped_session: Any | None = ..., engine: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/__init__.pyi new file mode 100644 index 000000000000..00f3a300bcd7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/__init__.pyi @@ -0,0 +1,5 @@ +from typing import Any + +from .engine import Connection as Connection, Engine as Engine, create_engine as create_engine + +select: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/engine.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/engine.pyi new file mode 100644 index 000000000000..bd4ff1dfaf1a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/engine.pyi @@ -0,0 +1,37 @@ +from typing import Any, overload +from typing_extensions import Literal + +from ..engine import Connection as _LegacyConnection, Engine as _LegacyEngine +from ..engine.base import OptionEngineMixin +from ..engine.mock import MockConnection +from ..engine.url import URL + +NO_OPTIONS: Any + +@overload +def create_engine(url: URL | str, *, strategy: Literal["mock"], **kwargs) -> MockConnection: ... 
# type: ignore[misc] +@overload +def create_engine( + url: URL | str, *, module: Any | None = ..., enable_from_linting: bool = ..., future: bool = ..., **kwargs +) -> Engine: ... + +class Connection(_LegacyConnection): + def begin(self): ... + def begin_nested(self): ... + def commit(self) -> None: ... + def rollback(self) -> None: ... + def close(self) -> None: ... + def execute(self, statement, parameters: Any | None = ..., execution_options: Any | None = ...): ... # type: ignore[override] + def scalar(self, statement, parameters: Any | None = ..., execution_options: Any | None = ...): ... # type: ignore[override] + +class Engine(_LegacyEngine): + transaction: Any + run_callable: Any + execute: Any + scalar: Any + table_names: Any + has_table: Any + def begin(self) -> None: ... # type: ignore[override] + def connect(self): ... + +class OptionEngine(OptionEngineMixin, Engine): ... # type: ignore[misc] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/orm/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/future/orm/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/inspection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/inspection.pyi new file mode 100644 index 000000000000..d758818c57fc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/inspection.pyi @@ -0,0 +1 @@ +def inspect(subject, raiseerr: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/log.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/log.pyi new file mode 100644 index 000000000000..2f111522845a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/log.pyi @@ -0,0 +1,39 @@ +from _typeshed import Self +from logging import Logger +from typing import Any, TypeVar, overload +from typing_extensions import Literal + +_ClsT = TypeVar("_ClsT", bound=type) +_EchoFlag = bool | Literal["debug"] | None + +rootlogger: Any + +def class_logger(cls: _ClsT) -> _ClsT: ... + +class Identified: + logging_name: str | None + +class InstanceLogger: + echo: _EchoFlag + logger: Logger + def __init__(self, echo: _EchoFlag, name: str | None) -> None: ... + def debug(self, msg, *args, **kwargs) -> None: ... + def info(self, msg, *args, **kwargs) -> None: ... + def warning(self, msg, *args, **kwargs) -> None: ... + warn = warning + def error(self, msg, *args, **kwargs) -> None: ... + def exception(self, msg, *args, **kwargs) -> None: ... + def critical(self, msg, *args, **kwargs) -> None: ... + def log(self, level, msg, *args, **kwargs) -> None: ... + def isEnabledFor(self, level): ... + def getEffectiveLevel(self): ... + +def instance_logger(instance: Identified, echoflag: _EchoFlag = ...) -> None: ... + +class echo_property: + __doc__: str + @overload + def __get__(self: Self, instance: None, owner: object) -> Self: ... + @overload + def __get__(self, instance: Identified, owner: object) -> _EchoFlag: ... + def __set__(self, instance: Identified, value: _EchoFlag) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/__init__.pyi new file mode 100644 index 000000000000..e4b08eeb92a0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/__init__.pyi @@ -0,0 +1,125 @@ +from typing import Any + +from ..util.langhelpers import public_factory as public_factory +from . import exc as exc, strategy_options as strategy_options +from .attributes import ( + AttributeEvent as AttributeEvent, + InstrumentedAttribute as InstrumentedAttribute, + Mapped as Mapped, + QueryableAttribute as QueryableAttribute, +) +from .context import QueryContext as QueryContext +from .decl_api import ( + DeclarativeMeta as DeclarativeMeta, + as_declarative as as_declarative, + declarative_base as declarative_base, + declarative_mixin as declarative_mixin, + declared_attr as declared_attr, + has_inherited_table as has_inherited_table, + registry as registry, + synonym_for as synonym_for, +) +from .descriptor_props import CompositeProperty as CompositeProperty, SynonymProperty as SynonymProperty +from .dynamic import AppenderQuery as AppenderQuery +from .events import ( + AttributeEvents as AttributeEvents, + InstanceEvents as InstanceEvents, + InstrumentationEvents as InstrumentationEvents, + MapperEvents as MapperEvents, + QueryEvents as QueryEvents, + SessionEvents as SessionEvents, +) +from .identity import IdentityMap as IdentityMap +from .instrumentation import ClassManager as ClassManager +from .interfaces import ( + EXT_CONTINUE as EXT_CONTINUE, + EXT_SKIP as EXT_SKIP, + EXT_STOP as EXT_STOP, + MANYTOMANY as MANYTOMANY, + MANYTOONE as MANYTOONE, + NOT_EXTENSION as NOT_EXTENSION, + ONETOMANY as ONETOMANY, + InspectionAttr as InspectionAttr, + InspectionAttrInfo as InspectionAttrInfo, + MapperProperty as MapperProperty, + PropComparator as PropComparator, + UserDefinedOption as 
UserDefinedOption, +) +from .loading import merge_frozen_result as merge_frozen_result, merge_result as merge_result +from .mapper import ( + Mapper as Mapper, + class_mapper as class_mapper, + configure_mappers as configure_mappers, + reconstructor as reconstructor, + validates as validates, +) +from .properties import ColumnProperty as ColumnProperty +from .query import AliasOption as AliasOption, FromStatement as FromStatement, Query as Query +from .relationships import RelationshipProperty as RelationshipProperty, foreign as foreign, remote as remote +from .scoping import scoped_session as scoped_session +from .session import ( + ORMExecuteState as ORMExecuteState, + Session as Session, + SessionTransaction as SessionTransaction, + close_all_sessions as close_all_sessions, + make_transient as make_transient, + make_transient_to_detached as make_transient_to_detached, + object_session as object_session, + sessionmaker as sessionmaker, +) +from .state import AttributeState as AttributeState, InstanceState as InstanceState +from .strategy_options import Load as Load +from .unitofwork import UOWTransaction as UOWTransaction +from .util import ( + Bundle as Bundle, + CascadeOptions as CascadeOptions, + LoaderCriteriaOption as LoaderCriteriaOption, + aliased as aliased, + join as join, + object_mapper as object_mapper, + outerjoin as outerjoin, + polymorphic_union as polymorphic_union, + was_deleted as was_deleted, + with_parent as with_parent, + with_polymorphic as with_polymorphic, +) + +def create_session(bind: Any | None = ..., **kwargs): ... + +with_loader_criteria: Any +relationship: Any + +def relation(*arg, **kw): ... +def dynamic_loader(argument, **kw): ... + +column_property: Any +composite: Any + +def backref(name, **kwargs): ... +def deferred(*columns, **kw): ... +def query_expression(default_expr=...): ... + +mapper: Any +synonym: Any + +def clear_mappers() -> None: ... 
+ +joinedload: Any +contains_eager: Any +defer: Any +undefer: Any +undefer_group: Any +with_expression: Any +load_only: Any +lazyload: Any +subqueryload: Any +selectinload: Any +immediateload: Any +noload: Any +raiseload: Any +defaultload: Any +selectin_polymorphic: Any + +def eagerload(*args, **kwargs): ... + +contains_alias: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/attributes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/attributes.pyi new file mode 100644 index 000000000000..bd79f138dadb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/attributes.pyi @@ -0,0 +1,250 @@ +from typing import Any, Generic, NamedTuple, TypeVar + +from ..sql import base as sql_base, roles, traversals +from ..util import memoized_property +from . import interfaces + +_T = TypeVar("_T") + +class NoKey(str): ... + +NO_KEY: Any + +class QueryableAttribute( + interfaces._MappedAttribute, + interfaces.InspectionAttr, + interfaces.PropComparator, + traversals.HasCopyInternals, + roles.JoinTargetRole, + roles.OnClauseRole, + sql_base.Immutable, + sql_base.MemoizedHasCacheKey, +): + is_attribute: bool + __visit_name__: str + class_: Any + key: Any + impl: Any + comparator: Any + def __init__( + self, + class_, + key, + parententity, + impl: Any | None = ..., + comparator: Any | None = ..., + of_type: Any | None = ..., + extra_criteria=..., + ) -> None: ... + def __reduce__(self): ... + def get_history(self, instance, passive=...): ... + @memoized_property + def info(self): ... + @memoized_property + def parent(self): ... + @memoized_property + def expression(self): ... + def __clause_element__(self): ... + def adapt_to_entity(self, adapt_to_entity): ... + def of_type(self, entity): ... + def and_(self, *other): ... + def label(self, name): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... 
+ def hasparent(self, state, optimistic: bool = ...): ... + def __getattr__(self, key): ... + @memoized_property + def property(self): ... + +class Mapped(QueryableAttribute, Generic[_T]): + def __get__(self, instance, owner) -> None: ... + def __set__(self, instance, value) -> None: ... + def __delete__(self, instance) -> None: ... + +class InstrumentedAttribute(Mapped[Any]): + inherit_cache: bool + def __set__(self, instance, value) -> None: ... + def __delete__(self, instance) -> None: ... + def __get__(self, instance, owner): ... + +class _HasEntityNamespace(NamedTuple): + entity_namespace: Any + +class HasEntityNamespace(_HasEntityNamespace): + is_mapper: bool + is_aliased_class: bool + +def create_proxied_attribute(descriptor): ... + +OP_REMOVE: Any +OP_APPEND: Any +OP_REPLACE: Any +OP_BULK_REPLACE: Any +OP_MODIFIED: Any + +class AttributeEvent: + impl: Any + op: Any + parent_token: Any + def __init__(self, attribute_impl, op) -> None: ... + def __eq__(self, other): ... + @property + def key(self): ... + def hasparent(self, state): ... + +Event = AttributeEvent + +class AttributeImpl: + class_: Any + key: Any + callable_: Any + dispatch: Any + trackparent: Any + parent_token: Any + send_modified_events: Any + is_equal: Any + accepts_scalar_loader: Any + load_on_unexpire: Any + def __init__( + self, + class_, + key, + callable_, + dispatch, + trackparent: bool = ..., + compare_function: Any | None = ..., + active_history: bool = ..., + parent_token: Any | None = ..., + load_on_unexpire: bool = ..., + send_modified_events: bool = ..., + accepts_scalar_loader: Any | None = ..., + **kwargs, + ) -> None: ... + active_history: Any + def hasparent(self, state, optimistic: bool = ...): ... + def sethasparent(self, state, parent_state, value) -> None: ... + def get_history(self, state, dict_, passive=...) -> None: ... + def get_all_pending(self, state, dict_, passive=...) -> None: ... + def get(self, state, dict_, passive=...): ... 
+ def append(self, state, dict_, value, initiator, passive=...) -> None: ... + def remove(self, state, dict_, value, initiator, passive=...) -> None: ... + def pop(self, state, dict_, value, initiator, passive=...) -> None: ... + def set(self, state, dict_, value, initiator, passive=..., check_old: Any | None = ..., pop: bool = ...) -> None: ... + def get_committed_value(self, state, dict_, passive=...): ... + def set_committed_value(self, state, dict_, value): ... + +class ScalarAttributeImpl(AttributeImpl): + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + collection: bool + dynamic: bool + def __init__(self, *arg, **kw) -> None: ... + def delete(self, state, dict_) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def set(self, state, dict_, value, initiator, passive=..., check_old: Any | None = ..., pop: bool = ...) -> None: ... + def fire_replace_event(self, state, dict_, value, previous, initiator): ... + def fire_remove_event(self, state, dict_, value, initiator) -> None: ... + @property + def type(self) -> None: ... + +class ScalarObjectAttributeImpl(ScalarAttributeImpl): + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + collection: bool + def delete(self, state, dict_) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def get_all_pending(self, state, dict_, passive=...): ... + def set(self, state, dict_, value, initiator, passive=..., check_old: Any | None = ..., pop: bool = ...) -> None: ... + def fire_remove_event(self, state, dict_, value, initiator) -> None: ... + def fire_replace_event(self, state, dict_, value, previous, initiator): ... 
+ +class CollectionAttributeImpl(AttributeImpl): + default_accepts_scalar_loader: bool + uses_objects: bool + supports_population: bool + collection: bool + dynamic: bool + copy: Any + collection_factory: Any + def __init__( + self, + class_, + key, + callable_, + dispatch, + typecallable: Any | None = ..., + trackparent: bool = ..., + copy_function: Any | None = ..., + compare_function: Any | None = ..., + **kwargs, + ) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def get_all_pending(self, state, dict_, passive=...): ... + def fire_append_event(self, state, dict_, value, initiator): ... + def fire_append_wo_mutation_event(self, state, dict_, value, initiator): ... + def fire_pre_remove_event(self, state, dict_, initiator) -> None: ... + def fire_remove_event(self, state, dict_, value, initiator) -> None: ... + def delete(self, state, dict_) -> None: ... + def append(self, state, dict_, value, initiator, passive=...) -> None: ... + def remove(self, state, dict_, value, initiator, passive=...) -> None: ... + def pop(self, state, dict_, value, initiator, passive=...) -> None: ... + def set( + self, + state, + dict_, + value, + initiator: Any | None = ..., + passive=..., + check_old: Any | None = ..., + pop: bool = ..., + _adapt: bool = ..., + ) -> None: ... + def set_committed_value(self, state, dict_, value): ... + def get_collection(self, state, dict_, user_data: Any | None = ..., passive=...): ... + +def backref_listeners(attribute, key, uselist): ... + +class History: + def __bool__(self): ... + __nonzero__: Any + def empty(self): ... + def sum(self): ... + def non_deleted(self): ... + def non_added(self): ... + def has_changes(self): ... + def as_state(self): ... + @classmethod + def from_scalar_attribute(cls, attribute, state, current): ... + @classmethod + def from_object_attribute(cls, attribute, state, current, original=...): ... + @classmethod + def from_collection(cls, attribute, state, current): ... 
+ +HISTORY_BLANK: Any + +def get_history(obj, key, passive=...): ... +def get_state_history(state, key, passive=...): ... +def has_parent(cls, obj, key, optimistic: bool = ...): ... +def register_attribute(class_, key, **kw): ... +def register_attribute_impl( + class_, + key, + uselist: bool = ..., + callable_: Any | None = ..., + useobject: bool = ..., + impl_class: Any | None = ..., + backref: Any | None = ..., + **kw, +): ... +def register_descriptor(class_, key, comparator: Any | None = ..., parententity: Any | None = ..., doc: Any | None = ...): ... +def unregister_attribute(class_, key) -> None: ... +def init_collection(obj, key): ... +def init_state_collection(state, dict_, key): ... +def set_committed_value(instance, key, value) -> None: ... +def set_attribute(instance, key, value, initiator: Any | None = ...) -> None: ... +def get_attribute(instance, key): ... +def del_attribute(instance, key) -> None: ... +def flag_modified(instance, key) -> None: ... +def flag_dirty(instance) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/base.pyi new file mode 100644 index 000000000000..4d247f53c726 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/base.pyi @@ -0,0 +1,75 @@ +from typing import Any + +from ..util import memoized_property + +PASSIVE_NO_RESULT: Any +PASSIVE_CLASS_MISMATCH: Any +ATTR_WAS_SET: Any +ATTR_EMPTY: Any +NO_VALUE: Any +NEVER_SET: Any +NO_CHANGE: Any +CALLABLES_OK: Any +SQL_OK: Any +RELATED_OBJECT_OK: Any +INIT_OK: Any +NON_PERSISTENT_OK: Any +LOAD_AGAINST_COMMITTED: Any +NO_AUTOFLUSH: Any +NO_RAISE: Any +DEFERRED_HISTORY_LOAD: Any +PASSIVE_OFF: Any +PASSIVE_RETURN_NO_VALUE: Any +PASSIVE_NO_INITIALIZE: Any +PASSIVE_NO_FETCH: Any +PASSIVE_NO_FETCH_RELATED: Any +PASSIVE_ONLY_PERSISTENT: Any +DEFAULT_MANAGER_ATTR: str +DEFAULT_STATE_ATTR: str +EXT_CONTINUE: Any +EXT_STOP: Any +EXT_SKIP: Any +ONETOMANY: Any +MANYTOONE: Any +MANYTOMANY: Any +NOT_EXTENSION: Any + +_never_set: frozenset[Any] +_none_set: frozenset[Any] + +def manager_of_class(cls): ... + +instance_state: Any +instance_dict: Any + +def instance_str(instance): ... +def state_str(state): ... +def state_class_str(state): ... +def attribute_str(instance, attribute): ... +def state_attribute_str(state, attribute): ... +def object_mapper(instance): ... +def object_state(instance): ... +def _class_to_mapper(class_or_mapper): ... +def _mapper_or_none(entity): ... +def _is_mapped_class(entity): ... + +_state_mapper: Any + +def class_mapper(class_, configure: bool = ...): ... + +class InspectionAttr: + is_selectable: bool + is_aliased_class: bool + is_instance: bool + is_mapper: bool + is_bundle: bool + is_property: bool + is_attribute: bool + is_clause_element: bool + extension_type: Any + +class InspectionAttrInfo(InspectionAttr): + @memoized_property + def info(self): ... + +class _MappedAttribute: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/clsregistry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/clsregistry.pyi new file mode 100644 index 000000000000..585189669aa2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/clsregistry.pyi @@ -0,0 +1,51 @@ +from typing import Any + +def add_class(classname, cls, decl_class_registry) -> None: ... +def remove_class(classname, cls, decl_class_registry) -> None: ... + +class _MultipleClassMarker: + on_remove: Any + contents: Any + def __init__(self, classes, on_remove: Any | None = ...) -> None: ... + def remove_item(self, cls) -> None: ... + def __iter__(self): ... + def attempt_get(self, path, key): ... + def add_item(self, item) -> None: ... + +class _ModuleMarker: + parent: Any + name: Any + contents: Any + mod_ns: Any + path: Any + def __init__(self, name, parent) -> None: ... + def __contains__(self, name): ... + def __getitem__(self, name): ... + def resolve_attr(self, key): ... + def get_module(self, name): ... + def add_class(self, name, cls): ... + def remove_class(self, name, cls) -> None: ... + +class _ModNS: + def __init__(self, parent) -> None: ... + def __getattr__(self, key): ... + +class _GetColumns: + cls: Any + def __init__(self, cls) -> None: ... + def __getattr__(self, key): ... + +class _GetTable: + key: Any + metadata: Any + def __init__(self, key, metadata) -> None: ... + def __getattr__(self, key): ... + +class _class_resolver: + cls: Any + prop: Any + arg: Any + fallback: Any + favor_tables: Any + def __init__(self, cls, prop, fallback, arg, favor_tables: bool = ...) -> None: ... + def __call__(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/collections.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/collections.pyi new file mode 100644 index 000000000000..fd59745d0ecb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/collections.pyi @@ -0,0 +1,91 @@ +from typing import Any + +class _PlainColumnGetter: + cols: Any + composite: Any + def __init__(self, cols) -> None: ... + def __reduce__(self): ... + def __call__(self, value): ... + +class _SerializableColumnGetter: + colkeys: Any + composite: Any + def __init__(self, colkeys) -> None: ... + def __reduce__(self): ... + def __call__(self, value): ... + +class _SerializableColumnGetterV2(_PlainColumnGetter): + colkeys: Any + composite: Any + def __init__(self, colkeys) -> None: ... + def __reduce__(self): ... + +def column_mapped_collection(mapping_spec): ... + +class _SerializableAttrGetter: + name: Any + getter: Any + def __init__(self, name) -> None: ... + def __call__(self, target): ... + def __reduce__(self): ... + +def attribute_mapped_collection(attr_name): ... +def mapped_collection(keyfunc): ... + +class collection: + @staticmethod + def appender(fn): ... + @staticmethod + def remover(fn): ... + @staticmethod + def iterator(fn): ... + @staticmethod + def internally_instrumented(fn): ... + @staticmethod + def converter(fn): ... + @staticmethod + def adds(arg): ... + @staticmethod + def replaces(arg): ... + @staticmethod + def removes(arg): ... + @staticmethod + def removes_return(): ... + +collection_adapter: Any + +class CollectionAdapter: + attr: Any + owner_state: Any + invalidated: bool + empty: bool + def __init__(self, attr, owner_state, data) -> None: ... + @property + def data(self): ... + def bulk_appender(self): ... + def append_with_event(self, item, initiator: Any | None = ...) -> None: ... + def append_without_event(self, item) -> None: ... 
+ def append_multiple_without_event(self, items) -> None: ... + def bulk_remover(self): ... + def remove_with_event(self, item, initiator: Any | None = ...) -> None: ... + def remove_without_event(self, item) -> None: ... + def clear_with_event(self, initiator: Any | None = ...) -> None: ... + def clear_without_event(self) -> None: ... + def __iter__(self): ... + def __len__(self): ... + def __bool__(self): ... + __nonzero__: Any + def fire_append_wo_mutation_event(self, item, initiator: Any | None = ...): ... + def fire_append_event(self, item, initiator: Any | None = ...): ... + def fire_remove_event(self, item, initiator: Any | None = ...) -> None: ... + def fire_pre_remove_event(self, initiator: Any | None = ...) -> None: ... + +class InstrumentedList(list[Any]): ... +class InstrumentedSet(set[Any]): ... +class InstrumentedDict(dict[Any, Any]): ... + +class MappedCollection(dict[Any, Any]): + keyfunc: Any + def __init__(self, keyfunc) -> None: ... + def set(self, value, _sa_initiator: Any | None = ...) -> None: ... + def remove(self, value, _sa_initiator: Any | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/context.pyi new file mode 100644 index 000000000000..b5fc3cd00396 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/context.pyi @@ -0,0 +1,189 @@ +from typing import Any + +from ..sql.base import CacheableOptions, CompileState, Options +from ..sql.selectable import SelectState + +LABEL_STYLE_LEGACY_ORM: Any + +class QueryContext: + class default_load_options(Options): ... 
+ load_options: Any + execution_options: Any + bind_arguments: Any + compile_state: Any + query: Any + session: Any + loaders_require_buffering: bool + loaders_require_uniquing: bool + params: Any + create_eager_joins: Any + propagated_loader_options: Any + attributes: Any + runid: Any + partials: Any + post_load_paths: Any + autoflush: Any + populate_existing: Any + invoke_all_eagers: Any + version_check: Any + refresh_state: Any + yield_per: Any + identity_token: Any + def __init__( + self, + compile_state, + statement, + params, + session, + load_options, + execution_options: Any | None = ..., + bind_arguments: Any | None = ..., + ) -> None: ... + +class ORMCompileState(CompileState): + class default_compile_options(CacheableOptions): ... + current_path: Any + def __init__(self, *arg, **kw) -> None: ... + @classmethod + def create_for_statement(cls, statement_container, compiler, **kw) -> None: ... # type: ignore[override] + @classmethod + def get_column_descriptions(cls, statement): ... + @classmethod + def orm_pre_session_exec(cls, session, statement, params, execution_options, bind_arguments, is_reentrant_invoke): ... + @classmethod + def orm_setup_cursor_result(cls, session, statement, params, execution_options, bind_arguments, result): ... + +class ORMFromStatementCompileState(ORMCompileState): + multi_row_eager_loaders: bool + compound_eager_adapter: Any + extra_criteria_entities: Any + eager_joins: Any + use_legacy_query_style: Any + statement_container: Any + requested_statement: Any + dml_table: Any + compile_options: Any + statement: Any + current_path: Any + attributes: Any + global_attributes: Any + primary_columns: Any + secondary_columns: Any + dedupe_columns: Any + create_eager_joins: Any + order_by: Any + @classmethod + def create_for_statement(cls, statement_container, compiler, **kw): ... 
+ +class ORMSelectCompileState(ORMCompileState, SelectState): # type: ignore # argument disparities between base classes + multi_row_eager_loaders: bool + compound_eager_adapter: Any + correlate: Any + correlate_except: Any + global_attributes: Any + select_statement: Any + for_statement: Any + use_legacy_query_style: Any + compile_options: Any + label_style: Any + current_path: Any + eager_order_by: Any + attributes: Any + primary_columns: Any + secondary_columns: Any + dedupe_columns: Any + eager_joins: Any + extra_criteria_entities: Any + create_eager_joins: Any + from_clauses: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... + @classmethod + def determine_last_joined_entity(cls, statement): ... + @classmethod + def all_selected_columns(cls, statement) -> None: ... + @classmethod + def get_columns_clause_froms(cls, statement): ... + @classmethod + def from_statement(cls, statement, from_statement): ... + +class _QueryEntity: + use_id_for_hash: bool + @classmethod + def to_compile_state(cls, compile_state, entities, entities_collection, is_current_entities): ... + +class _MapperEntity(_QueryEntity): + expr: Any + mapper: Any + entity_zero: Any + is_aliased_class: Any + path: Any + selectable: Any + def __init__(self, compile_state, entity, entities_collection, is_current_entities) -> None: ... + supports_single_entity: bool + use_id_for_hash: bool + @property + def type(self): ... + @property + def entity_zero_or_selectable(self): ... + def corresponds_to(self, entity): ... + def row_processor(self, context, result): ... + def setup_compile_state(self, compile_state) -> None: ... + +class _BundleEntity(_QueryEntity): + bundle: Any + expr: Any + type: Any + supports_single_entity: Any + def __init__( + self, compile_state, expr, entities_collection, setup_entities: bool = ..., parent_bundle: Any | None = ... + ) -> None: ... + @property + def mapper(self): ... + @property + def entity_zero(self): ... 
+ def corresponds_to(self, entity): ... + @property + def entity_zero_or_selectable(self): ... + def setup_compile_state(self, compile_state) -> None: ... + def row_processor(self, context, result): ... + +class _ColumnEntity(_QueryEntity): + raw_column_index: Any + translate_raw_column: Any + @property + def type(self): ... + def row_processor(self, context, result): ... + +class _RawColumnEntity(_ColumnEntity): + entity_zero: Any + mapper: Any + supports_single_entity: bool + expr: Any + raw_column_index: Any + translate_raw_column: Any + column: Any + entity_zero_or_selectable: Any + def __init__(self, compile_state, column, entities_collection, raw_column_index, parent_bundle: Any | None = ...) -> None: ... + def corresponds_to(self, entity): ... + def setup_compile_state(self, compile_state) -> None: ... + +class _ORMColumnEntity(_ColumnEntity): + supports_single_entity: bool + expr: Any + translate_raw_column: bool + raw_column_index: Any + entity_zero_or_selectable: Any + entity_zero: Any + mapper: Any + column: Any + def __init__( + self, compile_state, column, entities_collection, parententity, raw_column_index, parent_bundle: Any | None = ... + ) -> None: ... + def corresponds_to(self, entity): ... + def setup_compile_state(self, compile_state) -> None: ... + +class _IdentityTokenEntity(_ORMColumnEntity): + translate_raw_column: bool + def setup_compile_state(self, compile_state) -> None: ... + def row_processor(self, context, result): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/decl_api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/decl_api.pyi new file mode 100644 index 000000000000..482f4d3fca53 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/decl_api.pyi @@ -0,0 +1,128 @@ +from collections.abc import Callable +from typing import Any, ClassVar, TypeVar, overload + +from ..engine.interfaces import Connectable +from ..sql.schema import MetaData +from ..util import hybridproperty +from . import interfaces + +_ClsT = TypeVar("_ClsT", bound=type[Any]) +_DeclT = TypeVar("_DeclT", bound=type[_DeclarativeBase]) + +# Dynamic class as created by registry.generate_base() via DeclarativeMeta +# or another metaclass. This class does not exist at runtime. +class _DeclarativeBase(Any): # super classes are dynamic + registry: ClassVar[registry] + metadata: ClassVar[MetaData] + __abstract__: ClassVar[bool] + # not always existing: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + __mapper_cls__: ClassVar[Any] + __class_getitem__: ClassVar[Any] + +# Meta class (or function) that creates a _DeclarativeBase class. +_DeclarativeBaseMeta = Callable[[str, tuple[type[Any], ...], dict[str, Any]], _DeclT] + +def has_inherited_table(cls: type[Any]) -> bool: ... + +class DeclarativeMeta(type): + def __init__(cls, classname: str, bases: tuple[type[Any], ...], dict_: dict[str, Any], **kw: object) -> None: ... + def __setattr__(cls, key: str, value: Any) -> None: ... + def __delattr__(cls, key: str) -> None: ... + +def synonym_for(name, map_column: bool = ...): ... + +class declared_attr(interfaces._MappedAttribute, property): + def __init__(self, fget, cascading: bool = ...) -> None: ... + def __get__(self, self_, cls): ... + @hybridproperty + def cascading(self): ... + +class _stateful_declared_attr(declared_attr): + kw: Any + def __init__(self, **kw) -> None: ... 
+ def __call__(self, fn): ... + +def declarative_mixin(cls: _ClsT) -> _ClsT: ... +@overload +def declarative_base( + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + mapper: Any | None = ..., + cls: type[Any] | tuple[type[Any], ...] = ..., + name: str = ..., + constructor: Callable[..., None] = ..., + class_registry: dict[str, type[Any]] | None = ..., +) -> type[_DeclarativeBase]: ... +@overload +def declarative_base( + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + mapper: Any | None = ..., + cls: type[Any] | tuple[type[Any], ...] = ..., + name: str = ..., + constructor: Callable[..., None] = ..., + class_registry: dict[str, type[Any]] | None = ..., + *, + metaclass: _DeclarativeBaseMeta[_DeclT], +) -> _DeclT: ... +@overload +def declarative_base( + bind: Connectable | None, + metadata: MetaData | None, + mapper: Any | None, + cls: type[Any] | tuple[type[Any], ...], + name: str, + constructor: Callable[..., None], + class_registry: dict[str, type[Any]] | None, + metaclass: _DeclarativeBaseMeta[_DeclT], +) -> _DeclT: ... + +class registry: + metadata: MetaData + constructor: Callable[..., None] + def __init__( + self, + metadata: MetaData | None = ..., + class_registry: dict[str, type[Any]] | None = ..., + constructor: Callable[..., None] = ..., + _bind: Connectable | None = ..., + ) -> None: ... + @property + def mappers(self) -> frozenset[Any]: ... + def configure(self, cascade: bool = ...) -> None: ... + def dispose(self, cascade: bool = ...) -> None: ... + @overload + def generate_base( + self, mapper: Any | None = ..., cls: type[Any] | tuple[type[Any], ...] = ..., name: str = ... + ) -> type[_DeclarativeBase]: ... + @overload + def generate_base( + self, + mapper: Any | None = ..., + cls: type[Any] | tuple[type[Any], ...] = ..., + name: str = ..., + *, + metaclass: _DeclarativeBaseMeta[_DeclT], + ) -> _DeclT: ... 
+ @overload + def generate_base( + self, mapper: Any | None, cls: type[Any] | tuple[type[Any], ...], name: str, metaclass: _DeclarativeBaseMeta[_DeclT] + ) -> type[_DeclarativeBase]: ... + def mapped(self, cls: _ClsT) -> _ClsT: ... + # Return type of the callable is a _DeclarativeBase class with the passed in class as base. + # This could be better approximated with Intersection[PassedInClass, _DeclarativeBase]. + def as_declarative_base( + self, *, mapper: Any | None = ..., metaclass: _DeclarativeBaseMeta[_DeclT] = ... + ) -> Callable[[_ClsT], _ClsT | _DeclT | Any]: ... + def map_declaratively(self, cls): ... + def map_imperatively(self, class_, local_table: Any | None = ..., **kw): ... + +def as_declarative( + *, + bind: Connectable | None = ..., + metadata: MetaData | None = ..., + class_registry: dict[str, type[Any]] | None = ..., + mapper: Any | None = ..., + metaclass: _DeclarativeBaseMeta[_DeclT] = ..., +) -> Callable[[_ClsT], _ClsT | _DeclT | Any]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/decl_base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/decl_base.pyi new file mode 100644 index 000000000000..9d5dbf1c0723 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/decl_base.pyi @@ -0,0 +1,47 @@ +from typing import Any + +class _MapperConfig: + @classmethod + def setup_mapping(cls, registry, cls_, dict_, table, mapper_kw): ... + cls: Any + classname: Any + properties: Any + declared_attr_reg: Any + def __init__(self, registry, cls_, mapper_kw) -> None: ... + def set_cls_attribute(self, attrname, value): ... + +class _ImperativeMapperConfig(_MapperConfig): + dict_: Any + local_table: Any + inherits: Any + def __init__(self, registry, cls_, table, mapper_kw) -> None: ... + def map(self, mapper_kw=...): ... 
+ +class _ClassScanMapperConfig(_MapperConfig): + dict_: Any + local_table: Any + persist_selectable: Any + declared_columns: Any + column_copies: Any + table_args: Any + tablename: Any + mapper_args: Any + mapper_args_fn: Any + inherits: Any + def __init__(self, registry, cls_, dict_, table, mapper_kw) -> None: ... + def map(self, mapper_kw=...): ... + +class _DeferredMapperConfig(_ClassScanMapperConfig): + @property + def cls(self): ... + @cls.setter + def cls(self, class_) -> None: ... + @classmethod + def has_cls(cls, class_): ... + @classmethod + def raise_unmapped_for_cls(cls, class_) -> None: ... + @classmethod + def config_for_cls(cls, class_): ... + @classmethod + def classes_for_base(cls, base_cls, sort: bool = ...): ... + def map(self, mapper_kw=...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/dependency.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/dependency.pyi new file mode 100644 index 000000000000..8fe92087dc7e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/dependency.pyi @@ -0,0 +1,74 @@ +from typing import Any + +class DependencyProcessor: + prop: Any + cascade: Any + mapper: Any + parent: Any + secondary: Any + direction: Any + post_update: Any + passive_deletes: Any + passive_updates: Any + enable_typechecks: Any + sort_key: Any + key: Any + def __init__(self, prop) -> None: ... + @classmethod + def from_relationship(cls, prop): ... + def hasparent(self, state): ... + def per_property_preprocessors(self, uow) -> None: ... + def per_property_flush_actions(self, uow) -> None: ... + def per_state_flush_actions(self, uow, states, isdelete) -> None: ... + def presort_deletes(self, uowcommit, states): ... + def presort_saves(self, uowcommit, states): ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... 
+ def prop_has_changes(self, uowcommit, states, isdelete): ... + +class OneToManyDP(DependencyProcessor): + def per_property_dependencies( + self, uow, parent_saves, child_saves, parent_deletes, child_deletes, after_save, before_delete + ) -> None: ... + def per_state_dependencies( + self, uow, save_parent, delete_parent, child_action, after_save, before_delete, isdelete, childisdelete + ) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uowcommit, states) -> None: ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... + +class ManyToOneDP(DependencyProcessor): + def __init__(self, prop) -> None: ... + def per_property_dependencies( + self, uow, parent_saves, child_saves, parent_deletes, child_deletes, after_save, before_delete + ) -> None: ... + def per_state_dependencies( + self, uow, save_parent, delete_parent, child_action, after_save, before_delete, isdelete, childisdelete + ) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uowcommit, states) -> None: ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... + +class DetectKeySwitch(DependencyProcessor): + def per_property_preprocessors(self, uow) -> None: ... + def per_property_flush_actions(self, uow) -> None: ... + def per_state_flush_actions(self, uow, states, isdelete) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uow, states) -> None: ... + def prop_has_changes(self, uow, states, isdelete): ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... + +class ManyToManyDP(DependencyProcessor): + def per_property_dependencies( + self, uow, parent_saves, child_saves, parent_deletes, child_deletes, after_save, before_delete + ) -> None: ... 
+ def per_state_dependencies( + self, uow, save_parent, delete_parent, child_action, after_save, before_delete, isdelete, childisdelete + ) -> None: ... + def presort_deletes(self, uowcommit, states) -> None: ... + def presort_saves(self, uowcommit, states) -> None: ... + def process_deletes(self, uowcommit, states) -> None: ... + def process_saves(self, uowcommit, states) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/descriptor_props.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/descriptor_props.pyi new file mode 100644 index 000000000000..f849acad086e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/descriptor_props.pyi @@ -0,0 +1,70 @@ +from typing import Any + +from ..util import memoized_property +from . import util as orm_util +from .interfaces import MapperProperty, PropComparator + +class DescriptorProperty(MapperProperty): + doc: Any + uses_objects: bool + key: Any + descriptor: Any + def instrument_class(self, mapper): ... + +class CompositeProperty(DescriptorProperty): + attrs: Any + composite_class: Any + active_history: Any + deferred: Any + group: Any + comparator_factory: Any + info: Any + def __init__(self, class_, *attrs, **kwargs) -> None: ... + def instrument_class(self, mapper) -> None: ... + def do_init(self) -> None: ... + @memoized_property + def props(self): ... + @property + def columns(self): ... + def get_history(self, state, dict_, passive=...): ... + + class CompositeBundle(orm_util.Bundle): + property: Any + def __init__(self, property_, expr) -> None: ... + def create_row_processor(self, query, procs, labels): ... + + class Comparator(PropComparator): + __hash__: Any + @memoized_property + def clauses(self): ... + def __clause_element__(self): ... + @memoized_property + def expression(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... 
+ +class ConcreteInheritedProperty(DescriptorProperty): + descriptor: Any + def __init__(self): ... + +class SynonymProperty(DescriptorProperty): + name: Any + map_column: Any + descriptor: Any + comparator_factory: Any + doc: Any + info: Any + def __init__( + self, + name, + map_column: Any | None = ..., + descriptor: Any | None = ..., + comparator_factory: Any | None = ..., + doc: Any | None = ..., + info: Any | None = ..., + ) -> None: ... + @property + def uses_objects(self): ... + def get_history(self, *arg, **kw): ... + parent: Any + def set_parent(self, parent, init) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/dynamic.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/dynamic.pyi new file mode 100644 index 000000000000..801fe6aa93ba --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/dynamic.pyi @@ -0,0 +1,84 @@ +from typing import Any, Generic, TypeVar + +from . import attributes, strategies +from .query import Query + +_T = TypeVar("_T") + +class DynaLoader(strategies.AbstractRelationshipLoader): + logger: Any + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + +class DynamicAttributeImpl(attributes.AttributeImpl): + uses_objects: bool + default_accepts_scalar_loader: bool + supports_population: bool + collection: bool + dynamic: bool + order_by: Any + target_mapper: Any + query_class: Any + def __init__( + self, class_, key, typecallable, dispatch, target_mapper, order_by, query_class: Any | None = ..., **kw + ) -> None: ... + def get(self, state, dict_, passive=...): ... + def get_collection(self, state, dict_, user_data: Any | None = ..., passive=...): ... + def fire_append_event(self, state, dict_, value, initiator, collection_history: Any | None = ...) -> None: ... + def fire_remove_event(self, state, dict_, value, initiator, collection_history: Any | None = ...) -> None: ... 
+ def set( + self, + state, + dict_, + value, + initiator: Any | None = ..., + passive=..., + check_old: Any | None = ..., + pop: bool = ..., + _adapt: bool = ..., + ) -> None: ... + def delete(self, *args, **kwargs) -> None: ... + def set_committed_value(self, state, dict_, value) -> None: ... + def get_history(self, state, dict_, passive=...): ... + def get_all_pending(self, state, dict_, passive=...): ... + def append(self, state, dict_, value, initiator, passive=...) -> None: ... + def remove(self, state, dict_, value, initiator, passive=...) -> None: ... + def pop(self, state, dict_, value, initiator, passive=...) -> None: ... + +class DynamicCollectionAdapter: + data: Any + def __init__(self, data) -> None: ... + def __iter__(self): ... + def __len__(self): ... + def __bool__(self): ... + __nonzero__: Any + +class AppenderMixin: + query_class: Any + instance: Any + attr: Any + def __init__(self, attr, state) -> None: ... + session: Any + def __getitem__(self, index): ... + def count(self): ... + def extend(self, iterator) -> None: ... + def append(self, item) -> None: ... + def remove(self, item) -> None: ... + +class AppenderQuery(AppenderMixin, Query[_T], Generic[_T]): ... + +def mixin_user_query(cls): ... + +class CollectionHistory: + unchanged_items: Any + added_items: Any + deleted_items: Any + def __init__(self, attr, state, apply_to: Any | None = ...) -> None: ... + @property + def added_plus_unchanged(self): ... + @property + def all_items(self): ... + def as_history(self): ... + def indexed(self, index): ... + def add_added(self, value) -> None: ... + def add_removed(self, value) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/evaluator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/evaluator.pyi new file mode 100644 index 000000000000..2e3d6479cb74 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/evaluator.pyi @@ -0,0 +1,24 @@ +from typing import Any + +from ..sql import operators + +class UnevaluatableError(Exception): ... + +class _NoObject(operators.ColumnOperators): + def operate(self, *arg, **kw) -> None: ... + def reverse_operate(self, *arg, **kw) -> None: ... + +class EvaluatorCompiler: + target_cls: Any + def __init__(self, target_cls: Any | None = ...) -> None: ... + def process(self, *clauses): ... + def visit_grouping(self, clause): ... + def visit_null(self, clause): ... + def visit_false(self, clause): ... + def visit_true(self, clause): ... + def visit_column(self, clause): ... + def visit_tuple(self, clause): ... + def visit_clauselist(self, clause): ... + def visit_binary(self, clause): ... + def visit_unary(self, clause): ... + def visit_bindparam(self, clause): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/events.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/events.pyi new file mode 100644 index 000000000000..36a6414fce63 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/events.pyi @@ -0,0 +1,104 @@ +from typing import Any + +from .. import event + +class InstrumentationEvents(event.Events): + def class_instrument(self, cls) -> None: ... + def class_uninstrument(self, cls) -> None: ... + def attribute_instrument(self, cls, key, inst) -> None: ... + +class _InstrumentationEventsHold: + class_: Any + def __init__(self, class_) -> None: ... + dispatch: Any + +class InstanceEvents(event.Events): + def first_init(self, manager, cls) -> None: ... 
+ def init(self, target, args, kwargs) -> None: ... + def init_failure(self, target, args, kwargs) -> None: ... + def load(self, target, context) -> None: ... + def refresh(self, target, context, attrs) -> None: ... + def refresh_flush(self, target, flush_context, attrs) -> None: ... + def expire(self, target, attrs) -> None: ... + def pickle(self, target, state_dict) -> None: ... + def unpickle(self, target, state_dict) -> None: ... + +class _EventsHold(event.RefCollection): + class_: Any + def __init__(self, class_) -> None: ... + + class HoldEvents: ... + + def remove(self, event_key) -> None: ... + @classmethod + def populate(cls, class_, subject) -> None: ... + +class _InstanceEventsHold(_EventsHold): + all_holds: Any + def resolve(self, class_): ... + + class HoldInstanceEvents(_EventsHold.HoldEvents, InstanceEvents): ... + dispatch: Any + +class MapperEvents(event.Events): + def instrument_class(self, mapper, class_) -> None: ... + def before_mapper_configured(self, mapper, class_) -> None: ... + def mapper_configured(self, mapper, class_) -> None: ... + def before_configured(self) -> None: ... + def after_configured(self) -> None: ... + def before_insert(self, mapper, connection, target) -> None: ... + def after_insert(self, mapper, connection, target) -> None: ... + def before_update(self, mapper, connection, target) -> None: ... + def after_update(self, mapper, connection, target) -> None: ... + def before_delete(self, mapper, connection, target) -> None: ... + def after_delete(self, mapper, connection, target) -> None: ... + +class _MapperEventsHold(_EventsHold): + all_holds: Any + def resolve(self, class_): ... + + class HoldMapperEvents(_EventsHold.HoldEvents, MapperEvents): ... + dispatch: Any + +class SessionEvents(event.Events): + def do_orm_execute(self, orm_execute_state) -> None: ... + def after_transaction_create(self, session, transaction) -> None: ... + def after_transaction_end(self, session, transaction) -> None: ... 
+ def before_commit(self, session) -> None: ... + def after_commit(self, session) -> None: ... + def after_rollback(self, session) -> None: ... + def after_soft_rollback(self, session, previous_transaction) -> None: ... + def before_flush(self, session, flush_context, instances) -> None: ... + def after_flush(self, session, flush_context) -> None: ... + def after_flush_postexec(self, session, flush_context) -> None: ... + def after_begin(self, session, transaction, connection) -> None: ... + def before_attach(self, session, instance) -> None: ... + def after_attach(self, session, instance) -> None: ... + def after_bulk_update(self, update_context) -> None: ... + def after_bulk_delete(self, delete_context) -> None: ... + def transient_to_pending(self, session, instance) -> None: ... + def pending_to_transient(self, session, instance) -> None: ... + def persistent_to_transient(self, session, instance) -> None: ... + def pending_to_persistent(self, session, instance) -> None: ... + def detached_to_persistent(self, session, instance) -> None: ... + def loaded_as_persistent(self, session, instance) -> None: ... + def persistent_to_deleted(self, session, instance) -> None: ... + def deleted_to_persistent(self, session, instance) -> None: ... + def deleted_to_detached(self, session, instance) -> None: ... + def persistent_to_detached(self, session, instance) -> None: ... + +class AttributeEvents(event.Events): + def append(self, target, value, initiator) -> None: ... + def append_wo_mutation(self, target, value, initiator) -> None: ... + def bulk_replace(self, target, values, initiator) -> None: ... + def remove(self, target, value, initiator) -> None: ... + def set(self, target, value, oldvalue, initiator) -> None: ... + def init_scalar(self, target, value, dict_) -> None: ... + def init_collection(self, target, collection, collection_adapter) -> None: ... + def dispose_collection(self, target, collection, collection_adapter) -> None: ... 
+ def modified(self, target, initiator) -> None: ... + +class QueryEvents(event.Events): + def before_compile(self, query) -> None: ... + def before_compile_update(self, query, update_context) -> None: ... + def before_compile_delete(self, query, delete_context) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/exc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/exc.pyi new file mode 100644 index 000000000000..28a664a27221 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/exc.pyi @@ -0,0 +1,33 @@ +from typing import Any + +from .. import exc as sa_exc + +NO_STATE: Any + +class StaleDataError(sa_exc.SQLAlchemyError): ... + +ConcurrentModificationError = StaleDataError + +class FlushError(sa_exc.SQLAlchemyError): ... +class UnmappedError(sa_exc.InvalidRequestError): ... +class ObjectDereferencedError(sa_exc.SQLAlchemyError): ... + +class DetachedInstanceError(sa_exc.SQLAlchemyError): + code: str + +class UnmappedInstanceError(UnmappedError): + def __init__(self, obj, msg: Any | None = ...) -> None: ... + def __reduce__(self): ... + +class UnmappedClassError(UnmappedError): + def __init__(self, cls, msg: Any | None = ...) -> None: ... + def __reduce__(self): ... + +class ObjectDeletedError(sa_exc.InvalidRequestError): + def __init__(self, state, msg: Any | None = ...) -> None: ... + def __reduce__(self): ... + +class UnmappedColumnError(sa_exc.InvalidRequestError): ... + +class LoaderStrategyException(sa_exc.InvalidRequestError): + def __init__(self, applied_to_property_type, requesting_property, applies_to, actual_strategy_type, strategy_key) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/identity.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/identity.pyi new file mode 100644 index 000000000000..deb590c27386 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/identity.pyi @@ -0,0 +1,32 @@ +from typing import Any + +class IdentityMap: + def __init__(self) -> None: ... + def keys(self): ... + def replace(self, state) -> None: ... + def add(self, state) -> None: ... + def update(self, dict_) -> None: ... + def clear(self) -> None: ... + def check_modified(self): ... + def has_key(self, key): ... + def popitem(self) -> None: ... + def pop(self, key, *args) -> None: ... + def setdefault(self, key, default: Any | None = ...) -> None: ... + def __len__(self): ... + def copy(self) -> None: ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + +class WeakInstanceDict(IdentityMap): + def __getitem__(self, key): ... + def __contains__(self, key): ... + def contains_state(self, state): ... + def replace(self, state): ... + def add(self, state): ... + def get(self, key, default: Any | None = ...): ... + def items(self): ... + def values(self): ... + def __iter__(self): ... + def all_states(self): ... + def discard(self, state) -> None: ... + def safe_discard(self, state) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/instrumentation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/instrumentation.pyi new file mode 100644 index 000000000000..d3fcb9d9ef4c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/instrumentation.pyi @@ -0,0 +1,87 @@ +from typing import Any + +from ..util import HasMemoized, hybridmethod +from . 
import base + +DEL_ATTR: Any + +class ClassManager(HasMemoized, dict[Any, Any]): + MANAGER_ATTR: Any + STATE_ATTR: Any + expired_attribute_loader: Any + init_method: Any + factory: Any + mapper: Any + declarative_scan: Any + registry: Any + @property + def deferred_scalar_loader(self): ... + @deferred_scalar_loader.setter + def deferred_scalar_loader(self, obj) -> None: ... + class_: Any + info: Any + new_init: Any + local_attrs: Any + originals: Any + def __init__(self, class_) -> None: ... + def __hash__(self): ... + def __eq__(self, other): ... + @property + def is_mapped(self): ... + # Will be overwritten when mapped + # def mapper(self) -> None: ... + def manage(self) -> None: ... + @hybridmethod + def manager_getter(self): ... + @hybridmethod + def state_getter(self): ... + @hybridmethod + def dict_getter(self): ... + def instrument_attribute(self, key, inst, propagated: bool = ...) -> None: ... + def subclass_managers(self, recursive) -> None: ... + def post_configure_attribute(self, key) -> None: ... + def uninstrument_attribute(self, key, propagated: bool = ...) -> None: ... + def unregister(self) -> None: ... + def install_descriptor(self, key, inst) -> None: ... + def uninstall_descriptor(self, key) -> None: ... + def install_member(self, key, implementation) -> None: ... + def uninstall_member(self, key) -> None: ... + def instrument_collection_class(self, key, collection_class): ... + def initialize_collection(self, key, state, factory): ... + def is_instrumented(self, key, search: bool = ...): ... + def get_impl(self, key): ... + @property + def attributes(self): ... + def new_instance(self, state: Any | None = ...): ... + def setup_instance(self, instance, state: Any | None = ...) -> None: ... + def teardown_instance(self, instance) -> None: ... + def has_state(self, instance): ... + def has_parent(self, state, key, optimistic: bool = ...): ... + def __bool__(self): ... 
+ __nonzero__: Any + +class _SerializeManager: + class_: Any + def __init__(self, state, d) -> None: ... + def __call__(self, state, inst, state_dict) -> None: ... + +class InstrumentationFactory: + def create_manager_for_cls(self, class_): ... + def unregister(self, class_) -> None: ... + +instance_state: Any + +instance_dict: Any +manager_of_class = base.manager_of_class + +def register_class( + class_, + finalize: bool = ..., + mapper: Any | None = ..., + registry: Any | None = ..., + declarative_scan: Any | None = ..., + expired_attribute_loader: Any | None = ..., + init_method: Any | None = ..., +): ... +def unregister_class(class_) -> None: ... +def is_instrumented(instance, key): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/interfaces.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/interfaces.pyi new file mode 100644 index 000000000000..1f222f67cb9d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/interfaces.pyi @@ -0,0 +1,128 @@ +from typing import Any + +from .. import util +from ..sql import operators, roles +from ..sql.base import ExecutableOption +from ..sql.traversals import HasCacheKey +from .base import ( + EXT_CONTINUE as EXT_CONTINUE, + EXT_SKIP as EXT_SKIP, + EXT_STOP as EXT_STOP, + MANYTOMANY as MANYTOMANY, + MANYTOONE as MANYTOONE, + NOT_EXTENSION as NOT_EXTENSION, + ONETOMANY as ONETOMANY, + InspectionAttr as InspectionAttr, + InspectionAttrInfo as InspectionAttrInfo, + _MappedAttribute as _MappedAttribute, +) + +__all__ = ( + "EXT_CONTINUE", + "EXT_STOP", + "EXT_SKIP", + "ONETOMANY", + "MANYTOMANY", + "MANYTOONE", + "NOT_EXTENSION", + "LoaderStrategy", + "MapperOption", + "LoaderOption", + "MapperProperty", + "PropComparator", + "StrategizedProperty", +) + +class ORMStatementRole(roles.StatementRole): ... +class ORMColumnsClauseRole(roles.ColumnsClauseRole): ... 
+class ORMEntityColumnsClauseRole(ORMColumnsClauseRole): ... +class ORMFromClauseRole(roles.StrictFromClauseRole): ... + +class MapperProperty(HasCacheKey, _MappedAttribute, InspectionAttr, util.MemoizedSlots): + cascade: Any + is_property: bool + def setup(self, context, query_entity, path, adapter, **kwargs) -> None: ... + def create_row_processor(self, context, query_entity, path, mapper, result, adapter, populators) -> None: ... + def cascade_iterator(self, type_, state, dict_, visited_states, halt_on: Any | None = ...): ... + parent: Any + def set_parent(self, parent, init) -> None: ... + def instrument_class(self, mapper) -> None: ... + def __init__(self) -> None: ... + def init(self) -> None: ... + @property + def class_attribute(self): ... + def do_init(self) -> None: ... + def post_instrument_class(self, mapper) -> None: ... + def merge( + self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive, _resolve_conflict_map + ) -> None: ... + +class PropComparator(operators.ColumnOperators): + __visit_name__: str + prop: Any + property: Any + def __init__(self, prop, parentmapper, adapt_to_entity: Any | None = ...) -> None: ... + def __clause_element__(self) -> None: ... + def adapt_to_entity(self, adapt_to_entity): ... + @property + def adapter(self): ... + @property + def info(self): ... + @staticmethod + def any_op(a, b, **kwargs): ... + @staticmethod + def has_op(a, b, **kwargs): ... + @staticmethod + def of_type_op(a, class_): ... + def of_type(self, class_): ... + def and_(self, *criteria): ... + def any(self, criterion: Any | None = ..., **kwargs): ... + def has(self, criterion: Any | None = ..., **kwargs): ... + +class StrategizedProperty(MapperProperty): + inherit_cache: bool + strategy_wildcard_key: Any + def setup(self, context, query_entity, path, adapter, **kwargs) -> None: ... + def create_row_processor(self, context, query_entity, path, mapper, result, adapter, populators) -> None: ... 
+ strategy: Any + def do_init(self) -> None: ... + def post_instrument_class(self, mapper) -> None: ... + @classmethod + def strategy_for(cls, **kw): ... + +class ORMOption(ExecutableOption): + propagate_to_loaders: bool + +class CompileStateOption(HasCacheKey, ORMOption): + def process_compile_state(self, compile_state) -> None: ... + def process_compile_state_replaced_entities(self, compile_state, mapper_entities) -> None: ... + +class LoaderOption(CompileStateOption): + def process_compile_state_replaced_entities(self, compile_state, mapper_entities) -> None: ... + def process_compile_state(self, compile_state) -> None: ... + +class CriteriaOption(CompileStateOption): + def process_compile_state(self, compile_state) -> None: ... + def get_global_criteria(self, attributes) -> None: ... + +class UserDefinedOption(ORMOption): + propagate_to_loaders: bool + payload: Any + def __init__(self, payload: Any | None = ...) -> None: ... + +class MapperOption(ORMOption): + propagate_to_loaders: bool + def process_query(self, query) -> None: ... + def process_query_conditionally(self, query) -> None: ... + +class LoaderStrategy: + parent_property: Any + is_class_level: bool + parent: Any + key: Any + strategy_key: Any + strategy_opts: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, **kwargs) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/loading.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/loading.pyi new file mode 100644 index 000000000000..ce8486f598fb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/loading.pyi @@ -0,0 +1,47 @@ +from typing import Any + +def instances(cursor, context): ... +def merge_frozen_result(session, statement, frozen_result, load: bool = ...): ... +def merge_result(query, iterator, load: bool = ...): ... +def get_from_identity(session, mapper, key, passive): ... +def load_on_ident( + session, + statement, + key, + load_options: Any | None = ..., + refresh_state: Any | None = ..., + with_for_update: Any | None = ..., + only_load_props: Any | None = ..., + no_autoflush: bool = ..., + bind_arguments=..., + execution_options=..., +): ... +def load_on_pk_identity( + session, + statement, + primary_key_identity, + load_options: Any | None = ..., + refresh_state: Any | None = ..., + with_for_update: Any | None = ..., + only_load_props: Any | None = ..., + identity_token: Any | None = ..., + no_autoflush: bool = ..., + bind_arguments=..., + execution_options=..., +): ... + +class PostLoad: + loaders: Any + states: Any + load_keys: Any + def __init__(self) -> None: ... + def add_state(self, state, overwrite) -> None: ... + def invoke(self, context, path) -> None: ... + @classmethod + def for_context(cls, context, path, only_load_props): ... + @classmethod + def path_exists(cls, context, path, key): ... + @classmethod + def callable_for_path(cls, context, path, limit_to_mapper, token, loader_callable, *arg, **kw) -> None: ... + +def load_scalar_attributes(mapper, state, attribute_names, passive) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/mapper.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/mapper.pyi new file mode 100644 index 000000000000..931bde703ca9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/mapper.pyi @@ -0,0 +1,143 @@ +from typing import Any + +from ..sql import base as sql_base +from ..util import HasMemoized, memoized_property +from .base import ( + _class_to_mapper as _class_to_mapper, + _state_mapper as _state_mapper, + class_mapper as class_mapper, + state_str as state_str, +) +from .interfaces import InspectionAttr, ORMEntityColumnsClauseRole, ORMFromClauseRole + +NO_ATTRIBUTE: Any + +class Mapper(ORMFromClauseRole, ORMEntityColumnsClauseRole, sql_base.MemoizedHasCacheKey, InspectionAttr): + logger: Any + class_: Any + class_manager: Any + non_primary: Any + always_refresh: Any + version_id_prop: Any + version_id_col: Any + version_id_generator: bool + concrete: Any + single: bool + inherits: Any + local_table: Any + inherit_condition: Any + inherit_foreign_keys: Any + batch: Any + eager_defaults: Any + column_prefix: Any + polymorphic_on: Any + validators: Any + passive_updates: Any + passive_deletes: Any + legacy_is_orphan: Any + allow_partial_pks: Any + confirm_deleted_rows: bool + polymorphic_load: Any + polymorphic_identity: Any + polymorphic_map: Any + include_properties: Any + exclude_properties: Any + def __init__( + self, + class_, + local_table: Any | None = ..., + properties: Any | None = ..., + primary_key: Any | None = ..., + non_primary: bool = ..., + inherits: Any | None = ..., + inherit_condition: Any | None = ..., + inherit_foreign_keys: Any | None = ..., + always_refresh: bool = ..., + version_id_col: Any | None = ..., + version_id_generator: Any | None = ..., + polymorphic_on: Any | None = ..., + _polymorphic_map: Any | None = ..., + polymorphic_identity: Any | None = ..., + concrete: bool = 
..., + with_polymorphic: Any | None = ..., + polymorphic_load: Any | None = ..., + allow_partial_pks: bool = ..., + batch: bool = ..., + column_prefix: Any | None = ..., + include_properties: Any | None = ..., + exclude_properties: Any | None = ..., + passive_updates: bool = ..., + passive_deletes: bool = ..., + confirm_deleted_rows: bool = ..., + eager_defaults: bool = ..., + legacy_is_orphan: bool = ..., + _compiled_cache_size: int = ..., + ): ... + is_mapper: bool + represents_outer_join: bool + @property + def mapper(self): ... + @property + def entity(self): ... + persist_selectable: Any + configured: bool + tables: Any + primary_key: Any + base_mapper: Any + columns: Any + c: Any + @property + def mapped_table(self): ... + def add_properties(self, dict_of_properties) -> None: ... + def add_property(self, key, prop) -> None: ... + def has_property(self, key): ... + def get_property(self, key, _configure_mappers: bool = ...): ... + def get_property_by_column(self, column): ... + @property + def iterate_properties(self): ... + with_polymorphic_mappers: Any + def __clause_element__(self): ... + @memoized_property + def select_identity_token(self): ... + @property + def selectable(self): ... + @HasMemoized.memoized_attribute + def attrs(self): ... + @HasMemoized.memoized_attribute + def all_orm_descriptors(self): ... + @HasMemoized.memoized_attribute + def synonyms(self): ... + @property + def entity_namespace(self): ... + @HasMemoized.memoized_attribute + def column_attrs(self): ... + @HasMemoized.memoized_attribute + def relationships(self): ... + @HasMemoized.memoized_attribute + def composites(self): ... + def common_parent(self, other): ... + def is_sibling(self, other): ... + def isa(self, other): ... + def iterate_to_root(self) -> None: ... + @HasMemoized.memoized_attribute + def self_and_descendants(self): ... + def polymorphic_iterator(self): ... + def primary_mapper(self): ... + @property + def primary_base_mapper(self): ... 
+ def identity_key_from_row(self, row, identity_token: Any | None = ..., adapter: Any | None = ...): ... + def identity_key_from_primary_key(self, primary_key, identity_token: Any | None = ...): ... + def identity_key_from_instance(self, instance): ... + def primary_key_from_instance(self, instance): ... + def cascade_iterator(self, type_, state, halt_on: Any | None = ...) -> None: ... + +class _OptGetColumnsNotAvailable(Exception): ... + +def configure_mappers() -> None: ... +def reconstructor(fn): ... +def validates(*names, **kw): ... + +class _ColumnMapping(dict[Any, Any]): + mapper: Any + def __init__(self, mapper) -> None: ... + def __missing__(self, column) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/path_registry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/path_registry.pyi new file mode 100644 index 000000000000..6d76489c4533 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/path_registry.pyi @@ -0,0 +1,106 @@ +from typing import Any, ClassVar + +from ..sql.traversals import HasCacheKey +from ..util import memoized_property +from . import base as orm_base + +log: Any + +class PathRegistry(HasCacheKey): + root: ClassVar[RootRegistry] + is_token: bool + is_root: bool + def __eq__(self, other): ... + def __ne__(self, other): ... + def set(self, attributes, key, value) -> None: ... + def setdefault(self, attributes, key, value) -> None: ... + def get(self, attributes, key, value: Any | None = ...): ... + def __len__(self): ... + def __hash__(self): ... + @property + def length(self): ... + def pairs(self) -> None: ... + def contains_mapper(self, mapper): ... + def contains(self, attributes, key): ... + def __reduce__(self): ... + @classmethod + def serialize_context_dict(cls, dict_, tokens): ... + @classmethod + def deserialize_context_dict(cls, serialized): ... + def serialize(self): ... 
+ @classmethod + def deserialize(cls, path): ... + @classmethod + def per_mapper(cls, mapper): ... + @classmethod + def coerce(cls, raw): ... + def token(self, token): ... + def __add__(self, other): ... + +class RootRegistry(PathRegistry): + inherit_cache: bool + path: Any + natural_path: Any + has_entity: bool + is_aliased_class: bool + is_root: bool + def __getitem__(self, entity): ... + +class PathToken(orm_base.InspectionAttr, HasCacheKey, str): + @classmethod + def intern(cls, strvalue): ... + +class TokenRegistry(PathRegistry): + inherit_cache: bool + token: Any + parent: Any + path: Any + natural_path: Any + def __init__(self, parent, token) -> None: ... + has_entity: bool + is_token: bool + def generate_for_superclasses(self) -> None: ... + def __getitem__(self, entity) -> None: ... + +class PropRegistry(PathRegistry): + is_unnatural: bool + inherit_cache: bool + prop: Any + parent: Any + path: Any + natural_path: Any + def __init__(self, parent, prop) -> None: ... + @memoized_property + def has_entity(self): ... + @memoized_property + def entity(self): ... + @property + def mapper(self): ... + @property + def entity_path(self): ... + def __getitem__(self, entity): ... + +class AbstractEntityRegistry(PathRegistry): + has_entity: bool + key: Any + parent: Any + is_aliased_class: Any + entity: Any + path: Any + natural_path: Any + def __init__(self, parent, entity) -> None: ... + @property + def entity_path(self): ... + @property + def mapper(self): ... + def __bool__(self): ... + __nonzero__: Any + def __getitem__(self, entity): ... + +class SlotsEntityRegistry(AbstractEntityRegistry): + inherit_cache: bool + +class CachingEntityRegistry(AbstractEntityRegistry, dict): # type: ignore[misc] + inherit_cache: bool + def __getitem__(self, entity): ... + def __missing__(self, key): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/persistence.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/persistence.pyi new file mode 100644 index 000000000000..8d5d721e7d5e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/persistence.pyi @@ -0,0 +1,28 @@ +from typing import Any + +from ..sql.base import CompileState, Options +from ..sql.dml import DeleteDMLState, UpdateDMLState + +def save_obj(base_mapper, states, uowtransaction, single: bool = ...) -> None: ... +def post_update(base_mapper, states, uowtransaction, post_update_cols) -> None: ... +def delete_obj(base_mapper, states, uowtransaction) -> None: ... + +class BulkUDCompileState(CompileState): + class default_update_options(Options): ... + + @classmethod + def orm_pre_session_exec(cls, session, statement, params, execution_options, bind_arguments, is_reentrant_invoke): ... + @classmethod + def orm_setup_cursor_result(cls, session, statement, params, execution_options, bind_arguments, result): ... + +class BulkORMUpdate(UpdateDMLState, BulkUDCompileState): + mapper: Any + extra_criteria_entities: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... + +class BulkORMDelete(DeleteDMLState, BulkUDCompileState): + mapper: Any + extra_criteria_entities: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/properties.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/properties.pyi new file mode 100644 index 000000000000..28d8980df379 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/properties.pyi @@ -0,0 +1,45 @@ +from typing import Any + +from .. 
import util +from .descriptor_props import ( + CompositeProperty as CompositeProperty, + ConcreteInheritedProperty as ConcreteInheritedProperty, + SynonymProperty as SynonymProperty, +) +from .interfaces import PropComparator, StrategizedProperty +from .relationships import RelationshipProperty as RelationshipProperty + +__all__ = ["ColumnProperty", "CompositeProperty", "ConcreteInheritedProperty", "RelationshipProperty", "SynonymProperty"] + +class ColumnProperty(StrategizedProperty): + logger: Any + strategy_wildcard_key: str + inherit_cache: bool + columns: Any + group: Any + deferred: Any + raiseload: Any + instrument: Any + comparator_factory: Any + descriptor: Any + active_history: Any + expire_on_flush: Any + info: Any + doc: Any + strategy_key: Any + def __init__(self, *columns, **kwargs) -> None: ... + def __clause_element__(self): ... + @property + def expression(self): ... + def instrument_class(self, mapper) -> None: ... + def do_init(self) -> None: ... + def copy(self): ... + def merge( + self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive, _resolve_conflict_map + ) -> None: ... + + class Comparator(util.MemoizedSlots, PropComparator): + expressions: Any + def _memoized_method___clause_element__(self): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/query.pyi new file mode 100644 index 000000000000..ea594a3ebf68 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/query.pyi @@ -0,0 +1,130 @@ +from _typeshed import Self +from typing import Any, Generic, TypeVar + +from ..sql.annotation import SupportsCloneAnnotations +from ..sql.base import Executable +from ..sql.selectable import GroupedElement, HasHints, HasPrefixes, HasSuffixes, SelectBase, _SelectFromElements +from . import interfaces +from .context import QueryContext as QueryContext +from .util import aliased as aliased + +__all__ = ["Query", "QueryContext", "aliased"] + +_T = TypeVar("_T") + +class Query(_SelectFromElements, SupportsCloneAnnotations, HasPrefixes, HasSuffixes, HasHints, Executable, Generic[_T]): + logger: Any + load_options: Any + session: Any + def __init__(self, entities, session: Any | None = ...) -> None: ... + @property + def statement(self): ... + def subquery(self, name: str | None = ..., with_labels: bool = ..., reduce_columns: bool = ...): ... + def cte(self, name: Any | None = ..., recursive: bool = ..., nesting: bool = ...): ... + def label(self, name): ... + def as_scalar(self): ... + def scalar_subquery(self): ... + @property + def selectable(self): ... + def __clause_element__(self): ... + def only_return_tuples(self: Self, value) -> Self: ... + @property + def is_single_entity(self): ... + def enable_eagerloads(self: Self, value) -> Self: ... + def with_labels(self): ... + apply_labels: Any + @property + def get_label_style(self): ... + def set_label_style(self, style): ... + def enable_assertions(self: Self, value) -> Self: ... + @property + def whereclause(self): ... + def with_polymorphic(self: Self, cls_or_mappers, selectable: Any | None = ..., polymorphic_on: Any | None = ...) -> Self: ... 
+ def yield_per(self: Self, count) -> Self: ... + def get(self, ident): ... + @property + def lazy_loaded_from(self): ... + def correlate(self: Self, *fromclauses) -> Self: ... + def autoflush(self: Self, setting) -> Self: ... + def populate_existing(self: Self) -> Self: ... + def with_parent(self, instance, property: Any | None = ..., from_entity: Any | None = ...): ... + def add_entity(self: Self, entity, alias: Any | None = ...) -> Self: ... + def with_session(self: Self, session) -> Self: ... + def from_self(self, *entities): ... + def values(self, *columns): ... + def value(self, column): ... + def with_entities(self: Self, *entities) -> Self: ... + def add_columns(self: Self, *column) -> Self: ... + def add_column(self, column): ... + def options(self: Self, *args) -> Self: ... + def with_transformation(self, fn): ... + def get_execution_options(self): ... + def execution_options(self: Self, **kwargs) -> Self: ... + def with_for_update( + self: Self, read: bool = ..., nowait: bool = ..., of: Any | None = ..., skip_locked: bool = ..., key_share: bool = ... + ) -> Self: ... + def params(self: Self, *args, **kwargs) -> Self: ... + def where(self, *criterion): ... + def filter(self: Self, *criterion) -> Self: ... + def filter_by(self: Self, **kwargs) -> Self: ... + def order_by(self: Self, *clauses) -> Self: ... + def group_by(self: Self, *clauses) -> Self: ... + def having(self: Self, criterion) -> Self: ... + def union(self, *q): ... + def union_all(self, *q): ... + def intersect(self, *q): ... + def intersect_all(self, *q): ... + def except_(self, *q): ... + def except_all(self, *q): ... + def join(self: Self, target, *props, **kwargs) -> Self: ... + def outerjoin(self: Self, target, *props, **kwargs) -> Self: ... + def reset_joinpoint(self: Self) -> Self: ... + def select_from(self: Self, *from_obj) -> Self: ... + def select_entity_from(self: Self, from_obj) -> Self: ... + def __getitem__(self, item): ... + def slice(self: Self, start, stop) -> Self: ... 
+ def limit(self: Self, limit) -> Self: ... + def offset(self: Self, offset) -> Self: ... + def distinct(self: Self, *expr) -> Self: ... + def all(self) -> list[_T]: ... + def from_statement(self: Self, statement) -> Self: ... + def first(self) -> _T | None: ... + def one_or_none(self): ... + def one(self): ... + def scalar(self) -> Any: ... # type: ignore[override] + def __iter__(self): ... + @property + def column_descriptions(self): ... + def instances(self, result_proxy, context: Any | None = ...): ... + def merge_result(self, iterator, load: bool = ...): ... + def exists(self): ... + def count(self) -> int: ... + def delete(self, synchronize_session: str = ...) -> int: ... + def update(self, values, synchronize_session: str = ..., update_args: Any | None = ...): ... + +class FromStatement(GroupedElement, SelectBase, Executable): + __visit_name__: str + element: Any + def __init__(self, entities, element) -> None: ... + def get_label_style(self): ... + def set_label_style(self, label_style): ... + def get_children(self, **kw) -> None: ... # type: ignore[override] + +class AliasOption(interfaces.LoaderOption): + def __init__(self, alias) -> None: ... + inherit_cache: bool + def process_compile_state(self, compile_state) -> None: ... + +class BulkUD: + query: Any + mapper: Any + def __init__(self, query) -> None: ... + @property + def session(self): ... + +class BulkUpdate(BulkUD): + values: Any + update_kwargs: Any + def __init__(self, query, values, update_kwargs) -> None: ... + +class BulkDelete(BulkUD): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/relationships.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/relationships.pyi new file mode 100644 index 000000000000..c51860ba53f8 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/relationships.pyi @@ -0,0 +1,176 @@ +from typing import Any + +from ..util import memoized_property +from .interfaces import PropComparator, StrategizedProperty + +def remote(expr): ... +def foreign(expr): ... + +class RelationshipProperty(StrategizedProperty): + logger: Any + strategy_wildcard_key: str + inherit_cache: bool + uselist: Any + argument: Any + secondary: Any + primaryjoin: Any + secondaryjoin: Any + post_update: Any + direction: Any + viewonly: Any + sync_backref: Any + lazy: Any + single_parent: Any + collection_class: Any + passive_deletes: Any + cascade_backrefs: Any + passive_updates: Any + remote_side: Any + enable_typechecks: Any + query_class: Any + innerjoin: Any + distinct_target_key: Any + doc: Any + active_history: Any + join_depth: Any + omit_join: Any + local_remote_pairs: Any + bake_queries: Any + load_on_pending: Any + comparator_factory: Any + comparator: Any + info: Any + strategy_key: Any + order_by: Any + back_populates: Any + backref: Any + def __init__( + self, + argument, + secondary: Any | None = ..., + primaryjoin: Any | None = ..., + secondaryjoin: Any | None = ..., + foreign_keys: Any | None = ..., + uselist: Any | None = ..., + order_by: bool = ..., + backref: Any | None = ..., + back_populates: Any | None = ..., + overlaps: Any | None = ..., + post_update: bool = ..., + cascade: bool = ..., + viewonly: bool = ..., + lazy: str = ..., + collection_class: Any | None = ..., + passive_deletes=..., + passive_updates=..., + remote_side: Any | None = ..., + enable_typechecks=..., + join_depth: Any | None = ..., + comparator_factory: Any | None = ..., + single_parent: bool = ..., + 
innerjoin: bool = ..., + distinct_target_key: Any | None = ..., + doc: Any | None = ..., + active_history=..., + cascade_backrefs=..., + load_on_pending: bool = ..., + bake_queries: bool = ..., + _local_remote_pairs: Any | None = ..., + query_class: Any | None = ..., + info: Any | None = ..., + omit_join: Any | None = ..., + sync_backref: Any | None = ..., + _legacy_inactive_history_style: bool = ..., + ) -> None: ... + def instrument_class(self, mapper) -> None: ... + + class Comparator(PropComparator): + prop: Any + def __init__( + self, prop, parentmapper, adapt_to_entity: Any | None = ..., of_type: Any | None = ..., extra_criteria=... + ) -> None: ... + def adapt_to_entity(self, adapt_to_entity): ... + @memoized_property + def entity(self): ... + @memoized_property + def mapper(self): ... + def __clause_element__(self): ... + def of_type(self, cls): ... + def and_(self, *other): ... + def in_(self, other) -> None: ... + __hash__: Any + def __eq__(self, other): ... + def any(self, criterion: Any | None = ..., **kwargs): ... + def has(self, criterion: Any | None = ..., **kwargs): ... + def contains(self, other, **kwargs): ... + def __ne__(self, other): ... + @memoized_property + def property(self): ... + + def merge( + self, session, source_state, source_dict, dest_state, dest_dict, load, _recursive, _resolve_conflict_map + ) -> None: ... + def cascade_iterator(self, type_, state, dict_, visited_states, halt_on: Any | None = ...) -> None: ... + @memoized_property + def entity(self): ... + @memoized_property + def mapper(self): ... + def do_init(self) -> None: ... + @property + def cascade(self): ... + @cascade.setter + def cascade(self, cascade) -> None: ... 
+ +class JoinCondition: + parent_persist_selectable: Any + parent_local_selectable: Any + child_persist_selectable: Any + child_local_selectable: Any + parent_equivalents: Any + child_equivalents: Any + primaryjoin: Any + secondaryjoin: Any + secondary: Any + consider_as_foreign_keys: Any + prop: Any + self_referential: Any + support_sync: Any + can_be_synced_fn: Any + def __init__( + self, + parent_persist_selectable, + child_persist_selectable, + parent_local_selectable, + child_local_selectable, + primaryjoin: Any | None = ..., + secondary: Any | None = ..., + secondaryjoin: Any | None = ..., + parent_equivalents: Any | None = ..., + child_equivalents: Any | None = ..., + consider_as_foreign_keys: Any | None = ..., + local_remote_pairs: Any | None = ..., + remote_side: Any | None = ..., + self_referential: bool = ..., + prop: Any | None = ..., + support_sync: bool = ..., + can_be_synced_fn=..., + ): ... + @property + def primaryjoin_minus_local(self): ... + @property + def secondaryjoin_minus_local(self): ... + @memoized_property + def primaryjoin_reverse_remote(self): ... + @memoized_property + def remote_columns(self): ... + @memoized_property + def local_columns(self): ... + @memoized_property + def foreign_key_columns(self): ... + def join_targets(self, source_selectable, dest_selectable, aliased, single_crit: Any | None = ..., extra_criteria=...): ... + def create_lazy_clause(self, reverse_direction: bool = ...): ... + +class _ColInAnnotations: + name: Any + def __init__(self, name) -> None: ... + def __call__(self, c): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/scoping.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/scoping.pyi new file mode 100644 index 000000000000..7d739982b4c1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/scoping.pyi @@ -0,0 +1,96 @@ +from typing import Any + +from ..util import memoized_property + +class ScopedSessionMixin: + def __call__(self, **kw): ... + def configure(self, **kwargs) -> None: ... + +class scoped_session(ScopedSessionMixin): + session_factory: Any + registry: Any + def __init__(self, session_factory, scopefunc: Any | None = ...) -> None: ... + def remove(self) -> None: ... + def query_property(self, query_cls: Any | None = ...): ... + # dynamically proxied from class Session + bind: Any + identity_map: Any + autoflush: Any + autocommit: bool + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + @property + def is_active(self): ... + @property + def no_autoflush(self) -> None: ... + @memoized_property + def info(self): ... + @classmethod + def close_all(cls) -> None: ... + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + def __contains__(self, instance): ... + def __iter__(self): ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def begin(self, subtransactions: bool = ..., nested: bool = ..., _subtrans: bool = ...): ... + def begin_nested(self): ... + def close(self) -> None: ... + def commit(self) -> None: ... + def connection( + self, bind_arguments: Any | None = ..., close_with_result: bool = ..., execution_options: Any | None = ..., **kw + ): ... + def delete(self, instance) -> None: ... 
+ def execute( + self, + statement, + params: Any | None = ..., + execution_options=..., + bind_arguments: Any | None = ..., + _parent_execute_state: Any | None = ..., + _add_event: Any | None = ..., + **kw, + ): ... + def expire(self, instance, attribute_names: Any | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expunge(self, instance) -> None: ... + def expunge_all(self) -> None: ... + def flush(self, objects: Any | None = ...) -> None: ... + def get( + self, + entity, + ident, + options: Any | None = ..., + populate_existing: bool = ..., + with_for_update: Any | None = ..., + identity_token: Any | None = ..., + execution_options: Any | None = ..., + ): ... + def get_bind( + self, + mapper: Any | None = ..., + clause: Any | None = ..., + bind: Any | None = ..., + _sa_skip_events: Any | None = ..., + _sa_skip_for_implicit_returning: bool = ..., + ): ... + def is_modified(self, instance, include_collections: bool = ...): ... + def bulk_save_objects( + self, objects, return_defaults: bool = ..., update_changed_only: bool = ..., preserve_order: bool = ... + ): ... + def bulk_insert_mappings(self, mapper, mappings, return_defaults: bool = ..., render_nulls: bool = ...) -> None: ... + def bulk_update_mappings(self, mapper, mappings) -> None: ... + def merge(self, instance, load: bool = ..., options: Any | None = ...): ... + def query(self, *entities, **kwargs): ... + def refresh(self, instance, attribute_names: Any | None = ..., with_for_update: Any | None = ...) -> None: ... + def rollback(self) -> None: ... + def scalar(self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw): ... + def scalars(self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw): ... 
+ +ScopedSession = scoped_session diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/session.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/session.pyi new file mode 100644 index 000000000000..68586fdf97fc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/session.pyi @@ -0,0 +1,222 @@ +from collections.abc import Mapping +from typing import Any, TypeVar, overload + +from ..engine.base import Connection +from ..engine.result import Result +from ..engine.util import TransactionalContext +from ..sql.elements import ColumnElement +from ..sql.schema import Table +from ..util import MemoizedSlots, memoized_property +from .query import Query + +_T = TypeVar("_T") + +class _SessionClassMethods: + @classmethod + def close_all(cls) -> None: ... + @classmethod + def identity_key(cls, *args, **kwargs): ... + @classmethod + def object_session(cls, instance): ... + +class ORMExecuteState(MemoizedSlots): + session: Any + statement: Any + parameters: Any + local_execution_options: Any + execution_options: Any + bind_arguments: Any + def __init__( + self, session, statement, parameters, execution_options, bind_arguments, compile_state_cls, events_todo + ) -> None: ... + def invoke_statement( + self, + statement: Any | None = ..., + params: Any | None = ..., + execution_options: Any | None = ..., + bind_arguments: Any | None = ..., + ): ... + @property + def bind_mapper(self): ... + @property + def all_mappers(self): ... + @property + def is_orm_statement(self): ... + @property + def is_select(self): ... + @property + def is_insert(self): ... + @property + def is_update(self): ... + @property + def is_delete(self): ... + def update_execution_options(self, **opts) -> None: ... + @property + def lazy_loaded_from(self): ... + @property + def loader_strategy_path(self): ... + @property + def is_column_load(self): ... + @property + def is_relationship_load(self): ... 
+ @property + def load_options(self): ... + @property + def update_delete_options(self): ... + @property + def user_defined_options(self): ... + +class SessionTransaction(TransactionalContext): + session: Any + nested: Any + def __init__(self, session, parent: Any | None = ..., nested: bool = ..., autobegin: bool = ...) -> None: ... + @property + def parent(self): ... + @property + def is_active(self): ... + def connection(self, bindkey, execution_options: Any | None = ..., **kwargs): ... + def prepare(self) -> None: ... + def commit(self, _to_root: bool = ...): ... + def rollback(self, _capture_exception: bool = ..., _to_root: bool = ...): ... + def close(self, invalidate: bool = ...) -> None: ... + +class Session(_SessionClassMethods): + identity_map: Any + bind: Any + future: Any + hash_key: Any + autoflush: Any + expire_on_commit: Any + enable_baked_queries: Any + autocommit: bool + twophase: Any + def __init__( + self, + bind: Any | None = ..., + autoflush: bool = ..., + future: bool = ..., + expire_on_commit: bool = ..., + autocommit: bool = ..., + twophase: bool = ..., + binds: Any | None = ..., + enable_baked_queries: bool = ..., + info: Any | None = ..., + query_cls: Any | None = ..., + ) -> None: ... + connection_callable: Any + def __enter__(self): ... + def __exit__(self, type_, value, traceback) -> None: ... + @property + def transaction(self): ... + def in_transaction(self): ... + def in_nested_transaction(self): ... + def get_transaction(self): ... + def get_nested_transaction(self): ... + @memoized_property + def info(self): ... + def begin(self, subtransactions: bool = ..., nested: bool = ..., _subtrans: bool = ...): ... + def begin_nested(self): ... + def rollback(self) -> None: ... + def commit(self) -> None: ... + def prepare(self) -> None: ... 
+ # TODO: bind_arguments could use a TypedDict + def connection( + self, + bind_arguments: Mapping[str, Any] | None = ..., + close_with_result: bool = ..., + execution_options: Mapping[str, Any] | None = ..., + **kw: Any, + ) -> Connection: ... + def execute( + self, + statement, + params: Any | None = ..., + execution_options=..., + bind_arguments: Any | None = ..., + _parent_execute_state: Any | None = ..., + _add_event: Any | None = ..., + **kw, + ) -> Result: ... + def scalar(self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw): ... + def scalars(self, statement, params: Any | None = ..., execution_options=..., bind_arguments: Any | None = ..., **kw): ... + def close(self) -> None: ... + def invalidate(self) -> None: ... + def expunge_all(self) -> None: ... + def bind_mapper(self, mapper, bind) -> None: ... + def bind_table(self, table, bind) -> None: ... + def get_bind( + self, + mapper: Any | None = ..., + clause: Any | None = ..., + bind: Any | None = ..., + _sa_skip_events: Any | None = ..., + _sa_skip_for_implicit_returning: bool = ..., + ): ... + @overload + def query(self, entities: Table, **kwargs: Any) -> Query[Any]: ... + @overload + def query(self, entities: ColumnElement[_T], **kwargs: Any) -> Query[tuple[_T]]: ... # type: ignore[misc] + @overload + def query(self, *entities: ColumnElement[_T], **kwargs: Any) -> Query[tuple[_T, ...]]: ... + @overload + def query(self, *entities: type[_T], **kwargs: Any) -> Query[_T]: ... + @property + def no_autoflush(self) -> None: ... + def refresh(self, instance, attribute_names: Any | None = ..., with_for_update: Any | None = ...) -> None: ... + def expire_all(self) -> None: ... + def expire(self, instance, attribute_names: Any | None = ...) -> None: ... + def expunge(self, instance) -> None: ... + def add(self, instance, _warn: bool = ...) -> None: ... + def add_all(self, instances) -> None: ... + def delete(self, instance) -> None: ... 
+ def get( + self, + entity, + ident, + options: Any | None = ..., + populate_existing: bool = ..., + with_for_update: Any | None = ..., + identity_token: Any | None = ..., + execution_options: Any | None = ..., + ): ... + def merge(self, instance, load: bool = ..., options: Any | None = ...): ... + def enable_relationship_loading(self, obj) -> None: ... + def __contains__(self, instance): ... + def __iter__(self): ... + def flush(self, objects: Any | None = ...) -> None: ... + def bulk_save_objects( + self, objects, return_defaults: bool = ..., update_changed_only: bool = ..., preserve_order: bool = ... + ): ... + def bulk_insert_mappings(self, mapper, mappings, return_defaults: bool = ..., render_nulls: bool = ...) -> None: ... + def bulk_update_mappings(self, mapper, mappings) -> None: ... + def is_modified(self, instance, include_collections: bool = ...): ... + @property + def is_active(self): ... + @property + def dirty(self): ... + @property + def deleted(self): ... + @property + def new(self): ... + +class sessionmaker(_SessionClassMethods): + kw: Any + class_: Any + def __init__( + self, + bind: Any | None = ..., + class_=..., + autoflush: bool = ..., + autocommit: bool = ..., + expire_on_commit: bool = ..., + info: Any | None = ..., + **kw, + ) -> None: ... + def begin(self): ... + def __call__(self, **local_kw): ... + def configure(self, **new_kw) -> None: ... + +def close_all_sessions() -> None: ... +def make_transient(instance) -> None: ... +def make_transient_to_detached(instance) -> None: ... +def object_session(instance): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/state.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/state.pyi new file mode 100644 index 000000000000..134ff2259ef8 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/state.pyi @@ -0,0 +1,83 @@ +from typing import Any + +from ..util import memoized_property +from . import interfaces + +class InstanceState(interfaces.InspectionAttrInfo): + session_id: Any + key: Any + runid: Any + load_options: Any + load_path: Any + insert_order: Any + modified: bool + expired: bool + is_instance: bool + identity_token: Any + callables: Any + class_: Any + manager: Any + committed_state: Any + expired_attributes: Any + def __init__(self, obj, manager) -> None: ... + @memoized_property + def attrs(self): ... + @property + def transient(self): ... + @property + def pending(self): ... + @property + def deleted(self): ... + @property + def was_deleted(self): ... + @property + def persistent(self): ... + @property + def detached(self): ... + @property + def session(self): ... + @property + def async_session(self): ... + @property + def object(self): ... + @property + def identity(self): ... + @property + def identity_key(self): ... + @memoized_property + def parents(self): ... + @memoized_property + def mapper(self): ... + @property + def has_identity(self): ... + def obj(self) -> None: ... + @property + def dict(self): ... + def get_history(self, key, passive): ... + def get_impl(self, key): ... + @property + def unmodified(self): ... + def unmodified_intersection(self, keys): ... + @property + def unloaded(self): ... + @property + def unloaded_expirable(self): ... + +class AttributeState: + state: Any + key: Any + def __init__(self, state, key) -> None: ... + @property + def loaded_value(self): ... + @property + def value(self): ... + @property + def history(self): ... + def load_history(self): ... 
+ +class PendingCollection: + deleted_items: Any + added_items: Any + def __init__(self) -> None: ... + def append(self, value) -> None: ... + def remove(self, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/strategies.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/strategies.pyi new file mode 100644 index 000000000000..463e13ee38cf --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/strategies.pyi @@ -0,0 +1,141 @@ +from typing import Any, NamedTuple + +from .. import util +from .interfaces import LoaderStrategy + +class UninstrumentedColumnLoader(LoaderStrategy): + columns: Any + def __init__(self, parent, strategy_key) -> None: ... + def setup_query( + self, compile_state, query_entity, path, loadopt, adapter, column_collection: Any | None = ..., **kwargs + ) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class ColumnLoader(LoaderStrategy): + logger: Any + columns: Any + is_composite: Any + def __init__(self, parent, strategy_key) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, column_collection, memoized_populators, check_for_adapt: bool = ..., **kwargs) -> None: ... # type: ignore[override] + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class ExpressionColumnLoader(ColumnLoader): + logger: Any + def __init__(self, parent, strategy_key) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, column_collection, memoized_populators, **kwargs) -> None: ... # type: ignore[override] + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... 
+ is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + +class DeferredColumnLoader(LoaderStrategy): + logger: Any + raiseload: Any + columns: Any + group: Any + def __init__(self, parent, strategy_key) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def setup_query(self, compile_state, query_entity, path, loadopt, adapter, column_collection, memoized_populators, only_load_props: Any | None = ..., **kw) -> None: ... # type: ignore[override] + +class LoadDeferredColumns: + key: Any + raiseload: Any + def __init__(self, key, raiseload: bool = ...) -> None: ... + def __call__(self, state, passive=...): ... + +class AbstractRelationshipLoader(LoaderStrategy): + mapper: Any + entity: Any + target: Any + uselist: Any + def __init__(self, parent, strategy_key) -> None: ... + +class DoNothingLoader(LoaderStrategy): + logger: Any + +class NoLoader(AbstractRelationshipLoader): + logger: Any + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class LazyLoader(AbstractRelationshipLoader, util.MemoizedSlots): + logger: Any + is_aliased_class: Any + use_get: Any + def __init__(self, parent, strategy_key) -> None: ... + is_class_level: bool + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class LoadLazyAttribute: + key: Any + strategy_key: Any + loadopt: Any + extra_criteria: Any + def __init__(self, key, initiating_strategy, loadopt, extra_criteria) -> None: ... + def __call__(self, state, passive=...): ... + +class PostLoader(AbstractRelationshipLoader): ... 
+ +class ImmediateLoader(PostLoader): + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class SubqueryLoader(PostLoader): + logger: Any + join_depth: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + + class _SubqCollections: + session: Any + execution_options: Any + load_options: Any + params: Any + subq: Any + def __init__(self, context, subq) -> None: ... + def get(self, key, default): ... + def loader(self, state, dict_, row) -> None: ... + + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators): ... + +class JoinedLoader(AbstractRelationshipLoader): + logger: Any + join_depth: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + def setup_query( + self, + compile_state, + query_entity, + path, + loadopt, + adapter, + column_collection: Any | None = ..., + parentmapper: Any | None = ..., + chained_from_outerjoin: bool = ..., + **kwargs, + ) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators) -> None: ... + +class SelectInLoader(PostLoader, util.MemoizedSlots): + logger: Any + + class query_info(NamedTuple): + load_only_child: Any + load_with_join: Any + in_expr: Any + pk_cols: Any + zero_idx: Any + child_lookup_cols: Any + join_depth: Any + omit_join: Any + def __init__(self, parent, strategy_key) -> None: ... + def init_class_attribute(self, mapper) -> None: ... + def create_row_processor(self, context, query_entity, path, loadopt, mapper, result, adapter, populators): ... + +def single_parent_validator(desc, prop): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/strategy_options.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/strategy_options.pyi new file mode 100644 index 000000000000..5fa175bbe4a2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/strategy_options.pyi @@ -0,0 +1,66 @@ +from typing import Any + +from ..sql.base import Generative +from .interfaces import LoaderOption + +class Load(Generative, LoaderOption): + path: Any + context: Any + local_opts: Any + is_class_strategy: bool + def __init__(self, entity) -> None: ... + @classmethod + def for_existing_path(cls, path): ... + is_opts_only: bool + strategy: Any + propagate_to_loaders: bool + def process_compile_state_replaced_entities(self, compile_state, mapper_entities) -> None: ... + def process_compile_state(self, compile_state) -> None: ... + def options(self, *opts) -> None: ... + def set_relationship_strategy(self, attr, strategy, propagate_to_loaders: bool = ...) -> None: ... + def set_column_strategy(self, attrs, strategy, opts: Any | None = ..., opts_only: bool = ...) -> None: ... + def set_generic_strategy(self, attrs, strategy) -> None: ... + def set_class_strategy(self, strategy, opts) -> None: ... + # added dynamically at runtime + def contains_eager(self, attr, alias: Any | None = ...): ... + def load_only(self, *attrs): ... + def joinedload(self, attr, innerjoin: Any | None = ...): ... + def subqueryload(self, attr): ... + def selectinload(self, attr): ... + def lazyload(self, attr): ... + def immediateload(self, attr): ... + def noload(self, attr): ... + def raiseload(self, attr, sql_only: bool = ...): ... + def defaultload(self, attr): ... + def defer(self, key, raiseload: bool = ...): ... + def undefer(self, key): ... + def undefer_group(self, name): ... + def with_expression(self, key, expression): ... + def selectin_polymorphic(self, classes): ... 
+ +class _UnboundLoad(Load): + path: Any + local_opts: Any + def __init__(self) -> None: ... + +class loader_option: + def __init__(self) -> None: ... + name: Any + fn: Any + def __call__(self, fn): ... + +def contains_eager(loadopt, attr, alias: Any | None = ...): ... +def load_only(loadopt, *attrs): ... +def joinedload(loadopt, attr, innerjoin: Any | None = ...): ... +def subqueryload(loadopt, attr): ... +def selectinload(loadopt, attr): ... +def lazyload(loadopt, attr): ... +def immediateload(loadopt, attr): ... +def noload(loadopt, attr): ... +def raiseload(loadopt, attr, sql_only: bool = ...): ... +def defaultload(loadopt, attr): ... +def defer(loadopt, key, raiseload: bool = ...): ... +def undefer(loadopt, key): ... +def undefer_group(loadopt, name): ... +def with_expression(loadopt, key, expression): ... +def selectin_polymorphic(loadopt, classes): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/sync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/sync.pyi new file mode 100644 index 000000000000..558c9c848c08 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/sync.pyi @@ -0,0 +1,6 @@ +def populate(source, source_mapper, dest, dest_mapper, synchronize_pairs, uowcommit, flag_cascaded_pks) -> None: ... +def bulk_populate_inherit_keys(source_dict, source_mapper, synchronize_pairs) -> None: ... +def clear(dest, dest_mapper, synchronize_pairs) -> None: ... +def update(source, source_mapper, dest, old_prefix, synchronize_pairs) -> None: ... +def populate_dict(source, source_mapper, dict_, synchronize_pairs) -> None: ... +def source_modified(uowcommit, source, source_mapper, synchronize_pairs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/unitofwork.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/unitofwork.pyi new file mode 100644 index 000000000000..5ca959e0e625 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/unitofwork.pyi @@ -0,0 +1,105 @@ +from typing import Any + +def track_cascade_events(descriptor, prop): ... + +class UOWTransaction: + session: Any + attributes: Any + deps: Any + mappers: Any + presort_actions: Any + postsort_actions: Any + dependencies: Any + states: Any + post_update_states: Any + def __init__(self, session): ... + @property + def has_work(self): ... + def was_already_deleted(self, state): ... + def is_deleted(self, state): ... + def memo(self, key, callable_): ... + def remove_state_actions(self, state) -> None: ... + def get_attribute_history(self, state, key, passive=...): ... + def has_dep(self, processor): ... + def register_preprocessor(self, processor, fromparent) -> None: ... + def register_object( + self, + state, + isdelete: bool = ..., + listonly: bool = ..., + cancel_delete: bool = ..., + operation: Any | None = ..., + prop: Any | None = ..., + ): ... + def register_post_update(self, state, post_update_cols) -> None: ... + def filter_states_for_dep(self, dep, states): ... + def states_for_mapper_hierarchy(self, mapper, isdelete, listonly) -> None: ... + def execute(self): ... + def finalize_flush_changes(self) -> None: ... + +class IterateMappersMixin: ... + +class Preprocess(IterateMappersMixin): + dependency_processor: Any + fromparent: Any + processed: Any + setup_flush_actions: bool + def __init__(self, dependency_processor, fromparent) -> None: ... + def execute(self, uow): ... + +class PostSortRec: + disabled: Any + def __new__(cls, uow, *args): ... + def execute_aggregate(self, uow, recs) -> None: ... 
+ +class ProcessAll(IterateMappersMixin, PostSortRec): + dependency_processor: Any + sort_key: Any + isdelete: Any + fromparent: Any + def __init__(self, uow, dependency_processor, isdelete, fromparent) -> None: ... + def execute(self, uow) -> None: ... + def per_state_flush_actions(self, uow): ... + +class PostUpdateAll(PostSortRec): + mapper: Any + isdelete: Any + sort_key: Any + def __init__(self, uow, mapper, isdelete) -> None: ... + def execute(self, uow) -> None: ... + +class SaveUpdateAll(PostSortRec): + mapper: Any + sort_key: Any + def __init__(self, uow, mapper) -> None: ... + def execute(self, uow) -> None: ... + def per_state_flush_actions(self, uow) -> None: ... + +class DeleteAll(PostSortRec): + mapper: Any + sort_key: Any + def __init__(self, uow, mapper) -> None: ... + def execute(self, uow) -> None: ... + def per_state_flush_actions(self, uow) -> None: ... + +class ProcessState(PostSortRec): + dependency_processor: Any + sort_key: Any + isdelete: Any + state: Any + def __init__(self, uow, dependency_processor, isdelete, state) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... + +class SaveUpdateState(PostSortRec): + state: Any + mapper: Any + sort_key: Any + def __init__(self, uow, state) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... + +class DeleteState(PostSortRec): + state: Any + mapper: Any + sort_key: Any + def __init__(self, uow, state) -> None: ... + def execute_aggregate(self, uow, recs) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/util.pyi new file mode 100644 index 000000000000..964a52d303af --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/orm/util.pyi @@ -0,0 +1,180 @@ +from typing import Any + +from ..sql import base as sql_base, expression, util as sql_util +from ..sql.annotation import SupportsCloneAnnotations +from .base import ( + InspectionAttr as InspectionAttr, + _class_to_mapper as _class_to_mapper, + _never_set as _never_set, + _none_set as _none_set, + attribute_str as attribute_str, + class_mapper as class_mapper, + instance_str as instance_str, + object_mapper as object_mapper, + object_state as object_state, + state_attribute_str as state_attribute_str, + state_class_str as state_class_str, + state_str as state_str, +) +from .interfaces import CriteriaOption, ORMColumnsClauseRole, ORMEntityColumnsClauseRole, ORMFromClauseRole + +all_cascades: Any + +class CascadeOptions(frozenset[Any]): + save_update: Any + delete: Any + refresh_expire: Any + merge: Any + expunge: Any + delete_orphan: Any + def __new__(cls, value_list): ... + @classmethod + def from_string(cls, arg): ... + +def polymorphic_union(table_map, typecolname, aliasname: str = ..., cast_nulls: bool = ...): ... +def identity_key(*args, **kwargs): ... + +class ORMAdapter(sql_util.ColumnAdapter): + mapper: Any + aliased_class: Any + def __init__( + self, + entity, + equivalents: Any | None = ..., + adapt_required: bool = ..., + allow_label_resolve: bool = ..., + anonymize_labels: bool = ..., + ) -> None: ... 
+ +class AliasedClass: + __name__: Any + def __init__( + self, + mapped_class_or_ac, + alias: Any | None = ..., + name: Any | None = ..., + flat: bool = ..., + adapt_on_names: bool = ..., + with_polymorphic_mappers=..., + with_polymorphic_discriminator: Any | None = ..., + base_alias: Any | None = ..., + use_mapper_path: bool = ..., + represents_outer_join: bool = ..., + ) -> None: ... + def __getattr__(self, key): ... + +class AliasedInsp(ORMEntityColumnsClauseRole, ORMFromClauseRole, sql_base.MemoizedHasCacheKey, InspectionAttr): + mapper: Any + selectable: Any + name: Any + polymorphic_on: Any + represents_outer_join: Any + with_polymorphic_mappers: Any + def __init__( + self, + entity, + inspected, + selectable, + name, + with_polymorphic_mappers, + polymorphic_on, + _base_alias, + _use_mapper_path, + adapt_on_names, + represents_outer_join, + nest_adapters: bool, # added in 1.4.30 + ) -> None: ... + @property + def entity(self): ... + is_aliased_class: bool + def __clause_element__(self): ... + @property + def entity_namespace(self): ... + @property + def class_(self): ... + +class _WrapUserEntity: + subject: Any + def __init__(self, subject) -> None: ... + def __getattribute__(self, name): ... + +class LoaderCriteriaOption(CriteriaOption): + root_entity: Any + entity: Any + deferred_where_criteria: bool + where_criteria: Any + include_aliases: Any + propagate_to_loaders: Any + def __init__( + self, + entity_or_base, + where_criteria, + loader_only: bool = ..., + include_aliases: bool = ..., + propagate_to_loaders: bool = ..., + track_closure_variables: bool = ..., + ) -> None: ... + def process_compile_state_replaced_entities(self, compile_state, mapper_entities): ... + def process_compile_state(self, compile_state) -> None: ... + def get_global_criteria(self, attributes) -> None: ... + +def aliased(element, alias: Any | None = ..., name: Any | None = ..., flat: bool = ..., adapt_on_names: bool = ...): ... 
+def with_polymorphic( + base, + classes, + selectable: bool = ..., + flat: bool = ..., + polymorphic_on: Any | None = ..., + aliased: bool = ..., + innerjoin: bool = ..., + _use_mapper_path: bool = ..., + _existing_alias: Any | None = ..., +): ... + +class Bundle(ORMColumnsClauseRole, SupportsCloneAnnotations, sql_base.MemoizedHasCacheKey, InspectionAttr): + single_entity: bool + is_clause_element: bool + is_mapper: bool + is_aliased_class: bool + is_bundle: bool + name: Any + exprs: Any + c: Any + def __init__(self, name, *exprs, **kw) -> None: ... + @property + def mapper(self): ... + @property + def entity(self): ... + @property + def entity_namespace(self): ... + columns: Any + def __clause_element__(self): ... + @property + def clauses(self): ... + def label(self, name): ... + def create_row_processor(self, query, procs, labels): ... + +class _ORMJoin(expression.Join): + __visit_name__: Any + inherit_cache: bool + onclause: Any + def __init__( + self, + left, + right, + onclause: Any | None = ..., + isouter: bool = ..., + full: bool = ..., + _left_memo: Any | None = ..., + _right_memo: Any | None = ..., + _extra_criteria=..., + ) -> None: ... + def join(self, right, onclause: Any | None = ..., isouter: bool = ..., full: bool = ..., join_to_left: Any | None = ...): ... + def outerjoin(self, right, onclause: Any | None = ..., full: bool = ..., join_to_left: Any | None = ...): ... + +def join(left, right, onclause: Any | None = ..., isouter: bool = ..., full: bool = ..., join_to_left: Any | None = ...): ... +def outerjoin(left, right, onclause: Any | None = ..., full: bool = ..., join_to_left: Any | None = ...): ... +def with_parent(instance, prop, from_entity: Any | None = ...): ... +def has_identity(object_): ... +def was_deleted(object_): ... +def randomize_unitofwork() -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/__init__.pyi new file mode 100644 index 000000000000..977c65ad93f6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/__init__.pyi @@ -0,0 +1,27 @@ +from .base import Pool as Pool, reset_commit as reset_commit, reset_none as reset_none, reset_rollback as reset_rollback +from .dbapi_proxy import clear_managers as clear_managers, manage as manage +from .impl import ( + AssertionPool as AssertionPool, + AsyncAdaptedQueuePool as AsyncAdaptedQueuePool, + FallbackAsyncAdaptedQueuePool as FallbackAsyncAdaptedQueuePool, + NullPool as NullPool, + QueuePool as QueuePool, + SingletonThreadPool as SingletonThreadPool, + StaticPool as StaticPool, +) + +__all__ = [ + "Pool", + "reset_commit", + "reset_none", + "reset_rollback", + "clear_managers", + "manage", + "AssertionPool", + "NullPool", + "QueuePool", + "AsyncAdaptedQueuePool", + "FallbackAsyncAdaptedQueuePool", + "SingletonThreadPool", + "StaticPool", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/base.pyi new file mode 100644 index 000000000000..9936b2d12377 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/base.pyi @@ -0,0 +1,88 @@ +from typing import Any + +from .. import log +from ..util import memoized_property + +reset_rollback: Any +reset_commit: Any +reset_none: Any + +class _ConnDialect: + is_async: bool + def do_rollback(self, dbapi_connection) -> None: ... + def do_commit(self, dbapi_connection) -> None: ... + def do_close(self, dbapi_connection) -> None: ... + def do_ping(self, dbapi_connection) -> None: ... + def get_driver_connection(self, connection): ... 
+ +class _AsyncConnDialect(_ConnDialect): + is_async: bool + +class Pool(log.Identified): + logging_name: Any + echo: Any + def __init__( + self, + creator, + recycle: int = ..., + echo: Any | None = ..., + logging_name: Any | None = ..., + reset_on_return: bool = ..., + events: Any | None = ..., + dialect: Any | None = ..., + pre_ping: bool = ..., + _dispatch: Any | None = ..., + ) -> None: ... + def recreate(self) -> None: ... + def dispose(self) -> None: ... + def connect(self): ... + def status(self) -> None: ... + +class _ConnectionRecord: + finalize_callback: Any + def __init__(self, pool, connect: bool = ...) -> None: ... + fresh: bool + fairy_ref: Any + starttime: Any + dbapi_connection: Any + @property + def driver_connection(self): ... + @property + def connection(self): ... + @connection.setter + def connection(self, value) -> None: ... + @memoized_property + def info(self): ... + @memoized_property + def record_info(self): ... + @classmethod + def checkout(cls, pool): ... + def checkin(self, _fairy_was_created: bool = ...) -> None: ... + @property + def in_use(self): ... + @property + def last_connect_time(self): ... + def close(self) -> None: ... + def invalidate(self, e: Any | None = ..., soft: bool = ...) -> None: ... + def get_connection(self): ... + +class _ConnectionFairy: + dbapi_connection: Any + def __init__(self, dbapi_connection, connection_record, echo) -> None: ... + @property + def driver_connection(self): ... + @property + def connection(self): ... + @connection.setter + def connection(self, value) -> None: ... + @property + def is_valid(self): ... + @memoized_property + def info(self): ... + @property + def record_info(self): ... + def invalidate(self, e: Any | None = ..., soft: bool = ...) -> None: ... + def cursor(self, *args, **kwargs): ... + def __getattr__(self, key): ... + def detach(self) -> None: ... + def close(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/dbapi_proxy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/dbapi_proxy.pyi new file mode 100644 index 000000000000..909b78d852a6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/dbapi_proxy.pyi @@ -0,0 +1,19 @@ +from typing import Any + +proxies: Any + +def manage(module, **params): ... +def clear_managers() -> None: ... + +class _DBProxy: + module: Any + kw: Any + poolclass: Any + pools: Any + def __init__(self, module, poolclass=..., **kw) -> None: ... + def close(self) -> None: ... + def __del__(self) -> None: ... + def __getattr__(self, key): ... + def get_pool(self, *args, **kw): ... + def connect(self, *args, **kw): ... + def dispose(self, *args, **kw) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/events.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/events.pyi new file mode 100644 index 000000000000..8a5dde542e3a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/events.pyi @@ -0,0 +1,13 @@ +from .. import event + +class PoolEvents(event.Events): + def connect(self, dbapi_connection, connection_record) -> None: ... + def first_connect(self, dbapi_connection, connection_record) -> None: ... + def checkout(self, dbapi_connection, connection_record, connection_proxy) -> None: ... + def checkin(self, dbapi_connection, connection_record) -> None: ... + def reset(self, dbapi_connection, connection_record) -> None: ... + def invalidate(self, dbapi_connection, connection_record, exception) -> None: ... + def soft_invalidate(self, dbapi_connection, connection_record, exception) -> None: ... + def close(self, dbapi_connection, connection_record) -> None: ... + def detach(self, dbapi_connection, connection_record) -> None: ... 
+ def close_detached(self, dbapi_connection) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/impl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/impl.pyi new file mode 100644 index 000000000000..2646cafd536d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/pool/impl.pyi @@ -0,0 +1,46 @@ +from typing import Any + +from ..util import memoized_property +from .base import Pool + +class QueuePool(Pool): + def __init__( + self, creator, pool_size: int = ..., max_overflow: int = ..., timeout: float = ..., use_lifo: bool = ..., **kw + ) -> None: ... + def recreate(self): ... + def dispose(self) -> None: ... + def status(self): ... + def size(self): ... + def timeout(self): ... + def checkedin(self): ... + def overflow(self): ... + def checkedout(self): ... + +class AsyncAdaptedQueuePool(QueuePool): ... +class FallbackAsyncAdaptedQueuePool(AsyncAdaptedQueuePool): ... + +class NullPool(Pool): + def status(self): ... + def recreate(self): ... + def dispose(self) -> None: ... + +class SingletonThreadPool(Pool): + size: Any + def __init__(self, creator, pool_size: int = ..., **kw) -> None: ... + def recreate(self): ... + def dispose(self) -> None: ... + def status(self): ... + def connect(self): ... + +class StaticPool(Pool): + @memoized_property + def connection(self): ... + def status(self): ... + def dispose(self) -> None: ... + def recreate(self): ... + +class AssertionPool(Pool): + def __init__(self, *args, **kw) -> None: ... + def status(self): ... + def dispose(self) -> None: ... + def recreate(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/processors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/processors.pyi new file mode 100644 index 000000000000..be304e64ea5d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/processors.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def str_to_datetime_processor_factory(regexp, type_): ... +def py_fallback(): ... +def to_unicode_processor_factory(encoding, errors: Any | None = ...): ... +def to_conditional_unicode_processor_factory(encoding, errors: Any | None = ...): ... +def to_decimal_processor_factory(target_class, scale): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/schema.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/schema.pyi new file mode 100644 index 000000000000..ef2cc1ed91c9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/schema.pyi @@ -0,0 +1,51 @@ +from .sql.base import SchemaVisitor as SchemaVisitor +from .sql.ddl import ( + DDL as DDL, + AddConstraint as AddConstraint, + CreateColumn as CreateColumn, + CreateIndex as CreateIndex, + CreateSchema as CreateSchema, + CreateSequence as CreateSequence, + CreateTable as CreateTable, + DDLBase as DDLBase, + DDLElement as DDLElement, + DropColumnComment as DropColumnComment, + DropConstraint as DropConstraint, + DropIndex as DropIndex, + DropSchema as DropSchema, + DropSequence as DropSequence, + DropTable as DropTable, + DropTableComment as DropTableComment, + SetColumnComment as SetColumnComment, + SetTableComment as SetTableComment, + _CreateDropBase as _CreateDropBase, + _DDLCompiles as _DDLCompiles, + _DropView as _DropView, + sort_tables as sort_tables, + sort_tables_and_constraints as sort_tables_and_constraints, +) +from .sql.naming import conv as conv +from .sql.schema import ( + BLANK_SCHEMA as BLANK_SCHEMA, + 
CheckConstraint as CheckConstraint, + Column as Column, + ColumnCollectionConstraint as ColumnCollectionConstraint, + ColumnCollectionMixin as ColumnCollectionMixin, + ColumnDefault as ColumnDefault, + Computed as Computed, + Constraint as Constraint, + DefaultClause as DefaultClause, + DefaultGenerator as DefaultGenerator, + FetchedValue as FetchedValue, + ForeignKey as ForeignKey, + ForeignKeyConstraint as ForeignKeyConstraint, + Identity as Identity, + Index as Index, + MetaData as MetaData, + PrimaryKeyConstraint as PrimaryKeyConstraint, + SchemaItem as SchemaItem, + Sequence as Sequence, + Table as Table, + ThreadLocalMetaData as ThreadLocalMetaData, + UniqueConstraint as UniqueConstraint, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/__init__.pyi new file mode 100644 index 000000000000..625094b1e7ab --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/__init__.pyi @@ -0,0 +1,91 @@ +from . 
import sqltypes as sqltypes +from .base import Executable as Executable +from .compiler import ( + COLLECT_CARTESIAN_PRODUCTS as COLLECT_CARTESIAN_PRODUCTS, + FROM_LINTING as FROM_LINTING, + NO_LINTING as NO_LINTING, + WARN_LINTING as WARN_LINTING, +) +from .expression import ( + LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT, + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, + LABEL_STYLE_NONE as LABEL_STYLE_NONE, + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, + Alias as Alias, + ClauseElement as ClauseElement, + ColumnCollection as ColumnCollection, + ColumnElement as ColumnElement, + CompoundSelect as CompoundSelect, + Delete as Delete, + False_ as False_, + FromClause as FromClause, + Insert as Insert, + Join as Join, + LambdaElement as LambdaElement, + Select as Select, + Selectable as Selectable, + StatementLambdaElement as StatementLambdaElement, + Subquery as Subquery, + TableClause as TableClause, + TableSample as TableSample, + True_ as True_, + Update as Update, + Values as Values, + alias as alias, + all_ as all_, + and_ as and_, + any_ as any_, + asc as asc, + between as between, + bindparam as bindparam, + case as case, + cast as cast, + collate as collate, + column as column, + cte as cte, + delete as delete, + desc as desc, + distinct as distinct, + except_ as except_, + except_all as except_all, + exists as exists, + extract as extract, + false as false, + func as func, + funcfilter as funcfilter, + insert as insert, + intersect as intersect, + intersect_all as intersect_all, + join as join, + label as label, + lambda_stmt as lambda_stmt, + lateral as lateral, + literal as literal, + literal_column as literal_column, + modifier as modifier, + not_ as not_, + null as null, + nulls_first as nulls_first, + nulls_last as nulls_last, + nullsfirst as nullsfirst, + nullslast as nullslast, + or_ as or_, + outerjoin as outerjoin, + outparam as outparam, + over as over, + quoted_name as quoted_name, + select as select, + 
subquery as subquery, + table as table, + tablesample as tablesample, + text as text, + true as true, + tuple_ as tuple_, + type_coerce as type_coerce, + union as union, + union_all as union_all, + update as update, + values as values, + within_group as within_group, +) +from .visitors import ClauseVisitor as ClauseVisitor diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/annotation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/annotation.pyi new file mode 100644 index 000000000000..202412831da0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/annotation.pyi @@ -0,0 +1,19 @@ +from typing import Any + +EMPTY_ANNOTATIONS: Any + +class SupportsAnnotations: ... +class SupportsCloneAnnotations(SupportsAnnotations): ... +class SupportsWrappingAnnotations(SupportsAnnotations): ... + +class Annotated: + def __new__(cls, *args): ... + __dict__: Any + def __init__(self, element, values) -> None: ... + def __reduce__(self): ... + def __hash__(self): ... + def __eq__(self, other): ... + @property + def entity_namespace(self): ... + +annotated_classes: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/base.pyi new file mode 100644 index 000000000000..fb006721d837 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/base.pyi @@ -0,0 +1,159 @@ +from _typeshed import Self +from collections.abc import MutableMapping +from typing import Any + +from .. import util +from ..util import HasMemoized, hybridmethod, memoized_property +from . 
import roles +from .traversals import ( + HasCacheKey as HasCacheKey, + HasCopyInternals as HasCopyInternals, + MemoizedHasCacheKey as MemoizedHasCacheKey, +) +from .visitors import ClauseVisitor + +coercions: Any +elements: Any +type_api: Any +PARSE_AUTOCOMMIT: Any +NO_ARG: Any + +class Immutable: + def unique_params(self, *optionaldict, **kwargs) -> None: ... + def params(self, *optionaldict, **kwargs) -> None: ... + +class SingletonConstant(Immutable): + def __new__(cls, *arg, **kw): ... + +class _DialectArgView(MutableMapping[Any, Any]): + obj: Any + def __init__(self, obj) -> None: ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __len__(self): ... + def __iter__(self): ... + +class _DialectArgDict(MutableMapping[Any, Any]): + def __init__(self) -> None: ... + def __len__(self): ... + def __iter__(self): ... + def __getitem__(self, key): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + +class DialectKWArgs: + @classmethod + def argument_for(cls, dialect_name, argument_name, default) -> None: ... + @memoized_property + def dialect_kwargs(self): ... + @property + def kwargs(self): ... + @memoized_property + def dialect_options(self): ... + +class CompileState: + plugins: Any + @classmethod + def create_for_statement(cls, statement, compiler, **kw): ... + statement: Any + def __init__(self, statement, compiler, **kw) -> None: ... + @classmethod + def get_plugin_class(cls, statement): ... + @classmethod + def plugin_for(cls, plugin_name, visit_name): ... + +class Generative(HasMemoized): ... +class InPlaceGenerative(HasMemoized): ... +class HasCompileState(Generative): ... + +class _MetaOptions(type): + def __init__(cls, classname, bases, dict_) -> None: ... + def __add__(self, other): ... + +class Options: + def __init__(self, **kw) -> None: ... + def __add__(self, other): ... + def __eq__(self, other): ... 
+ @classmethod + def isinstance(cls, klass): ... + @hybridmethod + def add_to_element(self, name, value): ... + @classmethod + def safe_merge(cls, other): ... + @classmethod + def from_execution_options(cls, key, attrs, exec_options, statement_exec_options): ... + +class CacheableOptions(Options, HasCacheKey): ... + +class ExecutableOption(HasCopyInternals): + __visit_name__: str + +class Executable(roles.StatementRole, Generative): + supports_execution: bool + is_select: bool + is_update: bool + is_insert: bool + is_text: bool + is_delete: bool + is_dml: bool + def options(self: Self, *options) -> Self: ... + def execution_options(self: Self, **kw) -> Self: ... + def get_execution_options(self): ... + def execute(self, *multiparams, **params): ... + def scalar(self, *multiparams, **params): ... + @property + def bind(self): ... + +class prefix_anon_map(dict[Any, Any]): + def __missing__(self, key): ... + +class SchemaEventTarget: ... + +class SchemaVisitor(ClauseVisitor): + __traverse_options__: Any + +class ColumnCollection: + def __init__(self, columns: Any | None = ...) -> None: ... + def keys(self): ... + def values(self): ... + def items(self): ... + def __bool__(self): ... + def __len__(self): ... + def __iter__(self): ... + def __getitem__(self, key): ... + def __getattr__(self, key): ... + def __contains__(self, key): ... + def compare(self, other): ... + def __eq__(self, other): ... + def get(self, key, default: Any | None = ...): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def __setattr__(self, key, obj) -> None: ... + def clear(self) -> None: ... + def remove(self, column) -> None: ... + def update(self, iter_) -> None: ... + __hash__: Any + def add(self, column, key: Any | None = ...) -> None: ... + def contains_column(self, col): ... + def as_immutable(self): ... + def corresponding_column(self, column, require_embedded: bool = ...): ... 
+ +class DedupeColumnCollection(ColumnCollection): + def add(self, column, key: Any | None = ...) -> None: ... + def extend(self, iter_) -> None: ... + def remove(self, column) -> None: ... + def replace(self, column) -> None: ... + +class ImmutableColumnCollection(util.ImmutableContainer, ColumnCollection): + def __init__(self, collection) -> None: ... + add: Any + extend: Any + remove: Any + +class ColumnSet(util.ordered_column_set): + def contains_column(self, col): ... + def extend(self, cols) -> None: ... + def __add__(self, other): ... + def __eq__(self, other): ... + def __hash__(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/coercions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/coercions.pyi new file mode 100644 index 000000000000..79a24e98d6bd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/coercions.pyi @@ -0,0 +1,66 @@ +from typing import Any + +from . import roles + +elements: Any +lambdas: Any +schema: Any +selectable: Any +sqltypes: Any +traversals: Any + +def expect(role, element, apply_propagate_attrs: Any | None = ..., argname: Any | None = ..., post_inspect: bool = ..., **kw): ... +def expect_as_key(role, element, **kw): ... +def expect_col_expression_collection(role, expressions) -> None: ... + +class RoleImpl: + name: Any + def __init__(self, role_class) -> None: ... + +class _Deannotate: ... +class _StringOnly: ... +class _ReturnsStringKey: ... +class _ColumnCoercions: ... +class _NoTextCoercion: ... +class _CoerceLiterals: ... +class LiteralValueImpl(RoleImpl): ... +class _SelectIsNotFrom: ... +class HasCacheKeyImpl(RoleImpl): ... +class ExecutableOptionImpl(RoleImpl): ... +class ExpressionElementImpl(_ColumnCoercions, RoleImpl): ... +class BinaryElementImpl(ExpressionElementImpl, RoleImpl): ... +class InElementImpl(RoleImpl): ... +class OnClauseImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl): ... 
+class WhereHavingImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl): ... +class StatementOptionImpl(_CoerceLiterals, RoleImpl): ... +class ColumnArgumentImpl(_NoTextCoercion, RoleImpl): ... +class ColumnArgumentOrKeyImpl(_ReturnsStringKey, RoleImpl): ... +class StrAsPlainColumnImpl(_CoerceLiterals, RoleImpl): ... +class ByOfImpl(_CoerceLiterals, _ColumnCoercions, RoleImpl, roles.ByOfRole): ... +class OrderByImpl(ByOfImpl, RoleImpl): ... +class GroupByImpl(ByOfImpl, RoleImpl): ... +class DMLColumnImpl(_ReturnsStringKey, RoleImpl): ... +class ConstExprImpl(RoleImpl): ... +class TruncatedLabelImpl(_StringOnly, RoleImpl): ... +class DDLExpressionImpl(_Deannotate, _CoerceLiterals, RoleImpl): ... +class DDLConstraintColumnImpl(_Deannotate, _ReturnsStringKey, RoleImpl): ... +class DDLReferredColumnImpl(DDLConstraintColumnImpl): ... +class LimitOffsetImpl(RoleImpl): ... +class LabeledColumnExprImpl(ExpressionElementImpl): ... +class ColumnsClauseImpl(_SelectIsNotFrom, _CoerceLiterals, RoleImpl): ... +class ReturnsRowsImpl(RoleImpl): ... +class StatementImpl(_CoerceLiterals, RoleImpl): ... +class SelectStatementImpl(_NoTextCoercion, RoleImpl): ... +class HasCTEImpl(ReturnsRowsImpl): ... +class IsCTEImpl(RoleImpl): ... +class JoinTargetImpl(RoleImpl): ... +class FromClauseImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): ... +class StrictFromClauseImpl(FromClauseImpl): ... +class AnonymizedFromClauseImpl(StrictFromClauseImpl): ... +class DMLTableImpl(_SelectIsNotFrom, _NoTextCoercion, RoleImpl): ... +class DMLSelectImpl(_NoTextCoercion, RoleImpl): ... +class CompoundElementImpl(_NoTextCoercion, RoleImpl): ... 
+ +cls: Any +name: Any +impl: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/compiler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/compiler.pyi new file mode 100644 index 000000000000..66cac257c105 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/compiler.pyi @@ -0,0 +1,472 @@ +from typing import Any, NamedTuple + +from ..util import memoized_property +from . import elements + +RESERVED_WORDS: Any +LEGAL_CHARACTERS: Any +LEGAL_CHARACTERS_PLUS_SPACE: Any +ILLEGAL_INITIAL_CHARACTERS: Any +FK_ON_DELETE: Any +FK_ON_UPDATE: Any +FK_INITIALLY: Any +BIND_PARAMS: Any +BIND_PARAMS_ESC: Any +BIND_TEMPLATES: Any +OPERATORS: Any +FUNCTIONS: Any +EXTRACT_MAP: Any +COMPOUND_KEYWORDS: Any +RM_RENDERED_NAME: int +RM_NAME: int +RM_OBJECTS: int +RM_TYPE: int + +class ExpandedState(NamedTuple): + statement: Any + additional_parameters: Any + processors: Any + positiontup: Any + parameter_expansion: Any + +NO_LINTING: Any +COLLECT_CARTESIAN_PRODUCTS: Any +WARN_LINTING: Any +FROM_LINTING: Any + +class FromLinter: + def lint(self, start: Any | None = ...): ... + def warn(self) -> None: ... + +class Compiled: + schema_translate_map: Any + execution_options: Any + compile_state: Any + cache_key: Any + dialect: Any + preparer: Any + statement: Any + can_execute: Any + string: Any + def __init__( + self, dialect, statement, schema_translate_map: Any | None = ..., render_schema_translate: bool = ..., compile_kwargs=... + ) -> None: ... + def visit_unsupported_compilation(self, element, err) -> None: ... + @property + def sql_compiler(self) -> None: ... + def process(self, obj, **kwargs): ... + def construct_params(self, params: Any | None = ..., extracted_parameters: Any | None = ...) -> None: ... + @property + def params(self): ... + +class TypeCompiler: + ensure_kwarg: str + dialect: Any + def __init__(self, dialect) -> None: ... 
+ def process(self, type_, **kw): ... + def visit_unsupported_compilation(self, element, err, **kw) -> None: ... + +class _CompileLabel(elements.ColumnElement[Any]): + __visit_name__: str + element: Any + name: Any + def __init__(self, col, name, alt_names=...) -> None: ... + @property + def proxy_set(self): ... + @property + def type(self): ... + def self_group(self, **kw): ... + +class SQLCompiler(Compiled): + extract_map: Any + compound_keywords: Any + isdelete: bool + isinsert: bool + isupdate: bool + isplaintext: bool + returning: Any + returning_precedes_values: bool + render_table_with_column_in_update_from: bool + ansi_bind_rules: bool + insert_single_values_expr: Any + literal_execute_params: Any + post_compile_params: Any + escaped_bind_names: Any + has_out_parameters: bool + insert_prefetch: Any + update_prefetch: Any + postfetch_lastrowid: bool + positiontup: Any + inline: bool + column_keys: Any + cache_key: Any + for_executemany: Any + linting: Any + binds: Any + bind_names: Any + stack: Any + positional: Any + bindtemplate: Any + ctes: Any + label_length: Any + anon_map: Any + truncated_names: Any + def __init__( + self, + dialect, + statement, + cache_key: Any | None = ..., + column_keys: Any | None = ..., + for_executemany: bool = ..., + linting=..., + **kwargs, + ) -> None: ... + @property + def current_executable(self): ... + @property + def prefetch(self): ... + def is_subquery(self): ... + @property + def sql_compiler(self): ... + def construct_params(self, params: Any | None = ..., _group_number: Any | None = ..., _check: bool = ..., extracted_parameters: Any | None = ...): ... # type: ignore[override] + @property + def params(self): ... + def default_from(self): ... + def visit_grouping(self, grouping, asfrom: bool = ..., **kwargs): ... + def visit_select_statement_grouping(self, grouping, **kwargs): ... + def visit_label_reference(self, element, within_columns_clause: bool = ..., **kwargs): ... 
+ def visit_textual_label_reference(self, element, within_columns_clause: bool = ..., **kwargs): ... + def visit_label( + self, + label, + add_to_result_map: Any | None = ..., + within_label_clause: bool = ..., + within_columns_clause: bool = ..., + render_label_as_label: Any | None = ..., + result_map_targets=..., + **kw, + ): ... + def visit_lambda_element(self, element, **kw): ... + def visit_column( + self, column, add_to_result_map: Any | None = ..., include_table: bool = ..., result_map_targets=..., **kwargs + ): ... + def visit_collation(self, element, **kw): ... + def visit_fromclause(self, fromclause, **kwargs): ... + def visit_index(self, index, **kwargs): ... + def visit_typeclause(self, typeclause, **kw): ... + def post_process_text(self, text): ... + def escape_literal_column(self, text): ... + def visit_textclause(self, textclause, add_to_result_map: Any | None = ..., **kw): ... + def visit_textual_select(self, taf, compound_index: Any | None = ..., asfrom: bool = ..., **kw): ... + def visit_null(self, expr, **kw): ... + def visit_true(self, expr, **kw): ... + def visit_false(self, expr, **kw): ... + def visit_tuple(self, clauselist, **kw): ... + def visit_clauselist(self, clauselist, **kw): ... + def visit_case(self, clause, **kwargs): ... + def visit_type_coerce(self, type_coerce, **kw): ... + def visit_cast(self, cast, **kwargs): ... + def visit_over(self, over, **kwargs): ... + def visit_withingroup(self, withingroup, **kwargs): ... + def visit_funcfilter(self, funcfilter, **kwargs): ... + def visit_extract(self, extract, **kwargs): ... + def visit_scalar_function_column(self, element, **kw): ... + def visit_function(self, func, add_to_result_map: Any | None = ..., **kwargs): ... + def visit_next_value_func(self, next_value, **kw): ... + def visit_sequence(self, sequence, **kw) -> None: ... + def function_argspec(self, func, **kwargs): ... 
+ compile_state: Any + def visit_compound_select(self, cs, asfrom: bool = ..., compound_index: Any | None = ..., **kwargs): ... + def visit_unary(self, unary, add_to_result_map: Any | None = ..., result_map_targets=..., **kw): ... + def visit_is_true_unary_operator(self, element, operator, **kw): ... + def visit_is_false_unary_operator(self, element, operator, **kw): ... + def visit_not_match_op_binary(self, binary, operator, **kw): ... + def visit_not_in_op_binary(self, binary, operator, **kw): ... + def visit_empty_set_op_expr(self, type_, expand_op): ... + def visit_empty_set_expr(self, element_types) -> None: ... + def visit_binary( + self, + binary, + override_operator: Any | None = ..., + eager_grouping: bool = ..., + from_linter: Any | None = ..., + lateral_from_linter: Any | None = ..., + **kw, + ): ... + def visit_function_as_comparison_op_binary(self, element, operator, **kw): ... + def visit_mod_binary(self, binary, operator, **kw): ... + def visit_custom_op_binary(self, element, operator, **kw): ... + def visit_custom_op_unary_operator(self, element, operator, **kw): ... + def visit_custom_op_unary_modifier(self, element, operator, **kw): ... + def visit_contains_op_binary(self, binary, operator, **kw): ... + def visit_not_contains_op_binary(self, binary, operator, **kw): ... + def visit_startswith_op_binary(self, binary, operator, **kw): ... + def visit_not_startswith_op_binary(self, binary, operator, **kw): ... + def visit_endswith_op_binary(self, binary, operator, **kw): ... + def visit_not_endswith_op_binary(self, binary, operator, **kw): ... + def visit_like_op_binary(self, binary, operator, **kw): ... + def visit_not_like_op_binary(self, binary, operator, **kw): ... + def visit_ilike_op_binary(self, binary, operator, **kw): ... + def visit_not_ilike_op_binary(self, binary, operator, **kw): ... + def visit_between_op_binary(self, binary, operator, **kw): ... + def visit_not_between_op_binary(self, binary, operator, **kw): ... 
+ def visit_regexp_match_op_binary(self, binary, operator, **kw) -> None: ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw) -> None: ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw) -> None: ... + def visit_bindparam( + self, + bindparam, + within_columns_clause: bool = ..., + literal_binds: bool = ..., + skip_bind_expression: bool = ..., + literal_execute: bool = ..., + render_postcompile: bool = ..., + **kwargs, + ): ... + def render_literal_bindparam(self, bindparam, render_literal_value=..., **kw): ... + def render_literal_value(self, value, type_): ... + def bindparam_string( + self, + name, + positional_names: Any | None = ..., + post_compile: bool = ..., + expanding: bool = ..., + escaped_from: Any | None = ..., + **kw, + ): ... + execution_options: Any + ctes_recursive: bool + def visit_cte( + self, + cte, + asfrom: bool = ..., + ashint: bool = ..., + fromhints: Any | None = ..., + visiting_cte: Any | None = ..., + from_linter: Any | None = ..., + **kwargs, + ): ... + def visit_table_valued_alias(self, element, **kw): ... + def visit_table_valued_column(self, element, **kw): ... + def visit_alias( + self, + alias, + asfrom: bool = ..., + ashint: bool = ..., + iscrud: bool = ..., + fromhints: Any | None = ..., + subquery: bool = ..., + lateral: bool = ..., + enclosing_alias: Any | None = ..., + from_linter: Any | None = ..., + **kwargs, + ): ... + def visit_subquery(self, subquery, **kw): ... + def visit_lateral(self, lateral_, **kw): ... + def visit_tablesample(self, tablesample, asfrom: bool = ..., **kw): ... + def visit_values(self, element, asfrom: bool = ..., from_linter: Any | None = ..., **kw): ... + def get_render_as_alias_suffix(self, alias_name_text): ... + def format_from_hint_text(self, sqltext, table, hint, iscrud): ... + def get_select_hint_text(self, byfroms) -> None: ... + def get_from_hint_text(self, table, text) -> None: ... + def get_crud_hint_text(self, table, text) -> None: ... 
+ def get_statement_hint_text(self, hint_texts): ... + translate_select_structure: Any + def visit_select( + self, + select_stmt, + asfrom: bool = ..., + insert_into: bool = ..., + fromhints: Any | None = ..., + compound_index: Any | None = ..., + select_wraps_for: Any | None = ..., + lateral: bool = ..., + from_linter: Any | None = ..., + **kwargs, + ): ... + def get_cte_preamble(self, recursive): ... + def get_select_precolumns(self, select, **kw): ... + def group_by_clause(self, select, **kw): ... + def order_by_clause(self, select, **kw): ... + def for_update_clause(self, select, **kw): ... + def returning_clause(self, stmt, returning_cols) -> None: ... + def limit_clause(self, select, **kw): ... + def fetch_clause(self, select, **kw): ... + def visit_table( + self, + table, + asfrom: bool = ..., + iscrud: bool = ..., + ashint: bool = ..., + fromhints: Any | None = ..., + use_schema: bool = ..., + from_linter: Any | None = ..., + **kwargs, + ): ... + def visit_join(self, join, asfrom: bool = ..., from_linter: Any | None = ..., **kwargs): ... + def visit_insert(self, insert_stmt, **kw): ... + def update_limit_clause(self, update_stmt) -> None: ... + def update_tables_clause(self, update_stmt, from_table, extra_froms, **kw): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ... + def visit_update(self, update_stmt, **kw): ... + def delete_extra_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw) -> None: ... + def delete_table_clause(self, delete_stmt, from_table, extra_froms): ... + def visit_delete(self, delete_stmt, **kw): ... + def visit_savepoint(self, savepoint_stmt): ... + def visit_rollback_to_savepoint(self, savepoint_stmt): ... + def visit_release_savepoint(self, savepoint_stmt): ... + +class StrSQLCompiler(SQLCompiler): + def visit_unsupported_compilation(self, element, err, **kw): ... + def visit_getitem_binary(self, binary, operator, **kw): ... 
+ def visit_json_getitem_op_binary(self, binary, operator, **kw): ... + def visit_json_path_getitem_op_binary(self, binary, operator, **kw): ... + def visit_sequence(self, seq, **kw): ... + def returning_clause(self, stmt, returning_cols): ... + def update_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def delete_extra_from_clause(self, update_stmt, from_table, extra_froms, from_hints, **kw): ... + def visit_empty_set_expr(self, type_): ... + def get_from_hint_text(self, table, text): ... + def visit_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_not_regexp_match_op_binary(self, binary, operator, **kw): ... + def visit_regexp_replace_op_binary(self, binary, operator, **kw): ... + +class DDLCompiler(Compiled): + @memoized_property + def sql_compiler(self): ... + @memoized_property + def type_compiler(self): ... + def construct_params(self, params: Any | None = ..., extracted_parameters: Any | None = ...) -> None: ... + def visit_ddl(self, ddl, **kwargs): ... + def visit_create_schema(self, create, **kw): ... + def visit_drop_schema(self, drop, **kw): ... + def visit_create_table(self, create, **kw): ... + def visit_create_column(self, create, first_pk: bool = ..., **kw): ... + def create_table_constraints(self, table, _include_foreign_key_constraints: Any | None = ..., **kw): ... + def visit_drop_table(self, drop, **kw): ... + def visit_drop_view(self, drop, **kw): ... + def visit_create_index(self, create, include_schema: bool = ..., include_table_schema: bool = ..., **kw): ... + def visit_drop_index(self, drop, **kw): ... + def visit_add_constraint(self, create, **kw): ... + def visit_set_table_comment(self, create, **kw): ... + def visit_drop_table_comment(self, drop, **kw): ... + def visit_set_column_comment(self, create, **kw): ... + def visit_drop_column_comment(self, drop, **kw): ... + def get_identity_options(self, identity_options): ... 
+ def visit_create_sequence(self, create, prefix: Any | None = ..., **kw): ... + def visit_drop_sequence(self, drop, **kw): ... + def visit_drop_constraint(self, drop, **kw): ... + def get_column_specification(self, column, **kwargs): ... + def create_table_suffix(self, table): ... + def post_create_table(self, table): ... + def get_column_default_string(self, column): ... + def visit_table_or_column_check_constraint(self, constraint, **kw): ... + def visit_check_constraint(self, constraint, **kw): ... + def visit_column_check_constraint(self, constraint, **kw): ... + def visit_primary_key_constraint(self, constraint, **kw): ... + def visit_foreign_key_constraint(self, constraint, **kw): ... + def define_constraint_remote_table(self, constraint, table, preparer): ... + def visit_unique_constraint(self, constraint, **kw): ... + def define_constraint_cascades(self, constraint): ... + def define_constraint_deferrability(self, constraint): ... + def define_constraint_match(self, constraint): ... + def visit_computed_column(self, generated, **kw): ... + def visit_identity_column(self, identity, **kw): ... + +class GenericTypeCompiler(TypeCompiler): + def visit_FLOAT(self, type_, **kw): ... + def visit_REAL(self, type_, **kw): ... + def visit_NUMERIC(self, type_, **kw): ... + def visit_DECIMAL(self, type_, **kw): ... + def visit_INTEGER(self, type_, **kw): ... + def visit_SMALLINT(self, type_, **kw): ... + def visit_BIGINT(self, type_, **kw): ... + def visit_TIMESTAMP(self, type_, **kw): ... + def visit_DATETIME(self, type_, **kw): ... + def visit_DATE(self, type_, **kw): ... + def visit_TIME(self, type_, **kw): ... + def visit_CLOB(self, type_, **kw): ... + def visit_NCLOB(self, type_, **kw): ... + def visit_CHAR(self, type_, **kw): ... + def visit_NCHAR(self, type_, **kw): ... + def visit_VARCHAR(self, type_, **kw): ... + def visit_NVARCHAR(self, type_, **kw): ... + def visit_TEXT(self, type_, **kw): ... + def visit_BLOB(self, type_, **kw): ... 
+ def visit_BINARY(self, type_, **kw): ... + def visit_VARBINARY(self, type_, **kw): ... + def visit_BOOLEAN(self, type_, **kw): ... + def visit_large_binary(self, type_, **kw): ... + def visit_boolean(self, type_, **kw): ... + def visit_time(self, type_, **kw): ... + def visit_datetime(self, type_, **kw): ... + def visit_date(self, type_, **kw): ... + def visit_big_integer(self, type_, **kw): ... + def visit_small_integer(self, type_, **kw): ... + def visit_integer(self, type_, **kw): ... + def visit_real(self, type_, **kw): ... + def visit_float(self, type_, **kw): ... + def visit_numeric(self, type_, **kw): ... + def visit_string(self, type_, **kw): ... + def visit_unicode(self, type_, **kw): ... + def visit_text(self, type_, **kw): ... + def visit_unicode_text(self, type_, **kw): ... + def visit_enum(self, type_, **kw): ... + def visit_null(self, type_, **kw) -> None: ... + def visit_type_decorator(self, type_, **kw): ... + def visit_user_defined(self, type_, **kw): ... + +class StrSQLTypeCompiler(GenericTypeCompiler): + def process(self, type_, **kw): ... + def __getattr__(self, key): ... + def visit_null(self, type_, **kw): ... + def visit_user_defined(self, type_, **kw): ... + +class IdentifierPreparer: + reserved_words: Any + legal_characters: Any + illegal_initial_characters: Any + schema_for_object: Any + dialect: Any + initial_quote: Any + final_quote: Any + escape_quote: Any + escape_to_quote: Any + omit_schema: Any + quote_case_sensitive_collations: Any + def __init__( + self, + dialect, + initial_quote: str = ..., + final_quote: Any | None = ..., + escape_quote: str = ..., + quote_case_sensitive_collations: bool = ..., + omit_schema: bool = ..., + ) -> None: ... + def validate_sql_phrase(self, element, reg): ... + def quote_identifier(self, value): ... + def quote_schema(self, schema, force: Any | None = ...): ... + def quote(self, ident, force: Any | None = ...): ... + def format_collation(self, collation_name): ... 
+ def format_sequence(self, sequence, use_schema: bool = ...): ... + def format_label(self, label, name: Any | None = ...): ... + def format_alias(self, alias, name: Any | None = ...): ... + def format_savepoint(self, savepoint, name: Any | None = ...): ... + def format_constraint(self, constraint, _alembic_quote: bool = ...): ... + def truncate_and_render_index_name(self, name, _alembic_quote: bool = ...): ... + def truncate_and_render_constraint_name(self, name, _alembic_quote: bool = ...): ... + def format_index(self, index): ... + def format_table(self, table, use_schema: bool = ..., name: Any | None = ...): ... + def format_schema(self, name): ... + def format_label_name(self, name, anon_map: Any | None = ...): ... + def format_column( + self, + column, + use_table: bool = ..., + name: Any | None = ..., + table_name: Any | None = ..., + use_schema: bool = ..., + anon_map: Any | None = ..., + ): ... + def format_table_seq(self, table, use_schema: bool = ...): ... + def unformat_identifiers(self, identifiers): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/crud.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/crud.pyi new file mode 100644 index 000000000000..a39d81741c23 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/crud.pyi @@ -0,0 +1,15 @@ +from typing import Any + +from . import elements + +REQUIRED: Any + +class _multiparam_column(elements.ColumnElement[Any]): + index: Any + key: Any + original: Any + default: Any + type: Any + def __init__(self, original, index) -> None: ... + def compare(self, other, **kw) -> None: ... + def __eq__(self, other): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/ddl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/ddl.pyi new file mode 100644 index 000000000000..4dba7ea2bb74 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/ddl.pyi @@ -0,0 +1,139 @@ +from typing import Any + +from . import roles +from .base import Executable, SchemaVisitor +from .elements import ClauseElement + +class _DDLCompiles(ClauseElement): ... + +class DDLElement(roles.DDLRole, Executable, _DDLCompiles): + target: Any + on: Any + dialect: Any + callable_: Any + def execute(self, bind: Any | None = ..., target: Any | None = ...): ... # type: ignore[override] + def against(self, target) -> None: ... + state: Any + def execute_if(self, dialect: Any | None = ..., callable_: Any | None = ..., state: Any | None = ...) -> None: ... + def __call__(self, target, bind, **kw): ... + bind: Any + +class DDL(DDLElement): + __visit_name__: str + statement: Any + context: Any + def __init__(self, statement, context: Any | None = ..., bind: Any | None = ...) -> None: ... + +class _CreateDropBase(DDLElement): + element: Any + bind: Any + if_exists: Any + if_not_exists: Any + def __init__( + self, element, bind: Any | None = ..., if_exists: bool = ..., if_not_exists: bool = ..., _legacy_bind: Any | None = ... + ) -> None: ... + @property + def stringify_dialect(self): ... + +class CreateSchema(_CreateDropBase): + __visit_name__: str + quote: Any + def __init__(self, name, quote: Any | None = ..., **kw) -> None: ... + +class DropSchema(_CreateDropBase): + __visit_name__: str + quote: Any + cascade: Any + def __init__(self, name, quote: Any | None = ..., cascade: bool = ..., **kw) -> None: ... 
+ +class CreateTable(_CreateDropBase): + __visit_name__: str + columns: Any + include_foreign_key_constraints: Any + def __init__( + self, element, bind: Any | None = ..., include_foreign_key_constraints: Any | None = ..., if_not_exists: bool = ... + ) -> None: ... + +class _DropView(_CreateDropBase): + __visit_name__: str + +class CreateColumn(_DDLCompiles): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + +class DropTable(_CreateDropBase): + __visit_name__: str + def __init__(self, element, bind: Any | None = ..., if_exists: bool = ...) -> None: ... + +class CreateSequence(_CreateDropBase): + __visit_name__: str + +class DropSequence(_CreateDropBase): + __visit_name__: str + +class CreateIndex(_CreateDropBase): + __visit_name__: str + def __init__(self, element, bind: Any | None = ..., if_not_exists: bool = ...) -> None: ... + +class DropIndex(_CreateDropBase): + __visit_name__: str + def __init__(self, element, bind: Any | None = ..., if_exists: bool = ...) -> None: ... + +class AddConstraint(_CreateDropBase): + __visit_name__: str + def __init__(self, element, *args, **kw) -> None: ... + +class DropConstraint(_CreateDropBase): + __visit_name__: str + cascade: Any + def __init__(self, element, cascade: bool = ..., **kw) -> None: ... + +class SetTableComment(_CreateDropBase): + __visit_name__: str + +class DropTableComment(_CreateDropBase): + __visit_name__: str + +class SetColumnComment(_CreateDropBase): + __visit_name__: str + +class DropColumnComment(_CreateDropBase): + __visit_name__: str + +class DDLBase(SchemaVisitor): + connection: Any + def __init__(self, connection) -> None: ... + +class SchemaGenerator(DDLBase): + checkfirst: Any + tables: Any + preparer: Any + dialect: Any + memo: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., tables: Any | None = ..., **kwargs) -> None: ... + def visit_metadata(self, metadata) -> None: ... 
+ def visit_table( + self, table, create_ok: bool = ..., include_foreign_key_constraints: Any | None = ..., _is_metadata_operation: bool = ... + ) -> None: ... + def visit_foreign_key_constraint(self, constraint) -> None: ... + def visit_sequence(self, sequence, create_ok: bool = ...) -> None: ... + def visit_index(self, index, create_ok: bool = ...) -> None: ... + +class SchemaDropper(DDLBase): + checkfirst: Any + tables: Any + preparer: Any + dialect: Any + memo: Any + def __init__(self, dialect, connection, checkfirst: bool = ..., tables: Any | None = ..., **kwargs) -> None: ... + def visit_metadata(self, metadata): ... + def visit_index(self, index, drop_ok: bool = ...) -> None: ... + def visit_table(self, table, drop_ok: bool = ..., _is_metadata_operation: bool = ..., _ignore_sequences=...) -> None: ... + def visit_foreign_key_constraint(self, constraint) -> None: ... + def visit_sequence(self, sequence, drop_ok: bool = ...) -> None: ... + +def sort_tables(tables, skip_fn: Any | None = ..., extra_dependencies: Any | None = ...): ... +def sort_tables_and_constraints( + tables, filter_fn: Any | None = ..., extra_dependencies: Any | None = ..., _warn_for_cycles: bool = ... +): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/default_comparator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/default_comparator.pyi new file mode 100644 index 000000000000..ac514fdf0146 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/default_comparator.pyi @@ -0,0 +1,3 @@ +from typing import Any + +operator_lookup: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/dml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/dml.pyi new file mode 100644 index 000000000000..ee31b23435b0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/dml.pyi @@ -0,0 +1,111 @@ +from typing import Any + +from . import roles +from .base import CompileState, DialectKWArgs, Executable, HasCompileState +from .elements import ClauseElement +from .selectable import HasCTE, HasPrefixes, ReturnsRows + +class DMLState(CompileState): + isupdate: bool + isdelete: bool + isinsert: bool + def __init__(self, statement, compiler, **kw) -> None: ... + @property + def dml_table(self): ... + +class InsertDMLState(DMLState): + isinsert: bool + include_table_with_column_exprs: bool + statement: Any + def __init__(self, statement, compiler, **kw) -> None: ... + +class UpdateDMLState(DMLState): + isupdate: bool + include_table_with_column_exprs: bool + statement: Any + is_multitable: Any + def __init__(self, statement, compiler, **kw) -> None: ... + +class DeleteDMLState(DMLState): + isdelete: bool + statement: Any + def __init__(self, statement, compiler, **kw) -> None: ... + +class UpdateBase(roles.DMLRole, HasCTE, HasCompileState, DialectKWArgs, HasPrefixes, ReturnsRows, Executable, ClauseElement): + __visit_name__: str + named_with_column: bool + is_dml: bool + def params(self, *arg, **kw) -> None: ... + def with_dialect_options(self, **opt) -> None: ... 
+ bind: Any + def returning(self, *cols) -> None: ... + @property + def exported_columns(self): ... + def with_hint(self, text, selectable: Any | None = ..., dialect_name: str = ...) -> None: ... + +class ValuesBase(UpdateBase): + __visit_name__: str + select: Any + table: Any + def __init__(self, table, values, prefixes) -> None: ... + def values(self, *args, **kwargs) -> None: ... + def return_defaults(self, *cols) -> None: ... + +class Insert(ValuesBase): + __visit_name__: str + select: Any + include_insert_from_select_defaults: bool + is_insert: bool + def __init__( + self, + table, + values: Any | None = ..., + inline: bool = ..., + bind: Any | None = ..., + prefixes: Any | None = ..., + returning: Any | None = ..., + return_defaults: bool = ..., + **dialect_kw, + ) -> None: ... + def inline(self) -> None: ... + def from_select(self, names, select, include_defaults: bool = ...) -> None: ... + +class DMLWhereBase: + def where(self, *whereclause) -> None: ... + def filter(self, *criteria): ... + def filter_by(self, **kwargs): ... + @property + def whereclause(self): ... + +class Update(DMLWhereBase, ValuesBase): + __visit_name__: str + is_update: bool + def __init__( + self, + table, + whereclause: Any | None = ..., + values: Any | None = ..., + inline: bool = ..., + bind: Any | None = ..., + prefixes: Any | None = ..., + returning: Any | None = ..., + return_defaults: bool = ..., + preserve_parameter_order: bool = ..., + **dialect_kw, + ) -> None: ... + def ordered_values(self, *args) -> None: ... + def inline(self) -> None: ... + +class Delete(DMLWhereBase, UpdateBase): + __visit_name__: str + is_delete: bool + table: Any + def __init__( + self, + table, + whereclause: Any | None = ..., + bind: Any | None = ..., + returning: Any | None = ..., + prefixes: Any | None = ..., + **dialect_kw, + ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/elements.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/elements.pyi new file mode 100644 index 000000000000..4d3f8544226c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/elements.pyi @@ -0,0 +1,456 @@ +from typing import Any, Generic, TypeVar +from typing_extensions import Literal + +from .. import util +from ..util import HasMemoized, memoized_property +from . import operators, roles +from .annotation import Annotated, SupportsWrappingAnnotations +from .base import Executable, Immutable, SingletonConstant +from .traversals import HasCopyInternals, MemoizedHasCacheKey +from .visitors import Traversible + +_T = TypeVar("_T") + +def collate(expression, collation): ... +def between(expr, lower_bound, upper_bound, symmetric: bool = ...): ... +def literal(value, type_: Any | None = ...): ... +def outparam(key, type_: Any | None = ...): ... +def not_(clause): ... + +class ClauseElement(roles.SQLRole, SupportsWrappingAnnotations, MemoizedHasCacheKey, HasCopyInternals, Traversible): + __visit_name__: str + supports_execution: bool + stringify_dialect: str + bind: Any + description: Any + is_clause_element: bool + is_selectable: bool + @property + def entity_namespace(self) -> None: ... + def unique_params(self, *optionaldict, **kwargs): ... + def params(self, *optionaldict, **kwargs): ... + def compare(self, other, **kw): ... + def self_group(self, against: Any | None = ...): ... + def compile(self, bind: Any | None = ..., dialect: Any | None = ..., **kw): ... + def __invert__(self): ... + def __bool__(self) -> None: ... 
+ __nonzero__: Any + +class ColumnElement( + roles.ColumnArgumentOrKeyRole, + roles.StatementOptionRole, + roles.WhereHavingRole, + roles.BinaryElementRole, + roles.OrderByRole, + roles.ColumnsClauseRole, + roles.LimitOffsetRole, + roles.DMLColumnRole, + roles.DDLConstraintColumnRole, + roles.DDLExpressionRole, + operators.ColumnOperators[_T], + ClauseElement, + Generic[_T], +): + __visit_name__: str + primary_key: bool + foreign_keys: Any + key: Any + def self_group(self, against: Any | None = ...): ... + @memoized_property + def type(self): ... + @HasMemoized.memoized_attribute + def comparator(self): ... + def __getattr__(self, key): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + @property + def expression(self): ... + @memoized_property + def base_columns(self): ... + @memoized_property + def proxy_set(self): ... + def shares_lineage(self, othercolumn): ... + def cast(self, type_): ... + def label(self, name): ... + @property + def anon_label(self): ... + @property + def anon_key_label(self): ... + +class WrapsColumnExpression: + @property + def wrapped_column_expression(self) -> None: ... + +class BindParameter(roles.InElementRole, ColumnElement[_T], Generic[_T]): + __visit_name__: str + inherit_cache: bool + key: Any + unique: Any + value: Any + callable: Any + isoutparam: Any + required: Any + expanding: Any + expand_op: Any + literal_execute: Any + type: Any + def __init__( + self, + key, + value=..., + type_: Any | None = ..., + unique: bool = ..., + required=..., + quote: Any | None = ..., + callable_: Any | None = ..., + expanding: bool = ..., + isoutparam: bool = ..., + literal_execute: bool = ..., + _compared_to_operator: Any | None = ..., + _compared_to_type: Any | None = ..., + _is_crud: bool = ..., + ) -> None: ... + @property + def effective_value(self): ... + def render_literal_execute(self): ... 
+ +class TypeClause(ClauseElement): + __visit_name__: str + type: Any + def __init__(self, type_) -> None: ... + +class TextClause( + roles.DDLConstraintColumnRole, + roles.DDLExpressionRole, + roles.StatementOptionRole, + roles.WhereHavingRole, + roles.OrderByRole, + roles.FromClauseRole, + roles.SelectStatementRole, + roles.BinaryElementRole, + roles.InElementRole, + Executable, + ClauseElement, +): + __visit_name__: str + def __and__(self, other): ... + key: Any + text: Any + def __init__(self, text, bind: Any | None = ...): ... + def bindparams(self, *binds, **names_to_values) -> None: ... + def columns(self, *cols, **types): ... + @property + def type(self): ... + @property + def comparator(self): ... + def self_group(self, against: Any | None = ...): ... + +class Null(SingletonConstant, roles.ConstExprRole, ColumnElement[None]): + __visit_name__: str + @memoized_property + def type(self): ... + +class False_(SingletonConstant, roles.ConstExprRole, ColumnElement[Literal[False]]): + __visit_name__: str + @memoized_property + def type(self): ... + +class True_(SingletonConstant, roles.ConstExprRole, ColumnElement[Literal[True]]): + __visit_name__: str + @memoized_property + def type(self): ... + +class ClauseList(roles.InElementRole, roles.OrderByRole, roles.ColumnsClauseRole, roles.DMLColumnRole, ClauseElement): + __visit_name__: str + operator: Any + group: Any + group_contents: Any + clauses: Any + def __init__(self, *clauses, **kwargs) -> None: ... + def __iter__(self): ... + def __len__(self): ... + def append(self, clause) -> None: ... + def self_group(self, against: Any | None = ...): ... + +class BooleanClauseList(ClauseList, ColumnElement[Any]): + __visit_name__: str + inherit_cache: bool + def __init__(self, *arg, **kw) -> None: ... + @classmethod + def and_(cls, *clauses): ... + @classmethod + def or_(cls, *clauses): ... + def self_group(self, against: Any | None = ...): ... 
+ +and_: Any +or_: Any + +class Tuple(ClauseList, ColumnElement[Any]): + __visit_name__: str + type: Any + def __init__(self, *clauses, **kw) -> None: ... + def self_group(self, against: Any | None = ...): ... + +class Case(ColumnElement[Any]): + __visit_name__: str + value: Any + type: Any + whens: Any + else_: Any + def __init__(self, *whens, **kw) -> None: ... + +def literal_column(text, type_: Any | None = ...): ... + +class Cast(WrapsColumnExpression, ColumnElement[Any]): + __visit_name__: str + type: Any + clause: Any + typeclause: Any + def __init__(self, expression, type_) -> None: ... + @property + def wrapped_column_expression(self): ... + +class TypeCoerce(WrapsColumnExpression, ColumnElement[Any]): + __visit_name__: str + type: Any + clause: Any + def __init__(self, expression, type_) -> None: ... + @HasMemoized.memoized_attribute + def typed_expression(self): ... + @property + def wrapped_column_expression(self): ... + def self_group(self, against: Any | None = ...): ... + +class Extract(ColumnElement[Any]): + __visit_name__: str + type: Any + field: Any + expr: Any + def __init__(self, field, expr, **kwargs) -> None: ... + +class _label_reference(ColumnElement[Any]): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + +class _textual_label_reference(ColumnElement[Any]): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + +class UnaryExpression(ColumnElement[Any]): + __visit_name__: str + operator: Any + modifier: Any + element: Any + type: Any + wraps_column_expression: Any + def __init__( + self, + element, + operator: Any | None = ..., + modifier: Any | None = ..., + type_: Any | None = ..., + wraps_column_expression: bool = ..., + ) -> None: ... + def self_group(self, against: Any | None = ...): ... + +class CollectionAggregate(UnaryExpression): + inherit_cache: bool + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs) -> None: ... 
+ +class AsBoolean(WrapsColumnExpression, UnaryExpression): + inherit_cache: bool + element: Any + type: Any + operator: Any + negate: Any + modifier: Any + wraps_column_expression: bool + def __init__(self, element, operator, negate) -> None: ... + @property + def wrapped_column_expression(self): ... + def self_group(self, against: Any | None = ...): ... + +class BinaryExpression(ColumnElement[Any]): + __visit_name__: str + left: Any + right: Any + operator: Any + type: Any + negate: Any + modifiers: Any + def __init__( + self, left, right, operator, type_: Any | None = ..., negate: Any | None = ..., modifiers: Any | None = ... + ) -> None: ... + def __bool__(self): ... + __nonzero__: Any + @property + def is_comparison(self): ... + def self_group(self, against: Any | None = ...): ... + +class Slice(ColumnElement[Any]): + __visit_name__: str + start: Any + stop: Any + step: Any + type: Any + def __init__(self, start, stop, step, _name: Any | None = ...) -> None: ... + def self_group(self, against: Any | None = ...): ... + +class IndexExpression(BinaryExpression): + inherit_cache: bool + +class GroupedElement(ClauseElement): + __visit_name__: str + def self_group(self, against: Any | None = ...): ... + +class Grouping(GroupedElement, ColumnElement[Any]): + element: Any + type: Any + def __init__(self, element) -> None: ... + def __getattr__(self, attr): ... + +RANGE_UNBOUNDED: Any +RANGE_CURRENT: Any + +class Over(ColumnElement[Any]): + __visit_name__: str + order_by: Any + partition_by: Any + element: Any + range_: Any + rows: Any + def __init__( + self, + element, + partition_by: Any | None = ..., + order_by: Any | None = ..., + range_: Any | None = ..., + rows: Any | None = ..., + ) -> None: ... + def __reduce__(self): ... + @memoized_property + def type(self): ... + +class WithinGroup(ColumnElement[Any]): + __visit_name__: str + order_by: Any + element: Any + def __init__(self, element, *order_by) -> None: ... + def __reduce__(self): ... 
+ def over( + self, partition_by: Any | None = ..., order_by: Any | None = ..., range_: Any | None = ..., rows: Any | None = ... + ): ... + @memoized_property + def type(self): ... + +class FunctionFilter(ColumnElement[Any]): + __visit_name__: str + criterion: Any + func: Any + def __init__(self, func, *criterion) -> None: ... + def filter(self, *criterion): ... + def over( + self, partition_by: Any | None = ..., order_by: Any | None = ..., range_: Any | None = ..., rows: Any | None = ... + ): ... + def self_group(self, against: Any | None = ...): ... + @memoized_property + def type(self): ... + +class Label(roles.LabeledColumnExprRole, ColumnElement[Any]): + __visit_name__: str + name: Any + key: Any + def __init__(self, name, element, type_: Any | None = ...) -> None: ... + def __reduce__(self): ... + @memoized_property + def type(self): ... + @HasMemoized.memoized_attribute + def element(self): ... + def self_group(self, against: Any | None = ...): ... + @property + def primary_key(self): ... + @property + def foreign_keys(self): ... + +class NamedColumn(ColumnElement[Any]): + is_literal: bool + table: Any + @memoized_property + def description(self): ... + +class ColumnClause(roles.DDLReferredColumnRole, roles.LabeledColumnExprRole, roles.StrAsPlainColumnRole, Immutable, NamedColumn): + table: Any + is_literal: bool + __visit_name__: str + onupdate: Any + default: Any + server_default: Any + server_onupdate: Any + key: Any + type: Any + def __init__(self, text, type_: Any | None = ..., is_literal: bool = ..., _selectable: Any | None = ...) -> None: ... + def get_children(self, column_tables: bool = ..., **kw): ... # type: ignore[override] + @property + def entity_namespace(self): ... + +class TableValuedColumn(NamedColumn): + __visit_name__: str + scalar_alias: Any + key: Any + type: Any + def __init__(self, scalar_alias, type_) -> None: ... 
+ +class CollationClause(ColumnElement[Any]): + __visit_name__: str + collation: Any + def __init__(self, collation) -> None: ... + +class _IdentifiedClause(Executable, ClauseElement): + __visit_name__: str + ident: Any + def __init__(self, ident) -> None: ... + +class SavepointClause(_IdentifiedClause): + __visit_name__: str + inherit_cache: bool + +class RollbackToSavepointClause(_IdentifiedClause): + __visit_name__: str + inherit_cache: bool + +class ReleaseSavepointClause(_IdentifiedClause): + __visit_name__: str + inherit_cache: bool + +class quoted_name(util.MemoizedSlots, util.text_type): + quote: Any + def __new__(cls, value, quote): ... + def __reduce__(self): ... + +class AnnotatedColumnElement(Annotated): + def __init__(self, element, values) -> None: ... + @memoized_property + def name(self): ... + @memoized_property + def table(self): ... + @memoized_property + def key(self): ... + @memoized_property + def info(self): ... + +class _truncated_label(quoted_name): + def __new__(cls, value, quote: Any | None = ...): ... + def __reduce__(self): ... + def apply_map(self, map_): ... + +class conv(_truncated_label): ... + +class _anonymous_label(_truncated_label): + @classmethod + def safe_construct(cls, seed, body, enclosing_label: Any | None = ..., sanitize_key: bool = ...): ... + def __add__(self, other): ... + def __radd__(self, other): ... + def apply_map(self, map_): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/events.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/events.pyi new file mode 100644 index 000000000000..11765c6af968 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/events.pyi @@ -0,0 +1,10 @@ +from .. import event + +class DDLEvents(event.Events): + def before_create(self, target, connection, **kw) -> None: ... + def after_create(self, target, connection, **kw) -> None: ... 
+ def before_drop(self, target, connection, **kw) -> None: ... + def after_drop(self, target, connection, **kw) -> None: ... + def before_parent_attach(self, target, parent) -> None: ... + def after_parent_attach(self, target, parent) -> None: ... + def column_reflect(self, inspector, table, column_info) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/expression.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/expression.pyi new file mode 100644 index 000000000000..79df689cef91 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/expression.pyi @@ -0,0 +1,201 @@ +from typing import Any + +from .base import PARSE_AUTOCOMMIT as PARSE_AUTOCOMMIT, ColumnCollection as ColumnCollection, Executable as Executable +from .dml import Delete as Delete, Insert as Insert, Update as Update, UpdateBase as UpdateBase, ValuesBase as ValuesBase +from .elements import ( + BinaryExpression as BinaryExpression, + BindParameter as BindParameter, + BooleanClauseList as BooleanClauseList, + Case as Case, + Cast as Cast, + ClauseElement as ClauseElement, + ClauseList as ClauseList, + CollectionAggregate as CollectionAggregate, + ColumnClause as ColumnClause, + ColumnElement as ColumnElement, + Extract as Extract, + False_ as False_, + FunctionFilter as FunctionFilter, + Grouping as Grouping, + Label as Label, + Null as Null, + Over as Over, + ReleaseSavepointClause as ReleaseSavepointClause, + RollbackToSavepointClause as RollbackToSavepointClause, + SavepointClause as SavepointClause, + TextClause as TextClause, + True_ as True_, + Tuple as Tuple, + TypeClause as TypeClause, + TypeCoerce as TypeCoerce, + UnaryExpression as UnaryExpression, + WithinGroup as WithinGroup, + _truncated_label as _truncated_label, + between as between, + collate as collate, + literal as literal, + literal_column as literal_column, + not_ as not_, + outparam as outparam, + quoted_name 
as quoted_name, +) +from .functions import Function as Function, FunctionElement as FunctionElement, func as func, modifier as modifier +from .lambdas import LambdaElement as LambdaElement, StatementLambdaElement as StatementLambdaElement, lambda_stmt as lambda_stmt +from .operators import ColumnOperators as ColumnOperators, Operators as Operators, custom_op as custom_op +from .selectable import ( + CTE as CTE, + LABEL_STYLE_DEFAULT as LABEL_STYLE_DEFAULT, + LABEL_STYLE_DISAMBIGUATE_ONLY as LABEL_STYLE_DISAMBIGUATE_ONLY, + LABEL_STYLE_NONE as LABEL_STYLE_NONE, + LABEL_STYLE_TABLENAME_PLUS_COL as LABEL_STYLE_TABLENAME_PLUS_COL, + Alias as Alias, + AliasedReturnsRows as AliasedReturnsRows, + CompoundSelect as CompoundSelect, + Exists as Exists, + FromClause as FromClause, + FromGrouping as FromGrouping, + GenerativeSelect as GenerativeSelect, + HasCTE as HasCTE, + HasPrefixes as HasPrefixes, + HasSuffixes as HasSuffixes, + Join as Join, + Lateral as Lateral, + ReturnsRows as ReturnsRows, + ScalarSelect as ScalarSelect, + Select as Select, + Selectable as Selectable, + SelectBase as SelectBase, + Subquery as Subquery, + TableClause as TableClause, + TableSample as TableSample, + TableValuedAlias as TableValuedAlias, + TextAsFrom as TextAsFrom, + TextualSelect as TextualSelect, + Values as Values, + subquery as subquery, +) +from .traversals import CacheKey as CacheKey +from .visitors import Visitable as Visitable + +__all__ = [ + "Alias", + "AliasedReturnsRows", + "any_", + "all_", + "CacheKey", + "ClauseElement", + "ColumnCollection", + "ColumnElement", + "CompoundSelect", + "Delete", + "FromClause", + "Insert", + "Join", + "Lateral", + "LambdaElement", + "StatementLambdaElement", + "Select", + "Selectable", + "TableClause", + "TableValuedAlias", + "Update", + "Values", + "alias", + "and_", + "asc", + "between", + "bindparam", + "case", + "cast", + "column", + "custom_op", + "cte", + "delete", + "desc", + "distinct", + "except_", + "except_all", + "exists", + 
"extract", + "func", + "modifier", + "collate", + "insert", + "intersect", + "intersect_all", + "join", + "label", + "lateral", + "lambda_stmt", + "literal", + "literal_column", + "not_", + "null", + "nulls_first", + "nulls_last", + "or_", + "outparam", + "outerjoin", + "over", + "select", + "table", + "text", + "tuple_", + "type_coerce", + "quoted_name", + "union", + "union_all", + "update", + "quoted_name", + "within_group", + "Subquery", + "TableSample", + "tablesample", + "values", +] + +all_: Any +any_: Any +and_: Any +alias: Any +tablesample: Any +lateral: Any +or_: Any +bindparam: Any +select: Any +text: Any +table: Any +column: Any +over: Any +within_group: Any +label: Any +case: Any +cast: Any +cte: Any +values: Any +extract: Any +tuple_: Any +except_: Any +except_all: Any +intersect: Any +intersect_all: Any +union: Any +union_all: Any +exists: Any +nulls_first: Any +nullsfirst: Any +nulls_last: Any +nullslast: Any +asc: Any +desc: Any +distinct: Any +type_coerce: Any +true: Any +false: Any +null: Any +join: Any +outerjoin: Any +insert: Any +update: Any +delete: Any +funcfilter: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/functions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/functions.pyi new file mode 100644 index 000000000000..873b414b49c4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/functions.pyi @@ -0,0 +1,223 @@ +from typing import Any + +from ..util import HasMemoized +from .base import Executable, Generative +from .elements import BinaryExpression, ColumnElement, NamedColumn +from .selectable import FromClause +from .visitors import TraversibleType + +def register_function(identifier, fn, package: str = ...) -> None: ... + +class FunctionElement(Executable, ColumnElement[Any], FromClause, Generative): # type: ignore[misc] + packagenames: Any + clause_expr: Any + def __init__(self, *clauses, **kwargs) -> None: ... 
+ def scalar_table_valued(self, name, type_: Any | None = ...): ... + def table_valued(self, *expr, **kw): ... + def column_valued(self, name: Any | None = ...): ... + @property + def columns(self): ... + @property + def exported_columns(self): ... + @HasMemoized.memoized_attribute + def clauses(self): ... + def over( + self, partition_by: Any | None = ..., order_by: Any | None = ..., rows: Any | None = ..., range_: Any | None = ... + ): ... + def within_group(self, *order_by): ... + def filter(self, *criterion): ... + def as_comparison(self, left_index, right_index): ... + def within_group_type(self, within_group) -> None: ... + def alias(self, name: Any | None = ...): ... # type: ignore[override] + def select(self): ... + def scalar(self): ... + def execute(self): ... + def self_group(self, against: Any | None = ...): ... + @property + def entity_namespace(self): ... + +class FunctionAsBinary(BinaryExpression): + sql_function: Any + left_index: Any + right_index: Any + operator: Any + type: Any + negate: Any + modifiers: Any + def __init__(self, fn, left_index, right_index) -> None: ... + @property + def left(self): ... + @left.setter + def left(self, value) -> None: ... + @property + def right(self): ... + @right.setter + def right(self, value) -> None: ... + +class ScalarFunctionColumn(NamedColumn): + __visit_name__: str + is_literal: bool + table: Any + fn: Any + name: Any + type: Any + def __init__(self, fn, name, type_: Any | None = ...) -> None: ... + +class _FunctionGenerator: + opts: Any + def __init__(self, **opts) -> None: ... + def __getattr__(self, name): ... + def __call__(self, *c, **kwargs): ... + +func: Any +modifier: Any + +class Function(FunctionElement): + __visit_name__: str + type: Any + packagenames: Any + name: Any + def __init__(self, name, *clauses, **kw) -> None: ... + +class _GenericMeta(TraversibleType): + def __init__(cls, clsname, bases, clsdict) -> None: ... 
+ +class GenericFunction: + name: Any + identifier: Any + coerce_arguments: bool + inherit_cache: bool + packagenames: Any + clause_expr: Any + type: Any + def __init__(self, *args, **kwargs) -> None: ... + +class next_value(GenericFunction): + type: Any + name: str + sequence: Any + def __init__(self, seq, **kw) -> None: ... + def compare(self, other, **kw): ... + +class AnsiFunction(GenericFunction): + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + +class ReturnTypeFromArgs(GenericFunction): + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + +class coalesce(ReturnTypeFromArgs): + inherit_cache: bool + +class max(ReturnTypeFromArgs): + inherit_cache: bool + +class min(ReturnTypeFromArgs): + inherit_cache: bool + +class sum(ReturnTypeFromArgs): + inherit_cache: bool + +class now(GenericFunction): + type: Any + inherit_cache: bool + +class concat(GenericFunction): + type: Any + inherit_cache: bool + +class char_length(GenericFunction): + type: Any + inherit_cache: bool + def __init__(self, arg, **kwargs) -> None: ... + +class random(GenericFunction): + inherit_cache: bool + +class count(GenericFunction): + type: Any + inherit_cache: bool + def __init__(self, expression: Any | None = ..., **kwargs) -> None: ... 
+ +class current_date(AnsiFunction): + type: Any + inherit_cache: bool + +class current_time(AnsiFunction): + type: Any + inherit_cache: bool + +class current_timestamp(AnsiFunction): + type: Any + inherit_cache: bool + +class current_user(AnsiFunction): + type: Any + inherit_cache: bool + +class localtime(AnsiFunction): + type: Any + inherit_cache: bool + +class localtimestamp(AnsiFunction): + type: Any + inherit_cache: bool + +class session_user(AnsiFunction): + type: Any + inherit_cache: bool + +class sysdate(AnsiFunction): + type: Any + inherit_cache: bool + +class user(AnsiFunction): + type: Any + inherit_cache: bool + +class array_agg(GenericFunction): + type: Any + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + +class OrderedSetAgg(GenericFunction): + array_for_multi_clause: bool + inherit_cache: bool + def within_group_type(self, within_group): ... + +class mode(OrderedSetAgg): + inherit_cache: bool + +class percentile_cont(OrderedSetAgg): + array_for_multi_clause: bool + inherit_cache: bool + +class percentile_disc(OrderedSetAgg): + array_for_multi_clause: bool + inherit_cache: bool + +class rank(GenericFunction): + type: Any + inherit_cache: bool + +class dense_rank(GenericFunction): + type: Any + inherit_cache: bool + +class percent_rank(GenericFunction): + type: Any + inherit_cache: bool + +class cume_dist(GenericFunction): + type: Any + inherit_cache: bool + +class cube(GenericFunction): + inherit_cache: bool + +class rollup(GenericFunction): + inherit_cache: bool + +class grouping_sets(GenericFunction): + inherit_cache: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/lambdas.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/lambdas.pyi new file mode 100644 index 000000000000..525111d0d071 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/lambdas.pyi @@ -0,0 +1,113 @@ +from typing import Any + +from . 
import elements, roles +from .base import Options +from .operators import ColumnOperators + +class LambdaOptions(Options): + enable_tracking: bool + track_closure_variables: bool + track_on: Any + global_track_bound_values: bool + track_bound_values: bool + lambda_cache: Any + +def lambda_stmt( + lmb, + enable_tracking: bool = ..., + track_closure_variables: bool = ..., + track_on: Any | None = ..., + global_track_bound_values: bool = ..., + track_bound_values: bool = ..., + lambda_cache: Any | None = ..., +): ... + +class LambdaElement(elements.ClauseElement): + __visit_name__: str + parent_lambda: Any + fn: Any + role: Any + tracker_key: Any + opts: Any + def __init__(self, fn, role, opts=..., apply_propagate_attrs: Any | None = ...) -> None: ... + def __getattr__(self, key): ... + +class DeferredLambdaElement(LambdaElement): + lambda_args: Any + def __init__(self, fn, role, opts=..., lambda_args=...) -> None: ... + +class StatementLambdaElement(roles.AllowsLambdaRole, LambdaElement): + def __add__(self, other): ... + def add_criteria( + self, + other, + enable_tracking: bool = ..., + track_on: Any | None = ..., + track_closure_variables: bool = ..., + track_bound_values: bool = ..., + ): ... + def spoil(self): ... + +class NullLambdaStatement(roles.AllowsLambdaRole, elements.ClauseElement): + __visit_name__: str + def __init__(self, statement) -> None: ... + def __getattr__(self, key): ... + def __add__(self, other): ... + def add_criteria(self, other, **kw): ... + +class LinkedLambdaElement(StatementLambdaElement): + role: Any + opts: Any + fn: Any + parent_lambda: Any + tracker_key: Any + def __init__(self, fn, parent_lambda, opts) -> None: ... + +class AnalyzedCode: + @classmethod + def get(cls, fn, lambda_element, lambda_kw, **kw): ... + track_bound_values: Any + track_closure_variables: Any + bindparam_trackers: Any + closure_trackers: Any + build_py_wrappers: Any + def __init__(self, fn, lambda_element, opts) -> None: ... 
+ +class NonAnalyzedFunction: + closure_bindparams: Any + bindparam_trackers: Any + expr: Any + def __init__(self, expr) -> None: ... + @property + def expected_expr(self): ... + +class AnalyzedFunction: + analyzed_code: Any + fn: Any + closure_pywrappers: Any + tracker_instrumented_fn: Any + expr: Any + bindparam_trackers: Any + expected_expr: Any + is_sequence: Any + propagate_attrs: Any + closure_bindparams: Any + def __init__(self, analyzed_code, lambda_element, apply_propagate_attrs, fn) -> None: ... + +class PyWrapper(ColumnOperators[Any]): + fn: Any + track_bound_values: Any + def __init__( + self, fn, name, to_evaluate, closure_index: Any | None = ..., getter: Any | None = ..., track_bound_values: bool = ... + ) -> None: ... + def __call__(self, *arg, **kw): ... + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + def __clause_element__(self): ... + def __bool__(self): ... + def __nonzero__(self): ... + def __getattribute__(self, key): ... + def __iter__(self): ... + def __getitem__(self, key): ... + +def insp(lmb): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/naming.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/naming.pyi new file mode 100644 index 000000000000..50bdacf72a28 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/naming.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from .elements import conv as conv + +class ConventionDict: + const: Any + table: Any + convention: Any + def __init__(self, const, table, convention) -> None: ... + def __getitem__(self, key): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/operators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/operators.pyi new file mode 100644 index 000000000000..4e1042a21694 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/operators.pyi @@ -0,0 +1,193 @@ +from collections.abc import Container, Iterable +from operator import truediv +from typing import Any, Generic, TypeVar + +_T = TypeVar("_T") + +div = truediv + +class Operators: + def __and__(self, other): ... + def __or__(self, other): ... + def __invert__(self): ... + def op(self, opstring, precedence: int = ..., is_comparison: bool = ..., return_type: Any | None = ...): ... + def bool_op(self, opstring, precedence: int = ...): ... + def operate(self, op, *other, **kwargs) -> None: ... + def reverse_operate(self, op, other, **kwargs) -> None: ... + +class custom_op: + __name__: str + opstring: Any + precedence: Any + is_comparison: Any + natural_self_precedent: Any + eager_grouping: Any + return_type: Any + def __init__( + self, + opstring, + precedence: int = ..., + is_comparison: bool = ..., + return_type: Any | None = ..., + natural_self_precedent: bool = ..., + eager_grouping: bool = ..., + ) -> None: ... + def __eq__(self, other): ... + def __hash__(self): ... + def __call__(self, left, right, **kw): ... + +class ColumnOperators(Operators, Generic[_T]): + timetuple: Any + def __lt__(self, other: _T | ColumnOperators[_T] | None): ... + def __le__(self, other: _T | ColumnOperators[_T] | None): ... + __hash__: Any + def __eq__(self, other: _T | ColumnOperators[_T] | None): ... # type: ignore[override] + def __ne__(self, other: _T | ColumnOperators[_T] | None): ... # type: ignore[override] + def is_distinct_from(self, other): ... + def is_not_distinct_from(self, other): ... + isnot_distinct_from = is_not_distinct_from + def __gt__(self, other: _T | ColumnOperators[_T] | None): ... 
+ def __ge__(self, other: _T | ColumnOperators[_T] | None): ... + def __neg__(self): ... + def __contains__(self, other): ... + def __getitem__(self, index: int): ... + def __lshift__(self, other): ... + def __rshift__(self, other): ... + def concat(self, other: _T | ColumnOperators[_T] | None): ... + def like(self, other: _T, escape: str | None = ...): ... + def ilike(self, other: _T, escape: str | None = ...): ... + def in_(self, other: Container[_T] | Iterable[_T]): ... + def not_in(self, other: Container[_T] | Iterable[_T]): ... + notin_ = not_in + def not_like(self, other: _T, escape: str | None = ...): ... + notlike = not_like + def not_ilike(self, other: _T, escape: str | None = ...): ... + notilike = not_ilike + def is_(self, other: _T): ... + def is_not(self, other: _T): ... + isnot = is_not + def startswith(self, other: str, **kwargs): ... + def endswith(self, other: str, **kwargs): ... + def contains(self, other: str, **kwargs): ... + def match(self, other: str, **kwargs): ... + def regexp_match(self, pattern, flags: Any | None = ...): ... + def regexp_replace(self, pattern, replacement, flags: Any | None = ...): ... + def desc(self): ... + def asc(self): ... + def nulls_first(self): ... + nullsfirst: Any + def nulls_last(self): ... + nullslast: Any + def collate(self, collation): ... + def __radd__(self, other): ... + def __rsub__(self, other): ... + def __rmul__(self, other): ... + def __rdiv__(self, other): ... + def __rmod__(self, other): ... + def between(self, cleft, cright, symmetric: bool = ...): ... + def distinct(self): ... + def any_(self): ... + def all_(self): ... + def __add__(self, other): ... + def __sub__(self, other): ... + def __mul__(self, other): ... + def __div__(self, other): ... + def __mod__(self, other): ... + def __truediv__(self, other): ... + def __rtruediv__(self, other): ... + +def commutative_op(fn): ... +def comparison_op(fn): ... +def from_() -> None: ... +def function_as_comparison_op() -> None: ... 
+def as_() -> None: ... +def exists() -> None: ... +def is_true(a) -> None: ... + +istrue = is_true + +def is_false(a) -> None: ... + +isfalse = is_false + +def is_distinct_from(a, b): ... +def is_not_distinct_from(a, b): ... + +isnot_distinct_from = is_not_distinct_from + +def is_(a, b): ... +def is_not(a, b): ... + +isnot = is_not + +def collate(a, b): ... +def op(a, opstring, b): ... +def like_op(a, b, escape: Any | None = ...): ... +def not_like_op(a, b, escape: Any | None = ...): ... + +notlike_op = not_like_op + +def ilike_op(a, b, escape: Any | None = ...): ... +def not_ilike_op(a, b, escape: Any | None = ...): ... + +notilike_op = not_ilike_op + +def between_op(a, b, c, symmetric: bool = ...): ... +def not_between_op(a, b, c, symmetric: bool = ...): ... + +notbetween_op = not_between_op + +def in_op(a, b): ... +def not_in_op(a, b): ... + +notin_op = not_in_op + +def distinct_op(a): ... +def any_op(a): ... +def all_op(a): ... +def startswith_op(a, b, escape: Any | None = ..., autoescape: bool = ...): ... +def not_startswith_op(a, b, escape: Any | None = ..., autoescape: bool = ...): ... + +notstartswith_op = not_startswith_op + +def endswith_op(a, b, escape: Any | None = ..., autoescape: bool = ...): ... +def not_endswith_op(a, b, escape: Any | None = ..., autoescape: bool = ...): ... + +notendswith_op = not_endswith_op + +def contains_op(a, b, escape: Any | None = ..., autoescape: bool = ...): ... +def not_contains_op(a, b, escape: Any | None = ..., autoescape: bool = ...): ... + +notcontains_op = not_contains_op + +def match_op(a, b, **kw): ... +def regexp_match_op(a, b, flags: Any | None = ...): ... +def not_regexp_match_op(a, b, flags: Any | None = ...): ... +def regexp_replace_op(a, b, replacement, flags: Any | None = ...): ... +def not_match_op(a, b, **kw): ... + +notmatch_op = not_match_op + +def comma_op(a, b) -> None: ... +def filter_op(a, b) -> None: ... +def concat_op(a, b): ... +def desc_op(a): ... +def asc_op(a): ... +def nulls_first_op(a): ... 
+ +nullsfirst_op = nulls_first_op + +def nulls_last_op(a): ... + +nullslast_op = nulls_last_op + +def json_getitem_op(a, b) -> None: ... +def json_path_getitem_op(a, b) -> None: ... +def is_comparison(op): ... +def is_commutative(op): ... +def is_ordering_modifier(op): ... +def is_natural_self_precedent(op): ... +def is_boolean(op): ... +def mirror(op): ... +def is_associative(op): ... +def is_precedent(operator, against): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/roles.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/roles.pyi new file mode 100644 index 000000000000..e7c290b149c3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/roles.pyi @@ -0,0 +1,57 @@ +class SQLRole: + allows_lambda: bool + uses_inspection: bool + +class UsesInspection: + uses_inspection: bool + +class AllowsLambdaRole: + allows_lambda: bool + +class HasCacheKeyRole(SQLRole): ... +class ExecutableOptionRole(SQLRole): ... +class LiteralValueRole(SQLRole): ... +class ColumnArgumentRole(SQLRole): ... +class ColumnArgumentOrKeyRole(ColumnArgumentRole): ... +class StrAsPlainColumnRole(ColumnArgumentRole): ... +class ColumnListRole(SQLRole): ... +class TruncatedLabelRole(SQLRole): ... +class ColumnsClauseRole(AllowsLambdaRole, UsesInspection, ColumnListRole): ... +class LimitOffsetRole(SQLRole): ... +class ByOfRole(ColumnListRole): ... +class GroupByRole(AllowsLambdaRole, UsesInspection, ByOfRole): ... +class OrderByRole(AllowsLambdaRole, ByOfRole): ... +class StructuralRole(SQLRole): ... +class StatementOptionRole(StructuralRole): ... +class OnClauseRole(AllowsLambdaRole, StructuralRole): ... +class WhereHavingRole(OnClauseRole): ... +class ExpressionElementRole(SQLRole): ... +class ConstExprRole(ExpressionElementRole): ... +class LabeledColumnExprRole(ExpressionElementRole): ... +class BinaryElementRole(ExpressionElementRole): ... +class InElementRole(SQLRole): ... 
+class JoinTargetRole(AllowsLambdaRole, UsesInspection, StructuralRole): ... +class FromClauseRole(ColumnsClauseRole, JoinTargetRole): ... + +class StrictFromClauseRole(FromClauseRole): + @property + def description(self) -> None: ... + +class AnonymizedFromClauseRole(StrictFromClauseRole): ... +class ReturnsRowsRole(SQLRole): ... +class StatementRole(SQLRole): ... + +class SelectStatementRole(StatementRole, ReturnsRowsRole): + def subquery(self) -> None: ... + +class HasCTERole(ReturnsRowsRole): ... +class IsCTERole(SQLRole): ... +class CompoundElementRole(AllowsLambdaRole, SQLRole): ... +class DMLRole(StatementRole): ... +class DMLTableRole(FromClauseRole): ... +class DMLColumnRole(SQLRole): ... +class DMLSelectRole(SQLRole): ... +class DDLRole(StatementRole): ... +class DDLExpressionRole(StructuralRole): ... +class DDLConstraintColumnRole(SQLRole): ... +class DDLReferredColumnRole(DDLConstraintColumnRole): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/schema.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/schema.pyi new file mode 100644 index 000000000000..b7c82d82a7df --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/schema.pyi @@ -0,0 +1,375 @@ +from typing import Any + +from ..util import memoized_property +from . import visitors +from .base import DialectKWArgs, Executable, SchemaEventTarget +from .elements import ColumnClause +from .selectable import TableClause + +RETAIN_SCHEMA: Any +BLANK_SCHEMA: Any +NULL_UNSPECIFIED: Any + +class SchemaItem(SchemaEventTarget, visitors.Visitable): + __visit_name__: str + create_drop_stringify_dialect: str + @memoized_property + def info(self): ... + +class Table(DialectKWArgs, SchemaItem, TableClause): + __visit_name__: str + constraints: Any + indexes: Any + def __new__(cls, *args, **kw): ... + def __init__(self, *args, **kw) -> None: ... + @property + def foreign_key_constraints(self): ... 
+ @property + def key(self): ... + @property + def bind(self): ... + def add_is_dependent_on(self, table) -> None: ... + def append_column(self, column, replace_existing: bool = ...) -> None: ... # type: ignore[override] + def append_constraint(self, constraint) -> None: ... + def exists(self, bind: Any | None = ...): ... + def create(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + def tometadata(self, metadata, schema=..., referred_schema_fn: Any | None = ..., name: Any | None = ...): ... + def to_metadata(self, metadata, schema=..., referred_schema_fn: Any | None = ..., name: Any | None = ...): ... + +class Column(DialectKWArgs, SchemaItem, ColumnClause): # type: ignore # argument disparities between base classes + __visit_name__: str + inherit_cache: bool + key: Any + primary_key: Any + nullable: Any + default: Any + server_default: Any + server_onupdate: Any + index: Any + unique: Any + system: Any + doc: Any + onupdate: Any + autoincrement: Any + constraints: Any + foreign_keys: Any + comment: Any + computed: Any + identity: Any + info: Any + def __init__(self, *args, **kwargs) -> None: ... + def references(self, column): ... + def append_foreign_key(self, fk) -> None: ... + def copy(self, **kw): ... + +class ForeignKey(DialectKWArgs, SchemaItem): + __visit_name__: str + constraint: Any + parent: Any + use_alter: Any + name: Any + onupdate: Any + ondelete: Any + deferrable: Any + initially: Any + link_to_name: Any + match: Any + info: Any + def __init__( + self, + column, + _constraint: Any | None = ..., + use_alter: bool = ..., + name: Any | None = ..., + onupdate: Any | None = ..., + ondelete: Any | None = ..., + deferrable: Any | None = ..., + initially: Any | None = ..., + link_to_name: bool = ..., + match: Any | None = ..., + info: Any | None = ..., + **dialect_kw, + ) -> None: ... + def copy(self, schema: Any | None = ..., **kw): ... 
+ target_fullname: Any + def references(self, table): ... + def get_referent(self, table): ... + @memoized_property + def column(self): ... + +class DefaultGenerator(Executable, SchemaItem): + __visit_name__: str + is_sequence: bool + is_server_default: bool + column: Any + for_update: Any + def __init__(self, for_update: bool = ...) -> None: ... + def execute(self, bind: Any | None = ...): ... # type: ignore[override] + @property + def bind(self): ... + +class ColumnDefault(DefaultGenerator): + arg: Any + def __init__(self, arg, **kwargs) -> None: ... + @memoized_property + def is_callable(self): ... + @memoized_property + def is_clause_element(self): ... + @memoized_property + def is_scalar(self): ... + +class IdentityOptions: + start: Any + increment: Any + minvalue: Any + maxvalue: Any + nominvalue: Any + nomaxvalue: Any + cycle: Any + cache: Any + order: Any + def __init__( + self, + start: Any | None = ..., + increment: Any | None = ..., + minvalue: Any | None = ..., + maxvalue: Any | None = ..., + nominvalue: Any | None = ..., + nomaxvalue: Any | None = ..., + cycle: Any | None = ..., + cache: Any | None = ..., + order: Any | None = ..., + ) -> None: ... + +class Sequence(IdentityOptions, DefaultGenerator): + __visit_name__: str + is_sequence: bool + name: Any + optional: Any + schema: Any + metadata: Any + data_type: Any + def __init__( + self, + name, + start: Any | None = ..., + increment: Any | None = ..., + minvalue: Any | None = ..., + maxvalue: Any | None = ..., + nominvalue: Any | None = ..., + nomaxvalue: Any | None = ..., + cycle: Any | None = ..., + schema: Any | None = ..., + cache: Any | None = ..., + order: Any | None = ..., + data_type: Any | None = ..., + optional: bool = ..., + quote: Any | None = ..., + metadata: Any | None = ..., + quote_schema: Any | None = ..., + for_update: bool = ..., + ) -> None: ... + @memoized_property + def is_callable(self): ... + @memoized_property + def is_clause_element(self): ... + def next_value(self): ... 
+ @property + def bind(self): ... + def create(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + +class FetchedValue(SchemaEventTarget): + is_server_default: bool + reflected: bool + has_argument: bool + is_clause_element: bool + for_update: Any + def __init__(self, for_update: bool = ...) -> None: ... + +class DefaultClause(FetchedValue): + has_argument: bool + arg: Any + reflected: Any + def __init__(self, arg, for_update: bool = ..., _reflected: bool = ...) -> None: ... + +class Constraint(DialectKWArgs, SchemaItem): + __visit_name__: str + name: Any + deferrable: Any + initially: Any + info: Any + def __init__( + self, + name: Any | None = ..., + deferrable: Any | None = ..., + initially: Any | None = ..., + _create_rule: Any | None = ..., + info: Any | None = ..., + _type_bound: bool = ..., + **dialect_kw, + ) -> None: ... + @property + def table(self): ... + def copy(self, **kw): ... + +class ColumnCollectionMixin: + columns: Any + def __init__(self, *columns, **kw) -> None: ... + +class ColumnCollectionConstraint(ColumnCollectionMixin, Constraint): + def __init__(self, *columns, **kw) -> None: ... + columns: Any + def __contains__(self, x): ... + def copy(self, target_table: Any | None = ..., **kw): ... + def contains_column(self, col): ... + def __iter__(self): ... + def __len__(self): ... + +class CheckConstraint(ColumnCollectionConstraint): + __visit_name__: str + sqltext: Any + def __init__( + self, + sqltext, + name: Any | None = ..., + deferrable: Any | None = ..., + initially: Any | None = ..., + table: Any | None = ..., + info: Any | None = ..., + _create_rule: Any | None = ..., + _autoattach: bool = ..., + _type_bound: bool = ..., + **kw, + ) -> None: ... + @property + def is_column_level(self): ... + def copy(self, target_table: Any | None = ..., **kw): ... 
+ +class ForeignKeyConstraint(ColumnCollectionConstraint): + __visit_name__: str + onupdate: Any + ondelete: Any + link_to_name: Any + use_alter: Any + match: Any + elements: Any + def __init__( + self, + columns, + refcolumns, + name: Any | None = ..., + onupdate: Any | None = ..., + ondelete: Any | None = ..., + deferrable: Any | None = ..., + initially: Any | None = ..., + use_alter: bool = ..., + link_to_name: bool = ..., + match: Any | None = ..., + table: Any | None = ..., + info: Any | None = ..., + **dialect_kw, + ) -> None: ... + columns: Any + @property + def referred_table(self): ... + @property + def column_keys(self): ... + def copy(self, schema: Any | None = ..., target_table: Any | None = ..., **kw): ... # type: ignore[override] + +class PrimaryKeyConstraint(ColumnCollectionConstraint): + __visit_name__: str + def __init__(self, *columns, **kw) -> None: ... + @property + def columns_autoinc_first(self): ... + +class UniqueConstraint(ColumnCollectionConstraint): + __visit_name__: str + +class Index(DialectKWArgs, ColumnCollectionMixin, SchemaItem): + __visit_name__: str + table: Any + name: Any + unique: Any + info: Any + expressions: Any + def __init__(self, name, *expressions, **kw) -> None: ... + @property + def bind(self): ... + def create(self, bind: Any | None = ..., checkfirst: bool = ...): ... + def drop(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + +DEFAULT_NAMING_CONVENTION: Any + +class MetaData(SchemaItem): + __visit_name__: str + tables: Any + schema: Any + naming_convention: Any + info: Any + def __init__( + self, + bind: Any | None = ..., + schema: Any | None = ..., + quote_schema: Any | None = ..., + naming_convention: Any | None = ..., + info: Any | None = ..., + ) -> None: ... + def __contains__(self, table_or_key): ... + def is_bound(self): ... + bind: Any + def clear(self) -> None: ... + def remove(self, table) -> None: ... + @property + def sorted_tables(self): ... 
+ def reflect( + self, + bind: Any | None = ..., + schema: Any | None = ..., + views: bool = ..., + only: Any | None = ..., + extend_existing: bool = ..., + autoload_replace: bool = ..., + resolve_fks: bool = ..., + **dialect_kwargs, + ) -> None: ... + def create_all(self, bind: Any | None = ..., tables: Any | None = ..., checkfirst: bool = ...) -> None: ... + def drop_all(self, bind: Any | None = ..., tables: Any | None = ..., checkfirst: bool = ...) -> None: ... + +class ThreadLocalMetaData(MetaData): + __visit_name__: str + context: Any + def __init__(self) -> None: ... + bind: Any + def is_bound(self): ... + def dispose(self) -> None: ... + +class Computed(FetchedValue, SchemaItem): + __visit_name__: str + sqltext: Any + persisted: Any + column: Any + def __init__(self, sqltext, persisted: Any | None = ...) -> None: ... + def copy(self, target_table: Any | None = ..., **kw): ... + +class Identity(IdentityOptions, FetchedValue, SchemaItem): + __visit_name__: str + always: Any + on_null: Any + column: Any + def __init__( + self, + always: bool = ..., + on_null: Any | None = ..., + start: Any | None = ..., + increment: Any | None = ..., + minvalue: Any | None = ..., + maxvalue: Any | None = ..., + nominvalue: Any | None = ..., + nomaxvalue: Any | None = ..., + cycle: Any | None = ..., + cache: Any | None = ..., + order: Any | None = ..., + ) -> None: ... + def copy(self, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/selectable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/selectable.pyi new file mode 100644 index 000000000000..c1a9d2e18cc0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/selectable.pyi @@ -0,0 +1,416 @@ +from typing import Any + +from .. import util +from ..util import HasMemoized, memoized_property +from . 
import roles, traversals, visitors +from .annotation import Annotated, SupportsCloneAnnotations +from .base import CacheableOptions, CompileState, Executable, Generative, HasCompileState, Immutable +from .elements import ( + BindParameter as BindParameter, + BooleanClauseList as BooleanClauseList, + ClauseElement as ClauseElement, + ClauseList as ClauseList, + ColumnClause as ColumnClause, + GroupedElement as GroupedElement, + Grouping as Grouping, + TableValuedColumn as TableValuedColumn, + UnaryExpression as UnaryExpression, + literal_column as literal_column, +) + +class _OffsetLimitParam(BindParameter[Any]): + inherit_cache: bool + +def subquery(alias, *args, **kwargs): ... + +class ReturnsRows(roles.ReturnsRowsRole, ClauseElement): + @property + def selectable(self): ... + @property + def exported_columns(self) -> None: ... + +class Selectable(ReturnsRows): + __visit_name__: str + is_selectable: bool + def lateral(self, name: Any | None = ...): ... + def replace_selectable(self, old, alias): ... + def corresponding_column(self, column, require_embedded: bool = ...): ... + +class HasPrefixes: + def prefix_with(self, *expr, **kw) -> None: ... + +class HasSuffixes: + def suffix_with(self, *expr, **kw) -> None: ... + +class HasHints: + def with_statement_hint(self, text, dialect_name: str = ...): ... + def with_hint(self, selectable, text, dialect_name: str = ...) -> None: ... + +class FromClause(roles.AnonymizedFromClauseRole, Selectable): + __visit_name__: str + named_with_column: bool + schema: Any + is_selectable: bool + def select(self, whereclause: Any | None = ..., **kwargs): ... + def join(self, right, onclause: Any | None = ..., isouter: bool = ..., full: bool = ...): ... + def outerjoin(self, right, onclause: Any | None = ..., full: bool = ...): ... + def alias(self, name: Any | None = ..., flat: bool = ...): ... + def table_valued(self): ... + def tablesample(self, sampling, name: Any | None = ..., seed: Any | None = ...): ... 
+ def is_derived_from(self, fromclause): ... + @property + def description(self): ... + @property + def exported_columns(self): ... + @memoized_property + def columns(self): ... + @property + def entity_namespace(self): ... + @memoized_property + def primary_key(self): ... + @memoized_property + def foreign_keys(self): ... + c: Any + +LABEL_STYLE_NONE: Any +LABEL_STYLE_TABLENAME_PLUS_COL: Any +LABEL_STYLE_DISAMBIGUATE_ONLY: Any +LABEL_STYLE_DEFAULT: Any + +class Join(roles.DMLTableRole, FromClause): + __visit_name__: str + left: Any + right: Any + onclause: Any + isouter: Any + full: Any + def __init__(self, left, right, onclause: Any | None = ..., isouter: bool = ..., full: bool = ...) -> None: ... + @property + def description(self): ... + def is_derived_from(self, fromclause): ... + def self_group(self, against: Any | None = ...): ... + def select(self, whereclause: Any | None = ..., **kwargs): ... + @property + def bind(self): ... + def alias(self, name: Any | None = ..., flat: bool = ...): ... + +class NoInit: + def __init__(self, *arg, **kw) -> None: ... + +class AliasedReturnsRows(NoInit, FromClause): + named_with_column: bool + @property + def description(self): ... + @property + def original(self): ... + def is_derived_from(self, fromclause): ... + @property + def bind(self): ... + +class Alias(roles.DMLTableRole, AliasedReturnsRows): + __visit_name__: str + inherit_cache: bool + +class TableValuedAlias(Alias): + __visit_name__: str + @HasMemoized.memoized_attribute + def column(self): ... + def alias(self, name: Any | None = ...): ... # type: ignore[override] + def lateral(self, name: Any | None = ...): ... + def render_derived(self, name: Any | None = ..., with_types: bool = ...): ... 
+ +class Lateral(AliasedReturnsRows): + __visit_name__: str + inherit_cache: bool + +class TableSample(AliasedReturnsRows): + __visit_name__: str + +class CTE(roles.DMLTableRole, roles.IsCTERole, Generative, HasPrefixes, HasSuffixes, AliasedReturnsRows): + __visit_name__: str + def alias(self, name: Any | None = ..., flat: bool = ...): ... + def union(self, *other): ... + def union_all(self, *other): ... + +class HasCTE(roles.HasCTERole): + def add_cte(self, cte) -> None: ... + def cte(self, name: Any | None = ..., recursive: bool = ..., nesting: bool = ...): ... + +class Subquery(AliasedReturnsRows): + __visit_name__: str + inherit_cache: bool + def as_scalar(self): ... + +class FromGrouping(GroupedElement, FromClause): + element: Any + def __init__(self, element) -> None: ... + @property + def columns(self): ... + @property + def primary_key(self): ... + @property + def foreign_keys(self): ... + def is_derived_from(self, element): ... + def alias(self, **kw): ... + +class TableClause(roles.DMLTableRole, Immutable, FromClause): + __visit_name__: str + named_with_column: bool + implicit_returning: bool + name: Any + primary_key: Any + foreign_keys: Any + schema: Any + fullname: Any + def __init__(self, name, *columns, **kw) -> None: ... + @memoized_property + def description(self): ... + def append_column(self, c, **kw) -> None: ... + def insert(self, values: Any | None = ..., inline: bool = ..., **kwargs): ... + def update(self, whereclause: Any | None = ..., values: Any | None = ..., inline: bool = ..., **kwargs): ... + def delete(self, whereclause: Any | None = ..., **kwargs): ... + +class ForUpdateArg(ClauseElement): + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self): ... + nowait: Any + read: Any + skip_locked: Any + key_share: Any + of: Any + def __init__( + self, nowait: bool = ..., read: bool = ..., of: Any | None = ..., skip_locked: bool = ..., key_share: bool = ... + ) -> None: ... 
+ +class Values(Generative, FromClause): + named_with_column: bool + __visit_name__: str + name: Any + literal_binds: Any + def __init__(self, *columns, **kw) -> None: ... + def alias(self, name, **kw) -> None: ... # type: ignore[override] + def lateral(self, name: Any | None = ...) -> None: ... + def data(self, values) -> None: ... + +class SelectBase( + roles.SelectStatementRole, + roles.DMLSelectRole, + roles.CompoundElementRole, + roles.InElementRole, + HasCTE, + Executable, + SupportsCloneAnnotations, + Selectable, +): + is_select: bool + @property + def selected_columns(self) -> None: ... + @property + def exported_columns(self): ... + @property + def c(self): ... + @property + def columns(self): ... + def select(self, *arg, **kw): ... + def as_scalar(self): ... + def exists(self): ... + def scalar_subquery(self): ... + def label(self, name): ... + def lateral(self, name: Any | None = ...): ... + def subquery(self, name: Any | None = ...): ... + def alias(self, name: Any | None = ..., flat: bool = ...): ... + +class SelectStatementGrouping(GroupedElement, SelectBase): + __visit_name__: str + element: Any + def __init__(self, element) -> None: ... + def get_label_style(self): ... + def set_label_style(self, label_style): ... + @property + def select_statement(self): ... + def self_group(self, against: Any | None = ...): ... + @property + def selected_columns(self): ... + +class DeprecatedSelectBaseGenerations: + def append_order_by(self, *clauses) -> None: ... + def append_group_by(self, *clauses) -> None: ... + +class GenerativeSelect(DeprecatedSelectBaseGenerations, SelectBase): + def __init__( + self, + _label_style=..., + use_labels: bool = ..., + limit: Any | None = ..., + offset: Any | None = ..., + order_by: Any | None = ..., + group_by: Any | None = ..., + bind: Any | None = ..., + ) -> None: ... + def with_for_update( + self, nowait: bool = ..., read: bool = ..., of: Any | None = ..., skip_locked: bool = ..., key_share: bool = ... + ) -> None: ... 
+ def get_label_style(self): ... + def set_label_style(self, style): ... + def apply_labels(self): ... + def limit(self, limit) -> None: ... + def fetch(self, count, with_ties: bool = ..., percent: bool = ...) -> None: ... + def offset(self, offset) -> None: ... + def slice(self, start, stop) -> None: ... + def order_by(self, *clauses) -> None: ... + def group_by(self, *clauses) -> None: ... + +class CompoundSelectState(CompileState): ... + +class CompoundSelect(HasCompileState, GenerativeSelect): + __visit_name__: str + UNION: Any + UNION_ALL: Any + EXCEPT: Any + EXCEPT_ALL: Any + INTERSECT: Any + INTERSECT_ALL: Any + keyword: Any + selects: Any + def __init__(self, keyword, *selects, **kwargs) -> None: ... + def self_group(self, against: Any | None = ...): ... + def is_derived_from(self, fromclause): ... + @property + def selected_columns(self): ... + @property + def bind(self): ... + @bind.setter + def bind(self, bind) -> None: ... + +class DeprecatedSelectGenerations: + def append_correlation(self, fromclause) -> None: ... + def append_column(self, column) -> None: ... + def append_prefix(self, clause) -> None: ... + def append_whereclause(self, whereclause) -> None: ... + def append_having(self, having) -> None: ... + def append_from(self, fromclause) -> None: ... + +class SelectState(util.MemoizedSlots, CompileState): + class default_select_compile_options(CacheableOptions): ... + statement: Any + from_clauses: Any + froms: Any + columns_plus_names: Any + def __init__(self, statement, compiler, **kw) -> None: ... + @classmethod + def get_column_descriptions(cls, statement) -> None: ... + @classmethod + def from_statement(cls, statement, from_statement) -> None: ... + @classmethod + def get_columns_clause_froms(cls, statement): ... + @classmethod + def determine_last_joined_entity(cls, stmt): ... + @classmethod + def all_selected_columns(cls, statement): ... + +class _SelectFromElements: ... 
+ +class _MemoizedSelectEntities(traversals.HasCacheKey, traversals.HasCopyInternals, visitors.Traversible): + __visit_name__: str + +class Select( + HasPrefixes, HasSuffixes, HasHints, HasCompileState, DeprecatedSelectGenerations, _SelectFromElements, GenerativeSelect +): + __visit_name__: str + @classmethod + def create_legacy_select( + cls, + columns: Any | None = ..., + whereclause: Any | None = ..., + from_obj: Any | None = ..., + distinct: bool = ..., + having: Any | None = ..., + correlate: bool = ..., + prefixes: Any | None = ..., + suffixes: Any | None = ..., + **kwargs, + ): ... + def __init__(self) -> None: ... + def filter(self, *criteria): ... + def filter_by(self, **kwargs): ... + @property + def column_descriptions(self): ... + def from_statement(self, statement): ... + def join(self, target, onclause: Any | None = ..., isouter: bool = ..., full: bool = ...) -> None: ... + def outerjoin_from(self, from_, target, onclause: Any | None = ..., full: bool = ...): ... + def join_from(self, from_, target, onclause: Any | None = ..., isouter: bool = ..., full: bool = ...) -> None: ... + def outerjoin(self, target, onclause: Any | None = ..., full: bool = ...): ... + def get_final_froms(self): ... + @property + def froms(self): ... + @property + def columns_clause_froms(self): ... + @property + def inner_columns(self): ... + def is_derived_from(self, fromclause): ... + def get_children(self, **kwargs): ... + def add_columns(self, *columns) -> None: ... + def column(self, column): ... + def reduce_columns(self, only_synonyms: bool = ...): ... + def with_only_columns(self, *columns, **kw) -> None: ... + @property + def whereclause(self): ... + def where(self, *whereclause) -> None: ... + def having(self, having) -> None: ... + def distinct(self, *expr) -> None: ... + def select_from(self, *froms) -> None: ... + def correlate(self, *fromclauses) -> None: ... + def correlate_except(self, *fromclauses) -> None: ... 
+ @HasMemoized.memoized_attribute + def selected_columns(self): ... + def self_group(self, against: Any | None = ...): ... + def union(self, *other, **kwargs): ... + def union_all(self, *other, **kwargs): ... + def except_(self, *other, **kwargs): ... + def except_all(self, *other, **kwargs): ... + def intersect(self, *other, **kwargs): ... + def intersect_all(self, *other, **kwargs): ... + @property + def bind(self): ... + @bind.setter + def bind(self, bind) -> None: ... + +class ScalarSelect(roles.InElementRole, Generative, Grouping): + inherit_cache: bool + element: Any + type: Any + def __init__(self, element) -> None: ... + @property + def columns(self) -> None: ... + c: Any + def where(self, crit) -> None: ... + def self_group(self, **kwargs): ... + def correlate(self, *fromclauses) -> None: ... + def correlate_except(self, *fromclauses) -> None: ... + +class Exists(UnaryExpression): + inherit_cache: bool + def __init__(self, *args, **kwargs) -> None: ... + def select(self, whereclause: Any | None = ..., **kwargs): ... + def correlate(self, *fromclause): ... + def correlate_except(self, *fromclause): ... + def select_from(self, *froms): ... + def where(self, *clause): ... + +class TextualSelect(SelectBase): + __visit_name__: str + is_text: bool + is_select: bool + element: Any + column_args: Any + positional: Any + def __init__(self, text, columns, positional: bool = ...) -> None: ... + @HasMemoized.memoized_attribute + def selected_columns(self): ... + def bindparams(self, *binds, **bind_as_values) -> None: ... + +TextAsFrom = TextualSelect + +class AnnotatedFromClause(Annotated): + def __init__(self, element, values) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/sqltypes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/sqltypes.pyi new file mode 100644 index 000000000000..5075c49b91d9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/sqltypes.pyi @@ -0,0 +1,362 @@ +from typing import Any + +from .base import SchemaEventTarget +from .traversals import HasCacheKey +from .type_api import ( + Emulated as Emulated, + NativeForEmulated as NativeForEmulated, + TypeDecorator as TypeDecorator, + TypeEngine as TypeEngine, + Variant as Variant, + to_instance as to_instance, +) + +class _LookupExpressionAdapter: + class Comparator(TypeEngine.Comparator): ... + comparator_factory: Any + +class Concatenable: + class Comparator(TypeEngine.Comparator): ... + comparator_factory: Any + +class Indexable: + class Comparator(TypeEngine.Comparator): + def __getitem__(self, index): ... + comparator_factory: Any + +class String(Concatenable, TypeEngine): + __visit_name__: str + RETURNS_UNICODE: Any + RETURNS_BYTES: Any + RETURNS_CONDITIONAL: Any + RETURNS_UNKNOWN: Any + length: Any + collation: Any + def __init__( + self, + length: Any | None = ..., + collation: Any | None = ..., + convert_unicode: bool = ..., + unicode_error: Any | None = ..., + _warn_on_bytestring: bool = ..., + _expect_unicode: bool = ..., + ) -> None: ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + @property + def python_type(self): ... + def get_dbapi_type(self, dbapi): ... + +class Text(String): + __visit_name__: str + +class Unicode(String): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... + +class UnicodeText(Text): + __visit_name__: str + def __init__(self, length: Any | None = ..., **kwargs) -> None: ... 
+ +class Integer(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + def literal_processor(self, dialect): ... + +class SmallInteger(Integer): + __visit_name__: str + +class BigInteger(Integer): + __visit_name__: str + +class Numeric(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + precision: Any + scale: Any + decimal_return_scale: Any + asdecimal: Any + def __init__( + self, precision: Any | None = ..., scale: Any | None = ..., decimal_return_scale: Any | None = ..., asdecimal: bool = ... + ) -> None: ... + def get_dbapi_type(self, dbapi): ... + def literal_processor(self, dialect): ... + @property + def python_type(self): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class Float(Numeric): + __visit_name__: str + scale: Any + precision: Any + asdecimal: Any + decimal_return_scale: Any + def __init__(self, precision: Any | None = ..., asdecimal: bool = ..., decimal_return_scale: Any | None = ...) -> None: ... + def result_processor(self, dialect, coltype): ... + +class DateTime(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + timezone: Any + def __init__(self, timezone: bool = ...) -> None: ... + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + +class Date(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + +class Time(_LookupExpressionAdapter, TypeEngine): + __visit_name__: str + timezone: Any + def __init__(self, timezone: bool = ...) -> None: ... + def get_dbapi_type(self, dbapi): ... + @property + def python_type(self): ... + +class _Binary(TypeEngine): + length: Any + def __init__(self, length: Any | None = ...) -> None: ... + def literal_processor(self, dialect): ... + @property + def python_type(self): ... + def bind_processor(self, dialect): ... 
+ def result_processor(self, dialect, coltype): ... + def coerce_compared_value(self, op, value): ... + def get_dbapi_type(self, dbapi): ... + +class LargeBinary(_Binary): + __visit_name__: str + def __init__(self, length: Any | None = ...) -> None: ... + +class SchemaType(SchemaEventTarget): + name: Any + schema: Any + metadata: Any + inherit_schema: Any + def __init__( + self, + name: Any | None = ..., + schema: Any | None = ..., + metadata: Any | None = ..., + inherit_schema: bool = ..., + quote: Any | None = ..., + _create_events: bool = ..., + ) -> None: ... + def copy(self, **kw): ... + def adapt(self, impltype, **kw): ... + @property + def bind(self): ... + def create(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + def drop(self, bind: Any | None = ..., checkfirst: bool = ...) -> None: ... + +class Enum(Emulated, String, SchemaType): # type: ignore # argument disparities between base classes + __visit_name__: str + def __init__(self, *enums, **kw) -> None: ... + @property + def sort_key_function(self): ... + @property + def native(self): ... + + class Comparator(Concatenable.Comparator): ... + comparator_factory: Any + def as_generic(self, allow_nulltype: bool = ...): ... + def adapt_to_emulated(self, impltype, **kw): ... + def adapt(self, impltype, **kw): ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def copy(self, **kw): ... + @property + def python_type(self): ... + +class PickleType(TypeDecorator): + impl: Any + cache_ok: bool + protocol: Any + pickler: Any + comparator: Any + def __init__(self, protocol=..., pickler: Any | None = ..., comparator: Any | None = ..., impl: Any | None = ...) -> None: ... + def __reduce__(self): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def compare_values(self, x, y): ... 
+ +class Boolean(Emulated, TypeEngine, SchemaType): # type: ignore[misc] + __visit_name__: str + native: bool + create_constraint: Any + name: Any + def __init__(self, create_constraint: bool = ..., name: Any | None = ..., _create_events: bool = ...) -> None: ... + @property + def python_type(self): ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class _AbstractInterval(_LookupExpressionAdapter, TypeEngine): + def coerce_compared_value(self, op, value): ... + +class Interval(Emulated, _AbstractInterval, TypeDecorator): # type: ignore[misc] + impl: Any + epoch: Any + cache_ok: bool + native: Any + second_precision: Any + day_precision: Any + def __init__(self, native: bool = ..., second_precision: Any | None = ..., day_precision: Any | None = ...) -> None: ... + @property + def python_type(self): ... + def adapt_to_emulated(self, impltype, **kw): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class JSON(Indexable, TypeEngine): + __visit_name__: str + hashable: bool + NULL: Any + none_as_null: Any + def __init__(self, none_as_null: bool = ...) -> None: ... + + class JSONElementType(TypeEngine): + def string_bind_processor(self, dialect): ... + def string_literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def literal_processor(self, dialect): ... + + class JSONIndexType(JSONElementType): ... + class JSONIntIndexType(JSONIndexType): ... + class JSONStrIndexType(JSONIndexType): ... + class JSONPathType(JSONElementType): ... + + class Comparator(Indexable.Comparator, Concatenable.Comparator): + def as_boolean(self): ... + def as_string(self): ... + def as_integer(self): ... + def as_float(self): ... + def as_numeric(self, precision, scale, asdecimal: bool = ...): ... + def as_json(self): ... + comparator_factory: Any + @property + def python_type(self): ... 
+ @property # type: ignore[override] + def should_evaluate_none(self): ... + @should_evaluate_none.setter + def should_evaluate_none(self, value) -> None: ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + +class ARRAY(SchemaEventTarget, Indexable, Concatenable, TypeEngine): + __visit_name__: str + zero_indexes: bool + + class Comparator(Indexable.Comparator, Concatenable.Comparator): + def contains(self, *arg, **kw) -> None: ... + def any(self, other, operator: Any | None = ...): ... + def all(self, other, operator: Any | None = ...): ... + comparator_factory: Any + item_type: Any + as_tuple: Any + dimensions: Any + def __init__(self, item_type, as_tuple: bool = ..., dimensions: Any | None = ..., zero_indexes: bool = ...) -> None: ... + @property + def hashable(self): ... + @property + def python_type(self): ... + def compare_values(self, x, y): ... + +class TupleType(TypeEngine): + types: Any + def __init__(self, *types) -> None: ... + def result_processor(self, dialect, coltype) -> None: ... + +class REAL(Float): + __visit_name__: str + +class FLOAT(Float): + __visit_name__: str + +class NUMERIC(Numeric): + __visit_name__: str + +class DECIMAL(Numeric): + __visit_name__: str + +class INTEGER(Integer): + __visit_name__: str + +INT = INTEGER + +class SMALLINT(SmallInteger): + __visit_name__: str + +class BIGINT(BigInteger): + __visit_name__: str + +class TIMESTAMP(DateTime): + __visit_name__: str + def __init__(self, timezone: bool = ...) -> None: ... + def get_dbapi_type(self, dbapi): ... 
+ +class DATETIME(DateTime): + __visit_name__: str + +class DATE(Date): + __visit_name__: str + +class TIME(Time): + __visit_name__: str + +class TEXT(Text): + __visit_name__: str + +class CLOB(Text): + __visit_name__: str + +class VARCHAR(String): + __visit_name__: str + +class NVARCHAR(Unicode): + __visit_name__: str + +class CHAR(String): + __visit_name__: str + +class NCHAR(Unicode): + __visit_name__: str + +class BLOB(LargeBinary): + __visit_name__: str + +class BINARY(_Binary): + __visit_name__: str + +class VARBINARY(_Binary): + __visit_name__: str + +class BOOLEAN(Boolean): + __visit_name__: str + +class NullType(TypeEngine): + __visit_name__: str + def literal_processor(self, dialect): ... + + class Comparator(TypeEngine.Comparator): ... + comparator_factory: Any + +class TableValueType(HasCacheKey, TypeEngine): + def __init__(self, *elements) -> None: ... + +class MatchType(Boolean): ... + +NULLTYPE: Any +BOOLEANTYPE: Any +STRINGTYPE: Any +INTEGERTYPE: Any +MATCHTYPE: Any +TABLEVALUE: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/traversals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/traversals.pyi new file mode 100644 index 000000000000..4ce0715c426e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/traversals.pyi @@ -0,0 +1,158 @@ +from typing import Any + +from .. import util +from ..util import HasMemoized +from .visitors import ExtendedInternalTraversal, InternalTraversal + +SKIP_TRAVERSE: Any +COMPARE_FAILED: bool +COMPARE_SUCCEEDED: bool +NO_CACHE: Any +CACHE_IN_PLACE: Any +CALL_GEN_CACHE_KEY: Any +STATIC_CACHE_KEY: Any +PROPAGATE_ATTRS: Any +ANON_NAME: Any + +def compare(obj1, obj2, **kw): ... + +class HasCacheKey: + inherit_cache: Any + +class MemoizedHasCacheKey(HasCacheKey, HasMemoized): ... + +class CacheKey: + def __hash__(self): ... + def to_offline_string(self, statement_cache, statement, parameters): ... 
+ def __eq__(self, other): ... + +class _CacheKey(ExtendedInternalTraversal): + visit_has_cache_key: Any + visit_clauseelement: Any + visit_clauseelement_list: Any + visit_annotations_key: Any + visit_clauseelement_tuple: Any + visit_memoized_select_entities: Any + visit_string: Any + visit_boolean: Any + visit_operator: Any + visit_plain_obj: Any + visit_statement_hint_list: Any + visit_type: Any + visit_anon_name: Any + visit_propagate_attrs: Any + def visit_with_context_options(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_inspectable(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_string_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_multi(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_multi_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_has_cache_key_tuples(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_has_cache_key_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_executable_options(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_inspectable_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_clauseelement_tuples(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_fromclause_ordered_set(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_clauseelement_unordered_set(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_named_ddl_element(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_prefix_sequence(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_setup_join_tuple(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_table_hint_list(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_plain_dict(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dialect_options(self, attrname, obj, parent, anon_map, bindparams): ... 
+ def visit_string_clauseelement_dict(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_string_multi_dict(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_fromclause_canonical_column_collection(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_unknown_structure(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dml_ordered_values(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dml_values(self, attrname, obj, parent, anon_map, bindparams): ... + def visit_dml_multi_values(self, attrname, obj, parent, anon_map, bindparams): ... + +class HasCopyInternals: ... + +class _CopyInternals(InternalTraversal): + def visit_clauseelement(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_list(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_tuple(self, attrname, parent, element, clone=..., **kw): ... + def visit_executable_options(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_unordered_set(self, attrname, parent, element, clone=..., **kw): ... + def visit_clauseelement_tuples(self, attrname, parent, element, clone=..., **kw): ... + def visit_string_clauseelement_dict(self, attrname, parent, element, clone=..., **kw): ... + def visit_setup_join_tuple(self, attrname, parent, element, clone=..., **kw): ... + def visit_memoized_select_entities(self, attrname, parent, element, **kw): ... + def visit_dml_ordered_values(self, attrname, parent, element, clone=..., **kw): ... + def visit_dml_values(self, attrname, parent, element, clone=..., **kw): ... + def visit_dml_multi_values(self, attrname, parent, element, clone=..., **kw): ... + def visit_propagate_attrs(self, attrname, parent, element, clone=..., **kw): ... + +class _GetChildren(InternalTraversal): + def visit_has_cache_key(self, element, **kw): ... + def visit_clauseelement(self, element, **kw): ... 
+ def visit_clauseelement_list(self, element, **kw): ... + def visit_clauseelement_tuple(self, element, **kw): ... + def visit_clauseelement_tuples(self, element, **kw): ... + def visit_fromclause_canonical_column_collection(self, element, **kw): ... + def visit_string_clauseelement_dict(self, element, **kw): ... + def visit_fromclause_ordered_set(self, element, **kw): ... + def visit_clauseelement_unordered_set(self, element, **kw): ... + def visit_setup_join_tuple(self, element, **kw) -> None: ... + def visit_memoized_select_entities(self, element, **kw): ... + def visit_dml_ordered_values(self, element, **kw) -> None: ... + def visit_dml_values(self, element, **kw) -> None: ... + def visit_dml_multi_values(self, element, **kw): ... + def visit_propagate_attrs(self, element, **kw): ... + +class anon_map(dict[Any, Any]): + index: int + def __init__(self) -> None: ... + def __missing__(self, key): ... + +class TraversalComparatorStrategy(InternalTraversal, util.MemoizedSlots): + stack: Any + cache: Any + anon_map: Any + def __init__(self) -> None: ... + def compare(self, obj1, obj2, **kw): ... + def compare_inner(self, obj1, obj2, **kw): ... + def visit_has_cache_key(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_propagate_attrs(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_has_cache_key_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_executable_options(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_clauseelement(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_fromclause_canonical_column_collection(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_fromclause_derived_column_collection(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_string_clauseelement_dict(self, attrname, left_parent, left, right_parent, right, **kw): ... 
+ def visit_clauseelement_tuples(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_clauseelement_list(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_clauseelement_tuple(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_clauseelement_unordered_set(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_fromclause_ordered_set(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_string(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_string_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_anon_name(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_boolean(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_operator(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_type(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_plain_dict(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_dialect_options(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_annotations_key(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_with_context_options(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_plain_obj(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_named_ddl_element(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_prefix_sequence(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_setup_join_tuple(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_memoized_select_entities(self, attrname, left_parent, left, right_parent, right, **kw): ... 
+ def visit_table_hint_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_statement_hint_list(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_unknown_structure(self, attrname, left_parent, left, right_parent, right, **kw) -> None: ... + def visit_dml_ordered_values(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_dml_values(self, attrname, left_parent, left, right_parent, right, **kw): ... + def visit_dml_multi_values(self, attrname, left_parent, left, right_parent, right, **kw): ... + def compare_clauselist(self, left, right, **kw): ... + def compare_binary(self, left, right, **kw): ... + def compare_bindparam(self, left, right, **kw): ... + +class ColIdentityComparatorStrategy(TraversalComparatorStrategy): + def compare_column_element(self, left, right, use_proxies: bool = ..., equivalents=..., **kw): ... + def compare_column(self, left, right, **kw): ... + def compare_label(self, left, right, **kw): ... + def compare_table(self, left, right, **kw): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/type_api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/type_api.pyi new file mode 100644 index 000000000000..64142873ebb5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/type_api.pyi @@ -0,0 +1,112 @@ +from typing import Any + +from .. import util +from . import operators +from .base import SchemaEventTarget +from .visitors import Traversible, TraversibleType + +BOOLEANTYPE: Any +INTEGERTYPE: Any +NULLTYPE: Any +STRINGTYPE: Any +MATCHTYPE: Any +INDEXABLE: Any +TABLEVALUE: Any + +class TypeEngine(Traversible): + class Comparator(operators.ColumnOperators): + default_comparator: Any + def __clause_element__(self): ... + expr: Any + type: Any + def __init__(self, expr) -> None: ... + def operate(self, op, *other, **kwargs): ... 
+ def reverse_operate(self, op, other, **kwargs): ... + def __reduce__(self): ... + hashable: bool + comparator_factory: Any + sort_key_function: Any + should_evaluate_none: bool + def evaluates_none(self): ... + def copy(self, **kw): ... + def compare_against_backend(self, dialect, conn_type) -> None: ... + def copy_value(self, value): ... + def literal_processor(self, dialect) -> None: ... + def bind_processor(self, dialect) -> None: ... + def result_processor(self, dialect, coltype) -> None: ... + def column_expression(self, colexpr) -> None: ... + def bind_expression(self, bindvalue) -> None: ... + def compare_values(self, x, y): ... + def get_dbapi_type(self, dbapi) -> None: ... + @property + def python_type(self) -> None: ... + def with_variant(self, type_, dialect_name): ... + def as_generic(self, allow_nulltype: bool = ...): ... + def dialect_impl(self, dialect): ... + def adapt(self, cls, **kw): ... + def coerce_compared_value(self, op, value): ... + def compile(self, dialect: Any | None = ...): ... + +class VisitableCheckKWArg(util.EnsureKWArgType, TraversibleType): ... + +class ExternalType: + cache_ok: Any + +class UserDefinedType: + __visit_name__: str + ensure_kwarg: str + def coerce_compared_value(self, op, value): ... + +class Emulated: + def adapt_to_emulated(self, impltype, **kw): ... + def adapt(self, impltype, **kw): ... + +class NativeForEmulated: + @classmethod + def adapt_native_to_emulated(cls, impl, **kw): ... + @classmethod + def adapt_emulated_to_native(cls, impl, **kw): ... + +class TypeDecorator(ExternalType, SchemaEventTarget, TypeEngine): + __visit_name__: str + impl: Any + def __init__(self, *args, **kwargs) -> None: ... + coerce_to_is_types: Any + + class Comparator(TypeEngine.Comparator): + def operate(self, op, *other, **kwargs): ... + def reverse_operate(self, op, other, **kwargs): ... + + @property + def comparator_factory(self): ... + def type_engine(self, dialect): ... + def load_dialect_impl(self, dialect): ... 
+ def __getattr__(self, key): ... + def process_literal_param(self, value, dialect) -> None: ... + def process_bind_param(self, value, dialect) -> None: ... + def process_result_value(self, value, dialect) -> None: ... + def literal_processor(self, dialect): ... + def bind_processor(self, dialect): ... + def result_processor(self, dialect, coltype): ... + def bind_expression(self, bindparam): ... + def column_expression(self, column): ... + def coerce_compared_value(self, op, value): ... + def copy(self, **kw): ... + def get_dbapi_type(self, dbapi): ... + def compare_values(self, x, y): ... + @property + def sort_key_function(self): ... + +class Variant(TypeDecorator): + cache_ok: bool + impl: Any + mapping: Any + def __init__(self, base, mapping) -> None: ... + def coerce_compared_value(self, operator, value): ... + def load_dialect_impl(self, dialect): ... + def with_variant(self, type_, dialect_name): ... + @property + def comparator_factory(self): ... + +def to_instance(typeobj, *arg, **kw): ... +def adapt_type(typeobj, colspecs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/util.pyi new file mode 100644 index 000000000000..78dd79992557 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/util.pyi @@ -0,0 +1,103 @@ +from typing import Any + +from . import visitors + +join_condition: Any + +def find_join_source(clauses, join_to): ... +def find_left_clause_that_matches_given(clauses, join_from): ... +def find_left_clause_to_join_from(clauses, join_to, onclause): ... +def visit_binary_product(fn, expr) -> None: ... +def find_tables( + clause, + check_columns: bool = ..., + include_aliases: bool = ..., + include_joins: bool = ..., + include_selects: bool = ..., + include_crud: bool = ..., +): ... +def unwrap_order_by(clause): ... +def unwrap_label_reference(element): ... 
+def expand_column_list_from_order_by(collist, order_by): ... +def clause_is_present(clause, search): ... +def tables_from_leftmost(clause) -> None: ... +def surface_selectables(clause) -> None: ... +def surface_selectables_only(clause) -> None: ... +def extract_first_column_annotation(column, annotation_name): ... +def selectables_overlap(left, right): ... +def bind_values(clause): ... + +class _repr_base: + max_chars: Any + def trunc(self, value): ... + +class _repr_row(_repr_base): + row: Any + max_chars: Any + def __init__(self, row, max_chars: int = ...) -> None: ... + +class _repr_params(_repr_base): + params: Any + ismulti: Any + batches: Any + max_chars: Any + def __init__(self, params, batches, max_chars: int = ..., ismulti: Any | None = ...) -> None: ... + +def adapt_criterion_to_null(crit, nulls): ... +def splice_joins(left, right, stop_on: Any | None = ...): ... +def reduce_columns(columns, *clauses, **kw): ... +def criterion_as_pairs( + expression, + consider_as_foreign_keys: Any | None = ..., + consider_as_referenced_keys: Any | None = ..., + any_operator: bool = ..., +): ... + +class ClauseAdapter(visitors.ReplacingExternalTraversal): + __traverse_options__: Any + selectable: Any + include_fn: Any + exclude_fn: Any + equivalents: Any + adapt_on_names: Any + adapt_from_selectables: Any + def __init__( + self, + selectable, + equivalents: Any | None = ..., + include_fn: Any | None = ..., + exclude_fn: Any | None = ..., + adapt_on_names: bool = ..., + anonymize_labels: bool = ..., + adapt_from_selectables: Any | None = ..., + ) -> None: ... + def replace(self, col, _include_singleton_constants: bool = ...): ... 
+ +class ColumnAdapter(ClauseAdapter): + columns: Any + adapt_required: Any + allow_label_resolve: Any + def __init__( + self, + selectable, + equivalents: Any | None = ..., + adapt_required: bool = ..., + include_fn: Any | None = ..., + exclude_fn: Any | None = ..., + adapt_on_names: bool = ..., + allow_label_resolve: bool = ..., + anonymize_labels: bool = ..., + adapt_from_selectables: Any | None = ..., + ) -> None: ... + + class _IncludeExcludeMapping: + parent: Any + columns: Any + def __init__(self, parent, columns) -> None: ... + def __getitem__(self, key): ... + + def wrap(self, adapter): ... + def traverse(self, obj): ... + adapt_clause: Any + adapt_list: Any + def adapt_check_present(self, col): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/visitors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/visitors.pyi new file mode 100644 index 000000000000..c3dd44793e24 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/sql/visitors.pyi @@ -0,0 +1,86 @@ +from typing import Any + +class TraversibleType(type): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +class Traversible: + def __class_getitem__(cls, key): ... + def get_children(self, omit_attrs=..., **kw): ... + +class _InternalTraversalType(type): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +class InternalTraversal: + def dispatch(self, visit_symbol): ... + def run_generated_dispatch(self, target, internal_dispatch, generate_dispatcher_name): ... + def generate_dispatch(self, target_cls, internal_dispatch, generate_dispatcher_name): ... 
+ dp_has_cache_key: Any + dp_has_cache_key_list: Any + dp_clauseelement: Any + dp_fromclause_canonical_column_collection: Any + dp_clauseelement_tuples: Any + dp_clauseelement_list: Any + dp_clauseelement_tuple: Any + dp_executable_options: Any + dp_with_context_options: Any + dp_fromclause_ordered_set: Any + dp_string: Any + dp_string_list: Any + dp_anon_name: Any + dp_boolean: Any + dp_operator: Any + dp_type: Any + dp_plain_dict: Any + dp_dialect_options: Any + dp_string_clauseelement_dict: Any + dp_string_multi_dict: Any + dp_annotations_key: Any + dp_plain_obj: Any + dp_named_ddl_element: Any + dp_prefix_sequence: Any + dp_table_hint_list: Any + dp_setup_join_tuple: Any + dp_memoized_select_entities: Any + dp_statement_hint_list: Any + dp_unknown_structure: Any + dp_dml_ordered_values: Any + dp_dml_values: Any + dp_dml_multi_values: Any + dp_propagate_attrs: Any + +class ExtendedInternalTraversal(InternalTraversal): + dp_ignore: Any + dp_inspectable: Any + dp_multi: Any + dp_multi_list: Any + dp_has_cache_key_tuples: Any + dp_inspectable_list: Any + +class ExternalTraversal: + __traverse_options__: Any + def traverse_single(self, obj, **kw): ... + def iterate(self, obj): ... + def traverse(self, obj): ... + @property + def visitor_iterator(self) -> None: ... + def chain(self, visitor): ... + +class CloningExternalTraversal(ExternalTraversal): + def copy_and_process(self, list_): ... + def traverse(self, obj): ... + +class ReplacingExternalTraversal(CloningExternalTraversal): + def replace(self, elem) -> None: ... + def traverse(self, obj): ... + +Visitable = Traversible +VisitableType = TraversibleType +ClauseVisitor = ExternalTraversal +CloningVisitor = CloningExternalTraversal +ReplacingCloningVisitor = ReplacingExternalTraversal + +def iterate(obj, opts=...) -> None: ... +def traverse_using(iterator, obj, visitors): ... +def traverse(obj, opts, visitors): ... +def cloned_traverse(obj, opts, visitors): ... +def replacement_traverse(obj, opts, replace): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/__init__.pyi new file mode 100644 index 000000000000..a27e338db2f9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/__init__.pyi @@ -0,0 +1,74 @@ +from . import config as config, mock as mock +from .assertions import ( + AssertsCompiledSQL as AssertsCompiledSQL, + AssertsExecutionResults as AssertsExecutionResults, + ComparesTables as ComparesTables, + assert_raises as assert_raises, + assert_raises_context_ok as assert_raises_context_ok, + assert_raises_message as assert_raises_message, + assert_raises_message_context_ok as assert_raises_message_context_ok, + emits_warning as emits_warning, + emits_warning_on as emits_warning_on, + eq_ as eq_, + eq_ignore_whitespace as eq_ignore_whitespace, + eq_regex as eq_regex, + expect_deprecated as expect_deprecated, + expect_deprecated_20 as expect_deprecated_20, + expect_raises as expect_raises, + expect_raises_message as expect_raises_message, + expect_warnings as expect_warnings, + in_ as in_, + is_ as is_, + is_false as is_false, + is_instance_of as is_instance_of, + is_none as is_none, + is_not as is_not, + is_not_ as is_not_, + is_not_none as is_not_none, + is_true as is_true, + le_ as le_, + ne_ as ne_, + not_in as not_in, + not_in_ as not_in_, + startswith_ as startswith_, + uses_deprecated as uses_deprecated, +) +from .config import ( + async_test as async_test, + combinations as combinations, + combinations_list as combinations_list, + db as db, + fixture as fixture, +) +from .exclusions import ( + db_spec as db_spec, + exclude as exclude, + fails as fails, + fails_if as fails_if, + fails_on as fails_on, + fails_on_everything_except as fails_on_everything_except, + future as future, + only_if as only_if, + only_on as only_on, + skip as skip, + skip_if as skip_if, +) +from .schema 
import eq_clause_element as eq_clause_element, eq_type_affinity as eq_type_affinity +from .util import ( + adict as adict, + fail as fail, + flag_combinations as flag_combinations, + force_drop_names as force_drop_names, + lambda_combinations as lambda_combinations, + metadata_fixture as metadata_fixture, + provide_metadata as provide_metadata, + resolve_lambda as resolve_lambda, + rowset as rowset, + run_as_contextmanager as run_as_contextmanager, + teardown_events as teardown_events, +) +from .warnings import assert_warnings as assert_warnings, warn_test_suite as warn_test_suite + +def against(*queries): ... + +crashes = skip diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/assertions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/assertions.pyi new file mode 100644 index 000000000000..024be0f28782 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/assertions.pyi @@ -0,0 +1,85 @@ +from typing import Any + +def expect_warnings(*messages, **kw): ... +def expect_warnings_on(db, *messages, **kw) -> None: ... +def emits_warning(*messages): ... +def expect_deprecated(*messages, **kw): ... +def expect_deprecated_20(*messages, **kw): ... +def emits_warning_on(db, *messages): ... +def uses_deprecated(*messages): ... +def global_cleanup_assertions() -> None: ... +def eq_regex(a, b, msg: Any | None = ...) -> None: ... +def eq_(a, b, msg: Any | None = ...) -> None: ... +def ne_(a, b, msg: Any | None = ...) -> None: ... +def le_(a, b, msg: Any | None = ...) -> None: ... +def is_instance_of(a, b, msg: Any | None = ...) -> None: ... +def is_none(a, msg: Any | None = ...) -> None: ... +def is_not_none(a, msg: Any | None = ...) -> None: ... +def is_true(a, msg: Any | None = ...) -> None: ... +def is_false(a, msg: Any | None = ...) -> None: ... +def is_(a, b, msg: Any | None = ...) -> None: ... +def is_not(a, b, msg: Any | None = ...) -> None: ... 
+ +is_not_ = is_not + +def in_(a, b, msg: Any | None = ...) -> None: ... +def not_in(a, b, msg: Any | None = ...) -> None: ... + +not_in_ = not_in + +def startswith_(a, fragment, msg: Any | None = ...) -> None: ... +def eq_ignore_whitespace(a, b, msg: Any | None = ...) -> None: ... +def assert_raises(except_cls, callable_, *args, **kw): ... +def assert_raises_context_ok(except_cls, callable_, *args, **kw): ... +def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): ... +def assert_raises_message_context_ok(except_cls, msg, callable_, *args, **kwargs): ... + +class _ErrorContainer: + error: Any + +def expect_raises(except_cls, check_context: bool = ...): ... +def expect_raises_message(except_cls, msg, check_context: bool = ...): ... + +class AssertsCompiledSQL: + test_statement: Any + supports_execution: Any + def assert_compile( + self, + clause, + result, + params: Any | None = ..., + checkparams: Any | None = ..., + for_executemany: bool = ..., + check_literal_execute: Any | None = ..., + check_post_param: Any | None = ..., + dialect: Any | None = ..., + checkpositional: Any | None = ..., + check_prefetch: Any | None = ..., + use_default_dialect: bool = ..., + allow_dialect_select: bool = ..., + supports_default_values: bool = ..., + supports_default_metavalue: bool = ..., + literal_binds: bool = ..., + render_postcompile: bool = ..., + schema_translate_map: Any | None = ..., + render_schema_translate: bool = ..., + default_schema_name: Any | None = ..., + from_linting: bool = ..., + ): ... + +class ComparesTables: + def assert_tables_equal(self, table, reflected_table, strict_types: bool = ...) -> None: ... + def assert_types_base(self, c1, c2) -> None: ... + +class AssertsExecutionResults: + def assert_result(self, result, class_, *objects) -> None: ... + def assert_list(self, result, class_, list_) -> None: ... + def assert_row(self, class_, rowobj, desc) -> None: ... + def assert_unordered_result(self, result, cls, *expected): ... 
+ def sql_execution_asserter(self, db: Any | None = ...): ... + def assert_sql_execution(self, db, callable_, *rules): ... + def assert_sql(self, db, callable_, rules): ... + def assert_sql_count(self, db, callable_, count) -> None: ... + def assert_multiple_sql_count(self, dbs, callable_, counts): ... + def assert_execution(self, db, *rules) -> None: ... + def assert_statement_count(self, db, count): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/assertsql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/assertsql.pyi new file mode 100644 index 000000000000..6e30c772702e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/assertsql.pyi @@ -0,0 +1,82 @@ +from typing import Any + +class AssertRule: + is_consumed: bool + errormessage: Any + consume_statement: bool + def process_statement(self, execute_observed) -> None: ... + def no_more_statements(self) -> None: ... + +class SQLMatchRule(AssertRule): ... + +class CursorSQL(SQLMatchRule): + statement: Any + params: Any + consume_statement: Any + def __init__(self, statement, params: Any | None = ..., consume_statement: bool = ...) -> None: ... + errormessage: Any + is_consumed: bool + def process_statement(self, execute_observed) -> None: ... + +class CompiledSQL(SQLMatchRule): + statement: Any + params: Any + dialect: Any + def __init__(self, statement, params: Any | None = ..., dialect: str = ...) -> None: ... + is_consumed: bool + errormessage: Any + def process_statement(self, execute_observed) -> None: ... + +class RegexSQL(CompiledSQL): + regex: Any + orig_regex: Any + params: Any + dialect: Any + def __init__(self, regex, params: Any | None = ..., dialect: str = ...) -> None: ... + +class DialectSQL(CompiledSQL): ... + +class CountStatements(AssertRule): + count: Any + def __init__(self, count) -> None: ... + def process_statement(self, execute_observed) -> None: ... 
+ def no_more_statements(self) -> None: ... + +class AllOf(AssertRule): + rules: Any + def __init__(self, *rules) -> None: ... + is_consumed: bool + errormessage: Any + def process_statement(self, execute_observed) -> None: ... + +class EachOf(AssertRule): + rules: Any + def __init__(self, *rules) -> None: ... + errormessage: Any + is_consumed: bool + def process_statement(self, execute_observed) -> None: ... + def no_more_statements(self) -> None: ... + +class Conditional(EachOf): + def __init__(self, condition, rules, else_rules) -> None: ... + +class Or(AllOf): + is_consumed: bool + errormessage: Any + def process_statement(self, execute_observed) -> None: ... + +class SQLExecuteObserved: + context: Any + clauseelement: Any + parameters: Any + statements: Any + def __init__(self, context, clauseelement, multiparams, params) -> None: ... + +class SQLCursorExecuteObserved: ... + +class SQLAsserter: + accumulated: Any + def __init__(self) -> None: ... + def assert_(self, *rules) -> None: ... + +def assert_engine(engine) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/asyncio.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/asyncio.pyi new file mode 100644 index 000000000000..7455e47b8f46 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/asyncio.pyi @@ -0,0 +1 @@ +ENABLE_ASYNCIO: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/config.pyi new file mode 100644 index 000000000000..db67aa9cbe9d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/config.pyi @@ -0,0 +1,47 @@ +from typing import Any + +requirements: Any +db: Any +db_url: Any +db_opts: Any +file_config: Any +test_schema: Any +test_schema_2: Any +any_async: bool +ident: str + +def combinations(*comb, **kw): ... +def combinations_list(arg_iterable, **kw): ... +def fixture(*arg, **kw): ... +def get_current_test_name(): ... +def mark_base_test_class(): ... + +class Config: + db: Any + db_opts: Any + options: Any + file_config: Any + test_schema: str + test_schema_2: str + is_async: Any + def __init__(self, db, db_opts, options, file_config) -> None: ... + @classmethod + def register(cls, db, db_opts, options, file_config): ... + @classmethod + def set_as_current(cls, config, namespace) -> None: ... + @classmethod + def push_engine(cls, db, namespace) -> None: ... + @classmethod + def push(cls, config, namespace) -> None: ... + @classmethod + def pop(cls, namespace) -> None: ... + @classmethod + def reset(cls, namespace) -> None: ... + @classmethod + def all_configs(cls): ... + @classmethod + def all_dbs(cls) -> None: ... + def skip_test(self, msg) -> None: ... + +def skip_test(msg) -> None: ... +def async_test(fn): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/engines.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/engines.pyi new file mode 100644 index 000000000000..400503ba8462 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/engines.pyi @@ -0,0 +1,68 @@ +from typing import Any + +class ConnectionKiller: + proxy_refs: Any + testing_engines: Any + dbapi_connections: Any + def __init__(self) -> None: ... + def add_pool(self, pool) -> None: ... + def add_engine(self, engine, scope) -> None: ... + def rollback_all(self) -> None: ... + def checkin_all(self) -> None: ... + def close_all(self) -> None: ... + def prepare_for_drop_tables(self, connection) -> None: ... + def after_test(self) -> None: ... + def after_test_outside_fixtures(self, test) -> None: ... + def stop_test_class_inside_fixtures(self) -> None: ... + def stop_test_class_outside_fixtures(self) -> None: ... + def final_cleanup(self) -> None: ... + def assert_all_closed(self) -> None: ... + +testing_reaper: Any + +def assert_conns_closed(fn, *args, **kw) -> None: ... +def rollback_open_connections(fn, *args, **kw) -> None: ... +def close_first(fn, *args, **kw) -> None: ... +def close_open_connections(fn, *args, **kw) -> None: ... +def all_dialects(exclude: Any | None = ...) -> None: ... + +class ReconnectFixture: + dbapi: Any + connections: Any + is_stopped: bool + def __init__(self, dbapi) -> None: ... + def __getattr__(self, key): ... + def connect(self, *args, **kwargs): ... + def shutdown(self, stop: bool = ...) -> None: ... + def restart(self) -> None: ... + +def reconnecting_engine(url: Any | None = ..., options: Any | None = ...): ... +def testing_engine( + url: Any | None = ..., + options: Any | None = ..., + future: Any | None = ..., + asyncio: bool = ..., + transfer_staticpool: bool = ..., +): ... +def mock_engine(dialect_name: Any | None = ...): ... 
+ +class DBAPIProxyCursor: + engine: Any + connection: Any + cursor: Any + def __init__(self, engine, conn, *args, **kwargs) -> None: ... + def execute(self, stmt, parameters: Any | None = ..., **kw): ... + def executemany(self, stmt, params, **kw): ... + def __iter__(self): ... + def __getattr__(self, key): ... + +class DBAPIProxyConnection: + conn: Any + engine: Any + cursor_cls: Any + def __init__(self, engine, cursor_cls) -> None: ... + def cursor(self, *args, **kwargs): ... + def close(self) -> None: ... + def __getattr__(self, key): ... + +def proxying_engine(conn_cls=..., cursor_cls=...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/entities.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/entities.pyi new file mode 100644 index 000000000000..0afb34e39941 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/entities.pyi @@ -0,0 +1,9 @@ +class BasicEntity: + def __init__(self, **kw) -> None: ... + +class ComparableMixin: + def __ne__(self, other): ... + def __eq__(self, other): ... + +class ComparableEntity(ComparableMixin, BasicEntity): + def __hash__(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/exclusions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/exclusions.pyi new file mode 100644 index 000000000000..4cc9913e6369 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/exclusions.pyi @@ -0,0 +1,76 @@ +from typing import Any + +def skip_if(predicate, reason: Any | None = ...): ... +def fails_if(predicate, reason: Any | None = ...): ... + +class compound: + fails: Any + skips: Any + tags: Any + def __init__(self) -> None: ... + def __add__(self, other): ... + def as_skips(self): ... + def add(self, *others): ... + def not_(self): ... + @property + def enabled(self): ... 
+ def enabled_for_config(self, config): ... + def matching_config_reasons(self, config): ... + def include_test(self, include_tags, exclude_tags): ... + def __call__(self, fn): ... + def fail_if(self) -> None: ... + +def requires_tag(tagname): ... +def tags(tagnames): ... +def only_if(predicate, reason: Any | None = ...): ... +def succeeds_if(predicate, reason: Any | None = ...): ... + +class Predicate: + @classmethod + def as_predicate(cls, predicate, description: Any | None = ...): ... + +class BooleanPredicate(Predicate): + value: Any + description: Any + def __init__(self, value, description: Any | None = ...) -> None: ... + def __call__(self, config): ... + +class SpecPredicate(Predicate): + db: Any + op: Any + spec: Any + description: Any + def __init__(self, db, op: Any | None = ..., spec: Any | None = ..., description: Any | None = ...) -> None: ... + def __call__(self, config): ... + +class LambdaPredicate(Predicate): + lambda_: Any + args: Any + kw: Any + description: Any + def __init__(self, lambda_, description: Any | None = ..., args: Any | None = ..., kw: Any | None = ...): ... + def __call__(self, config): ... + +class NotPredicate(Predicate): + predicate: Any + description: Any + def __init__(self, predicate, description: Any | None = ...) -> None: ... + def __call__(self, config): ... + +class OrPredicate(Predicate): + predicates: Any + description: Any + def __init__(self, predicates, description: Any | None = ...) -> None: ... + def __call__(self, config): ... + +def db_spec(*dbs): ... +def open(): ... +def closed(): ... +def fails(reason: Any | None = ...): ... +def future(fn, *arg): ... +def fails_on(db, reason: Any | None = ...): ... +def fails_on_everything_except(*dbs): ... +def skip(db, reason: Any | None = ...): ... +def only_on(dbs, reason: Any | None = ...): ... +def exclude(db, op, spec, reason: Any | None = ...): ... +def against(config, *queries): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/fixtures.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/fixtures.pyi new file mode 100644 index 000000000000..58c6be2e8aa7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/fixtures.pyi @@ -0,0 +1,83 @@ +from typing import Any + +from . import assertions +from .entities import ComparableEntity as ComparableEntity + +class TestBase: + __requires__: Any + __unsupported_on__: Any + __only_on__: Any + __skip_if__: Any + __leave_connections_for_teardown__: bool + def assert_(self, val, msg: Any | None = ...) -> None: ... + def connection_no_trans(self) -> None: ... + def connection(self) -> None: ... + def registry(self, metadata) -> None: ... + def future_connection(self, future_engine, connection) -> None: ... + def future_engine(self) -> None: ... + def testing_engine(self): ... + def async_testing_engine(self, testing_engine): ... + def metadata(self, request) -> None: ... + def trans_ctx_manager_fixture(self, request, metadata): ... + +class FutureEngineMixin: ... + +class TablesTest(TestBase): + run_setup_bind: str + run_define_tables: str + run_create_tables: str + run_inserts: str + run_deletes: str + run_dispose_bind: Any + bind: Any + tables: Any + other: Any + sequences: Any + @property + def tables_test_metadata(self): ... + @classmethod + def setup_bind(cls): ... + @classmethod + def dispose_bind(cls, bind) -> None: ... + @classmethod + def define_tables(cls, metadata) -> None: ... + @classmethod + def fixtures(cls): ... + @classmethod + def insert_data(cls, connection) -> None: ... + def sql_count_(self, count, fn) -> None: ... + def sql_eq_(self, callable_, statements) -> None: ... + +class NoCache: ... + +class RemovesEvents: + def event_listen(self, target, name, fn, **kw) -> None: ... + +def fixture_session(**kw): ... +def stop_test_class_inside_fixtures(cls) -> None: ... 
+def after_test() -> None: ... + +class ORMTest(TestBase): ... + +class MappedTest(TablesTest, assertions.AssertsExecutionResults): + run_setup_classes: str + run_setup_mappers: str + classes: Any + @classmethod + def setup_classes(cls) -> None: ... + @classmethod + def setup_mappers(cls) -> None: ... + +class DeclarativeMappedTest(MappedTest): + run_setup_classes: str + run_setup_mappers: str + +class ComputedReflectionFixtureTest(TablesTest): + run_inserts: Any + run_deletes: Any + __backend__: bool + __requires__: Any + regexp: Any + def normalize(self, text): ... + @classmethod + def define_tables(cls, metadata) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/mock.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/mock.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/pickleable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/pickleable.pyi new file mode 100644 index 000000000000..837dfac0409d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/pickleable.pyi @@ -0,0 +1,62 @@ +from typing import Any + +from . import fixtures + +class User(fixtures.ComparableEntity): ... +class Order(fixtures.ComparableEntity): ... +class Dingaling(fixtures.ComparableEntity): ... +class EmailUser(User): ... +class Address(fixtures.ComparableEntity): ... +class Child1(fixtures.ComparableEntity): ... +class Child2(fixtures.ComparableEntity): ... +class Parent(fixtures.ComparableEntity): ... + +class Screen: + obj: Any + parent: Any + def __init__(self, obj, parent: Any | None = ...) -> None: ... + +class Foo: + data: str + stuff: Any + moredata: Any + def __init__(self, moredata, stuff: str = ...) -> None: ... + __hash__: Any + def __eq__(self, other): ... 
+ +class Bar: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + __hash__: Any + def __eq__(self, other): ... + +class OldSchool: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + def __eq__(self, other): ... + +class OldSchoolWithoutCompare: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + +class BarWithoutCompare: + x: Any + y: Any + def __init__(self, x, y) -> None: ... + +class NotComparable: + data: Any + def __init__(self, data) -> None: ... + def __hash__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class BrokenComparable: + data: Any + def __init__(self, data) -> None: ... + def __hash__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/bootstrap.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/bootstrap.pyi new file mode 100644 index 000000000000..5c554d0de466 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/bootstrap.pyi @@ -0,0 +1,6 @@ +from typing import Any + +bootstrap_file: Any +to_bootstrap: Any + +def load_file_as_module(name): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/plugin_base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/plugin_base.pyi new file mode 100644 index 000000000000..e0cdfbe59f28 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/plugin_base.pyi @@ -0,0 +1,63 @@ +import abc +from typing import Any + +bootstrapped_as_sqlalchemy: bool +log: Any +py3k: Any +ABC = abc.ABC + +fixtures: Any +engines: Any +exclusions: Any +warnings: Any +profiling: Any +provision: Any +assertions: Any +requirements: Any +config: Any +testing: Any +util: Any +file_config: Any +include_tags: Any +exclude_tags: Any +options: Any + +def setup_options(make_option) -> None: ... +def configure_follower(follower_ident) -> None: ... +def memoize_important_follower_config(dict_) -> None: ... +def restore_important_follower_config(dict_) -> None: ... +def read_config() -> None: ... +def pre_begin(opt) -> None: ... +def set_coverage_flag(value) -> None: ... +def post_begin() -> None: ... + +pre_configure: Any +post_configure: Any + +def pre(fn): ... +def post(fn): ... +def want_class(name, cls): ... +def want_method(cls, fn): ... +def generate_sub_tests(cls, module) -> None: ... +def start_test_class_outside_fixtures(cls) -> None: ... +def stop_test_class(cls) -> None: ... +def stop_test_class_outside_fixtures(cls) -> None: ... +def final_process_cleanup() -> None: ... +def before_test(test, test_module_name, test_class, test_name) -> None: ... +def after_test(test) -> None: ... +def after_test_fixtures(test) -> None: ... + +class FixtureFunctions(ABC, metaclass=abc.ABCMeta): + @abc.abstractmethod + def skip_test_exception(self, *arg, **kw): ... + @abc.abstractmethod + def combinations(self, *args, **kw): ... + @abc.abstractmethod + def param_ident(self, *args, **kw): ... + @abc.abstractmethod + def fixture(self, *arg, **kw): ... 
+ def get_current_test_name(self) -> None: ... + @abc.abstractmethod + def mark_base_test_class(self): ... + +def set_fixture_functions(fixture_fn_class) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/pytestplugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/pytestplugin.pyi new file mode 100644 index 000000000000..ded604f35850 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/pytestplugin.pyi @@ -0,0 +1,36 @@ +from typing import Any + +from . import plugin_base + +has_xdist: bool +py2k: Any + +def pytest_addoption(parser) -> None: ... +def pytest_configure(config) -> None: ... + +DUMP_PYANNOTATE: bool + +def collect_types_fixture() -> None: ... +def pytest_sessionstart(session) -> None: ... +def pytest_sessionfinish(session) -> None: ... +def pytest_collection_finish(session): ... +def pytest_configure_node(node) -> None: ... +def pytest_testnodedown(node, error) -> None: ... +def pytest_collection_modifyitems(session, config, items): ... +def pytest_pycollect_makeitem(collector, name, obj): ... +def pytest_runtest_setup(item) -> None: ... +def pytest_runtest_teardown(item, nextitem) -> None: ... +def pytest_runtest_call(item) -> None: ... +def pytest_runtest_logreport(report) -> None: ... +def setup_class_methods(request) -> None: ... +def setup_test_methods(request) -> None: ... +def getargspec(fn): ... + +class PytestFixtureFunctions(plugin_base.FixtureFunctions): + def skip_test_exception(self, *arg, **kw): ... + def mark_base_test_class(self): ... + def combinations(self, *arg_sets, **kw): ... + def param_ident(self, *parameters): ... + def fixture(self, *arg, **kw): ... + def get_current_test_name(self): ... + def async_test(self, fn): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/reinvent_fixtures_py2k.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/reinvent_fixtures_py2k.pyi new file mode 100644 index 000000000000..83f3da8643b7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/plugin/reinvent_fixtures_py2k.pyi @@ -0,0 +1,6 @@ +def add_fixture(fn, fixture) -> None: ... +def scan_for_fixtures_to_use_for_class(item) -> None: ... +def run_class_fixture_setup(request) -> None: ... +def run_class_fixture_teardown(request) -> None: ... +def run_fn_fixture_setup(request) -> None: ... +def run_fn_fixture_teardown(request) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/profiling.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/profiling.pyi new file mode 100644 index 000000000000..ff48b0d7e32b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/profiling.pyi @@ -0,0 +1,20 @@ +from typing import Any + +class ProfileStatsFile: + force_write: Any + write: Any + fname: Any + short_fname: Any + data: Any + dump: Any + sort: Any + def __init__(self, filename, sort: str = ..., dump: Any | None = ...): ... + @property + def platform_key(self): ... + def has_stats(self): ... + def result(self, callcount): ... + def reset_count(self) -> None: ... + def replace(self, callcount) -> None: ... + +def function_call_count(variance: float = ..., times: int = ..., warmup: int = ...): ... +def count_functions(variance: float = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/provision.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/provision.pyi new file mode 100644 index 000000000000..ec8c6487d6b0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/provision.pyi @@ -0,0 +1,34 @@ +from typing import Any + +log: Any +FOLLOWER_IDENT: Any + +class register: + fns: Any + def __init__(self) -> None: ... + @classmethod + def init(cls, fn): ... + def for_db(self, *dbnames): ... + def __call__(self, cfg, *arg): ... + +def create_follower_db(follower_ident) -> None: ... +def setup_config(db_url, options, file_config, follower_ident): ... +def drop_follower_db(follower_ident) -> None: ... +def generate_db_urls(db_urls, extra_drivers) -> None: ... +def generate_driver_url(url, driver, query_str): ... +def drop_all_schema_objects_pre_tables(cfg, eng) -> None: ... +def drop_all_schema_objects_post_tables(cfg, eng) -> None: ... +def drop_all_schema_objects(cfg, eng) -> None: ... +def create_db(cfg, eng, ident) -> None: ... +def drop_db(cfg, eng, ident) -> None: ... +def update_db_opts(cfg, db_opts) -> None: ... +def post_configure_engine(url, engine, follower_ident) -> None: ... +def follower_url_from_main(url, ident): ... +def configure_follower(cfg, ident) -> None: ... +def run_reap_dbs(url, ident) -> None: ... +def reap_dbs(idents_file) -> None: ... +def temp_table_keyword_args(cfg, eng) -> None: ... +def prepare_for_drop_tables(config, connection) -> None: ... +def stop_test_class_outside_fixtures(config, db, testcls) -> None: ... +def get_temp_table_name(cfg, eng, base_name): ... +def set_default_schema_on_connection(cfg, dbapi_connection, schema_name) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/requirements.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/requirements.pyi new file mode 100644 index 000000000000..5f0a97c1a49a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/requirements.pyi @@ -0,0 +1,381 @@ +class Requirements: ... + +class SuiteRequirements(Requirements): + @property + def create_table(self): ... + @property + def drop_table(self): ... + @property + def table_ddl_if_exists(self): ... + @property + def index_ddl_if_exists(self): ... + @property + def foreign_keys(self): ... + @property + def table_value_constructor(self): ... + @property + def standard_cursor_sql(self): ... + @property + def on_update_cascade(self): ... + @property + def non_updating_cascade(self): ... + @property + def deferrable_fks(self): ... + @property + def on_update_or_deferrable_fks(self): ... + @property + def queue_pool(self): ... + @property + def self_referential_foreign_keys(self): ... + @property + def foreign_key_ddl(self): ... + @property + def named_constraints(self): ... + @property + def implicitly_named_constraints(self): ... + @property + def subqueries(self): ... + @property + def offset(self): ... + @property + def bound_limit_offset(self): ... + @property + def sql_expression_limit_offset(self): ... + @property + def parens_in_union_contained_select_w_limit_offset(self): ... + @property + def parens_in_union_contained_select_wo_limit_offset(self): ... + @property + def boolean_col_expressions(self): ... + @property + def nullable_booleans(self): ... + @property + def nullsordering(self): ... + @property + def standalone_binds(self): ... + @property + def standalone_null_binds_whereclause(self): ... + @property + def intersect(self): ... + @property + def except_(self): ... + @property + def window_functions(self): ... + @property + def ctes(self): ... 
+ @property + def ctes_with_update_delete(self): ... + @property + def ctes_on_dml(self): ... + @property + def autoincrement_insert(self): ... + @property + def fetch_rows_post_commit(self): ... + @property + def group_by_complex_expression(self): ... + @property + def sane_rowcount(self): ... + @property + def sane_multi_rowcount(self): ... + @property + def sane_rowcount_w_returning(self): ... + @property + def empty_inserts(self): ... + @property + def empty_inserts_executemany(self): ... + @property + def insert_from_select(self): ... + @property + def full_returning(self): ... + @property + def insert_executemany_returning(self): ... + @property + def returning(self): ... + @property + def tuple_in(self): ... + @property + def tuple_in_w_empty(self): ... + @property + def duplicate_names_in_cursor_description(self): ... + @property + def denormalized_names(self): ... + @property + def multivalues_inserts(self): ... + @property + def implements_get_lastrowid(self): ... + @property + def emulated_lastrowid(self): ... + @property + def emulated_lastrowid_even_with_sequences(self): ... + @property + def dbapi_lastrowid(self): ... + @property + def views(self): ... + @property + def schemas(self): ... + @property + def cross_schema_fk_reflection(self): ... + @property + def foreign_key_constraint_name_reflection(self): ... + @property + def implicit_default_schema(self): ... + @property + def default_schema_name_switch(self): ... + @property + def server_side_cursors(self): ... + @property + def sequences(self): ... + @property + def no_sequences(self): ... + @property + def sequences_optional(self): ... + @property + def supports_lastrowid(self): ... + @property + def no_lastrowid_support(self): ... + @property + def reflects_pk_names(self): ... + @property + def table_reflection(self): ... + @property + def reflect_tables_no_columns(self): ... + @property + def comment_reflection(self): ... + @property + def view_column_reflection(self): ... 
+ @property + def view_reflection(self): ... + @property + def schema_reflection(self): ... + @property + def primary_key_constraint_reflection(self): ... + @property + def foreign_key_constraint_reflection(self): ... + @property + def foreign_key_constraint_option_reflection_ondelete(self): ... + @property + def fk_constraint_option_reflection_ondelete_restrict(self): ... + @property + def fk_constraint_option_reflection_ondelete_noaction(self): ... + @property + def foreign_key_constraint_option_reflection_onupdate(self): ... + @property + def fk_constraint_option_reflection_onupdate_restrict(self): ... + @property + def temp_table_reflection(self): ... + @property + def temp_table_reflect_indexes(self): ... + @property + def temp_table_names(self): ... + @property + def temporary_tables(self): ... + @property + def temporary_views(self): ... + @property + def index_reflection(self): ... + @property + def index_reflects_included_columns(self): ... + @property + def indexes_with_ascdesc(self): ... + @property + def indexes_with_expressions(self): ... + @property + def unique_constraint_reflection(self): ... + @property + def check_constraint_reflection(self): ... + @property + def duplicate_key_raises_integrity_error(self): ... + @property + def unbounded_varchar(self): ... + @property + def unicode_data(self): ... + @property + def unicode_ddl(self): ... + @property + def symbol_names_w_double_quote(self): ... + @property + def datetime_literals(self): ... + @property + def datetime(self): ... + @property + def datetime_microseconds(self): ... + @property + def timestamp_microseconds(self): ... + @property + def datetime_historic(self): ... + @property + def date(self): ... + @property + def date_coerces_from_datetime(self): ... + @property + def date_historic(self): ... + @property + def time(self): ... + @property + def time_microseconds(self): ... + @property + def binary_comparisons(self): ... + @property + def binary_literals(self): ... 
+ @property + def autocommit(self): ... + @property + def isolation_level(self): ... + def get_isolation_levels(self, config) -> None: ... + @property + def json_type(self): ... + @property + def json_array_indexes(self): ... + @property + def json_index_supplementary_unicode_element(self): ... + @property + def legacy_unconditional_json_extract(self): ... + @property + def precision_numerics_general(self): ... + @property + def precision_numerics_enotation_small(self): ... + @property + def precision_numerics_enotation_large(self): ... + @property + def precision_numerics_many_significant_digits(self): ... + @property + def cast_precision_numerics_many_significant_digits(self): ... + @property + def implicit_decimal_binds(self): ... + @property + def nested_aggregates(self): ... + @property + def recursive_fk_cascade(self): ... + @property + def precision_numerics_retains_significant_digits(self): ... + @property + def infinity_floats(self): ... + @property + def precision_generic_float_type(self): ... + @property + def floats_to_four_decimals(self): ... + @property + def fetch_null_from_numeric(self): ... + @property + def text_type(self): ... + @property + def empty_strings_varchar(self): ... + @property + def empty_strings_text(self): ... + @property + def expressions_against_unbounded_text(self): ... + @property + def selectone(self): ... + @property + def savepoints(self): ... + @property + def two_phase_transactions(self): ... + @property + def update_from(self): ... + @property + def delete_from(self): ... + @property + def update_where_target_in_subquery(self): ... + @property + def mod_operator_as_percent_sign(self): ... + @property + def percent_schema_names(self): ... + @property + def order_by_col_from_union(self): ... + @property + def order_by_label_with_expression(self): ... + @property + def order_by_collation(self): ... + def get_order_by_collation(self, config) -> None: ... + @property + def unicode_connections(self): ... 
+ @property + def graceful_disconnects(self): ... + @property + def independent_connections(self): ... + @property + def skip_mysql_on_windows(self): ... + @property + def ad_hoc_engines(self): ... + @property + def no_windows(self): ... + @property + def timing_intensive(self): ... + @property + def memory_intensive(self): ... + @property + def threading_with_mock(self): ... + @property + def sqlalchemy2_stubs(self): ... + @property + def python2(self): ... + @property + def python3(self): ... + @property + def pep520(self): ... + @property + def insert_order_dicts(self): ... + @property + def python36(self): ... + @property + def python37(self): ... + @property + def dataclasses(self): ... + @property + def python38(self): ... + @property + def cpython(self): ... + @property + def patch_library(self): ... + @property + def non_broken_pickle(self): ... + @property + def predictable_gc(self): ... + @property + def no_coverage(self): ... + @property + def sqlite(self): ... + @property + def cextensions(self): ... + @property + def async_dialect(self): ... + @property + def greenlet(self): ... + @property + def computed_columns(self): ... + @property + def computed_columns_stored(self): ... + @property + def computed_columns_virtual(self): ... + @property + def computed_columns_default_persisted(self): ... + @property + def computed_columns_reflect_persisted(self): ... + @property + def supports_distinct_on(self): ... + @property + def supports_is_distinct_from(self): ... + @property + def identity_columns(self): ... + @property + def identity_columns_standard(self): ... + @property + def regexp_match(self): ... + @property + def regexp_replace(self): ... + @property + def fetch_first(self): ... + @property + def fetch_percent(self): ... + @property + def fetch_ties(self): ... + @property + def fetch_no_order_by(self): ... + @property + def fetch_offset_with_options(self): ... + @property + def fetch_expression(self): ... 
+ @property + def autoincrement_without_sequence(self): ... + @property + def generic_classes(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/schema.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/schema.pyi new file mode 100644 index 000000000000..dc8b62196f67 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/schema.pyi @@ -0,0 +1,16 @@ +from typing import Any + +def Table(*args, **kw): ... +def Column(*args, **kw): ... + +class eq_type_affinity: + target: Any + def __init__(self, target) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +class eq_clause_element: + target: Any + def __init__(self, target) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/util.pyi new file mode 100644 index 000000000000..e716d838a43c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/util.pyi @@ -0,0 +1,38 @@ +from typing import Any + +def non_refcount_gc_collect(*args) -> None: ... # only present on Python implementations with non-refcount gc +def gc_collect(generation: int = ...) -> None: ... +def lazy_gc() -> None: ... +def picklers(): ... +def random_choices(population, k: int = ...): ... +def round_decimal(value, prec): ... + +class RandomSet(set[Any]): + def __iter__(self): ... + def pop(self): ... + def union(self, other): ... + def difference(self, other): ... + def intersection(self, other): ... + def copy(self): ... + +def conforms_partial_ordering(tuples, sorted_elements): ... +def all_partial_orderings(tuples, elements): ... +def function_named(fn, name): ... +def run_as_contextmanager(ctx, fn, *arg, **kw): ... +def rowset(results): ... 
+def fail(msg) -> None: ... +def provide_metadata(fn, *args, **kw): ... +def flag_combinations(*combinations): ... +def lambda_combinations(lambda_arg_sets, **kw): ... +def resolve_lambda(__fn, **kw): ... +def metadata_fixture(ddl: str = ...): ... +def force_drop_names(*names): ... + +class adict(dict[Any, Any]): + def __getattribute__(self, key): ... + def __call__(self, *keys): ... + get_all: Any + +def drop_all_tables_from_metadata(metadata, engine_or_connection) -> None: ... +def drop_all_tables(engine, inspector, schema: Any | None = ..., include_names: Any | None = ...) -> None: ... +def teardown_events(event_cls): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/warnings.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/warnings.pyi new file mode 100644 index 000000000000..9aa4255ceaf6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/testing/warnings.pyi @@ -0,0 +1,7 @@ +from .. import exc as sa_exc + +class SATestSuiteWarning(sa_exc.SAWarning): ... + +def warn_test_suite(message) -> None: ... +def setup_filters() -> None: ... +def assert_warnings(fn, warning_msgs, regex: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/types.pyi new file mode 100644 index 000000000000..ee455334d03a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/types.pyi @@ -0,0 +1,106 @@ +from .sql.sqltypes import ( + ARRAY as ARRAY, + BIGINT as BIGINT, + BINARY as BINARY, + BLOB as BLOB, + BOOLEAN as BOOLEAN, + CHAR as CHAR, + CLOB as CLOB, + DATE as DATE, + DATETIME as DATETIME, + DECIMAL as DECIMAL, + FLOAT as FLOAT, + INT as INT, + INTEGER as INTEGER, + JSON as JSON, + NCHAR as NCHAR, + NUMERIC as NUMERIC, + NVARCHAR as NVARCHAR, + REAL as REAL, + SMALLINT as SMALLINT, + TEXT as TEXT, + TIME as TIME, + TIMESTAMP as TIMESTAMP, + VARBINARY as VARBINARY, + VARCHAR as VARCHAR, + BigInteger as BigInteger, + Boolean as Boolean, + Concatenable as Concatenable, + Date as Date, + DateTime as DateTime, + Enum as Enum, + Float as Float, + Indexable as Indexable, + Integer as Integer, + Interval as Interval, + LargeBinary as LargeBinary, + MatchType as MatchType, + NullType as NullType, + Numeric as Numeric, + PickleType as PickleType, + SmallInteger as SmallInteger, + String as String, + Text as Text, + Time as Time, + TupleType as TupleType, + Unicode as Unicode, + UnicodeText as UnicodeText, + _Binary as _Binary, +) +from .sql.type_api import ( + ExternalType as ExternalType, + TypeDecorator as TypeDecorator, + TypeEngine as TypeEngine, + UserDefinedType as UserDefinedType, +) + +__all__ = [ + "TypeEngine", + "TypeDecorator", + "UserDefinedType", + "ExternalType", + "INT", + "CHAR", + "VARCHAR", + "NCHAR", + "NVARCHAR", + "TEXT", + "Text", + "FLOAT", + "NUMERIC", + "REAL", + "DECIMAL", + "TIMESTAMP", + "DATETIME", + "CLOB", + "BLOB", + "BINARY", + "VARBINARY", + "BOOLEAN", + "BIGINT", + "SMALLINT", + "INTEGER", + "DATE", + "TIME", + "TupleType", + "String", + "Integer", + "SmallInteger", + "BigInteger", + 
"Numeric", + "Float", + "DateTime", + "Date", + "Time", + "LargeBinary", + "Boolean", + "Unicode", + "Concatenable", + "UnicodeText", + "PickleType", + "Interval", + "Enum", + "Indexable", + "ARRAY", + "JSON", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/__init__.pyi new file mode 100644 index 000000000000..d569a665dcd9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/__init__.pyi @@ -0,0 +1,174 @@ +from collections import defaultdict as defaultdict +from contextlib import contextmanager as contextmanager +from functools import partial as partial, update_wrapper as update_wrapper + +from ._collections import ( + EMPTY_DICT as EMPTY_DICT, + EMPTY_SET as EMPTY_SET, + FacadeDict as FacadeDict, + IdentitySet as IdentitySet, + ImmutableContainer as ImmutableContainer, + ImmutableProperties as ImmutableProperties, + LRUCache as LRUCache, + OrderedDict as OrderedDict, + OrderedIdentitySet as OrderedIdentitySet, + OrderedProperties as OrderedProperties, + OrderedSet as OrderedSet, + PopulateDict as PopulateDict, + Properties as Properties, + ScopedRegistry as ScopedRegistry, + ThreadLocalRegistry as ThreadLocalRegistry, + UniqueAppender as UniqueAppender, + WeakPopulateDict as WeakPopulateDict, + WeakSequence as WeakSequence, + coerce_generator_arg as coerce_generator_arg, + coerce_to_immutabledict as coerce_to_immutabledict, + collections_abc as collections_abc, + column_dict as column_dict, + column_set as column_set, + flatten_iterator as flatten_iterator, + has_dupes as has_dupes, + has_intersection as has_intersection, + immutabledict as immutabledict, + ordered_column_set as ordered_column_set, + sort_dictionary as sort_dictionary, + to_column_set as to_column_set, + to_list as to_list, + to_set as to_set, + unique_list as unique_list, + update_copy as update_copy, +) +from 
._preloaded import preload_module as preload_module, preloaded as preloaded +from .compat import ( + ABC as ABC, + TYPE_CHECKING as TYPE_CHECKING, + StringIO as StringIO, + arm as arm, + b as b, + b64decode as b64decode, + b64encode as b64encode, + binary_type as binary_type, + binary_types as binary_types, + byte_buffer as byte_buffer, + callable as callable, + cmp as cmp, + cpython as cpython, + dataclass_fields as dataclass_fields, + decode_backslashreplace as decode_backslashreplace, + dottedgetter as dottedgetter, + has_refcount_gc as has_refcount_gc, + inspect_getfullargspec as inspect_getfullargspec, + int_types as int_types, + iterbytes as iterbytes, + itertools_filter as itertools_filter, + itertools_filterfalse as itertools_filterfalse, + local_dataclass_fields as local_dataclass_fields, + namedtuple as namedtuple, + next as next, + nullcontext as nullcontext, + osx as osx, + parse_qsl as parse_qsl, + perf_counter as perf_counter, + pickle as pickle, + print_ as print_, + py2k as py2k, + py3k as py3k, + py37 as py37, + py38 as py38, + py39 as py39, + pypy as pypy, + quote_plus as quote_plus, + raise_ as raise_, + raise_from_cause as raise_from_cause, + reduce as reduce, + reraise as reraise, + string_types as string_types, + text_type as text_type, + threading as threading, + timezone as timezone, + u as u, + ue as ue, + unquote as unquote, + unquote_plus as unquote_plus, + win32 as win32, + with_metaclass as with_metaclass, + zip_longest as zip_longest, +) +from .concurrency import ( + asyncio as asyncio, + await_fallback as await_fallback, + await_only as await_only, + greenlet_spawn as greenlet_spawn, + is_exit_exception as is_exit_exception, +) +from .deprecations import ( + SQLALCHEMY_WARN_20 as SQLALCHEMY_WARN_20, + deprecated as deprecated, + deprecated_20 as deprecated_20, + deprecated_20_cls as deprecated_20_cls, + deprecated_cls as deprecated_cls, + deprecated_params as deprecated_params, + inject_docstring_text as inject_docstring_text, + 
moved_20 as moved_20, + warn_deprecated as warn_deprecated, + warn_deprecated_20 as warn_deprecated_20, +) +from .langhelpers import ( + EnsureKWArgType as EnsureKWArgType, + HasMemoized as HasMemoized, + MemoizedSlots as MemoizedSlots, + NoneType as NoneType, + PluginLoader as PluginLoader, + add_parameter_text as add_parameter_text, + as_interface as as_interface, + asbool as asbool, + asint as asint, + assert_arg_type as assert_arg_type, + attrsetter as attrsetter, + bool_or_str as bool_or_str, + chop_traceback as chop_traceback, + class_hierarchy as class_hierarchy, + classproperty as classproperty, + clsname_as_plain_name as clsname_as_plain_name, + coerce_kw_type as coerce_kw_type, + constructor_copy as constructor_copy, + constructor_key as constructor_key, + counter as counter, + create_proxy_methods as create_proxy_methods, + decode_slice as decode_slice, + decorator as decorator, + dictlike_iteritems as dictlike_iteritems, + duck_type_collection as duck_type_collection, + ellipses_string as ellipses_string, + format_argspec_init as format_argspec_init, + format_argspec_plus as format_argspec_plus, + generic_repr as generic_repr, + get_callable_argspec as get_callable_argspec, + get_cls_kwargs as get_cls_kwargs, + get_func_kwargs as get_func_kwargs, + getargspec_init as getargspec_init, + has_compiled_ext as has_compiled_ext, + hybridmethod as hybridmethod, + hybridproperty as hybridproperty, + iterate_attributes as iterate_attributes, + map_bits as map_bits, + md5_hex as md5_hex, + memoized_instancemethod as memoized_instancemethod, + memoized_property as memoized_property, + method_is_overridden as method_is_overridden, + methods_equivalent as methods_equivalent, + monkeypatch_proxied_specials as monkeypatch_proxied_specials, + only_once as only_once, + portable_instancemethod as portable_instancemethod, + quoted_token_parser as quoted_token_parser, + safe_reraise as safe_reraise, + set_creation_order as set_creation_order, + string_or_unprintable as 
string_or_unprintable, + symbol as symbol, + unbound_method_to_callable as unbound_method_to_callable, + walk_subclasses as walk_subclasses, + warn as warn, + warn_exception as warn_exception, + warn_limited as warn_limited, + wrap_callable as wrap_callable, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_collections.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_collections.pyi new file mode 100644 index 000000000000..e764687d1ec9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_collections.pyi @@ -0,0 +1,231 @@ +import collections.abc +import sys +from _typeshed import Self, SupportsKeysAndGetItem +from collections.abc import Callable, Iterable, Iterator, Mapping +from typing import Any, Generic, NoReturn, TypeVar, overload + +from ..cimmutabledict import immutabledict as immutabledict +from ..sql.elements import ColumnElement + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") +_T = TypeVar("_T") + +collections_abc = collections.abc + +EMPTY_SET: frozenset[Any] + +class ImmutableContainer: + def __delitem__(self, *arg: object, **kw: object) -> NoReturn: ... + def __setitem__(self, *arg: object, **kw: object) -> NoReturn: ... + def __setattr__(self, *arg: object, **kw: object) -> NoReturn: ... + +@overload +def coerce_to_immutabledict(d: None) -> immutabledict[Any, Any]: ... +@overload +def coerce_to_immutabledict(d: Mapping[_KT, _VT]) -> immutabledict[_KT, _VT]: ... + +EMPTY_DICT: immutabledict[Any, Any] + +class FacadeDict(ImmutableContainer, dict[Any, Any]): + clear: Any + pop: Any + popitem: Any + setdefault: Any + update: Any + def __new__(cls, *args): ... + def copy(self) -> None: ... # type: ignore[override] + def __reduce__(self): ... + +class Properties(Generic[_T]): + def __init__(self, data: dict[str, _T]) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... 
+ def __dir__(self) -> list[str]: ... + def __add__(self, other: Iterable[_S]) -> list[_S | _T]: ... + def __setitem__(self, key: str, obj: _T) -> None: ... + def __getitem__(self, key: str) -> _T: ... + def __delitem__(self, key: str) -> None: ... + def __setattr__(self, key: str, obj: _T) -> None: ... + def __getattr__(self, key: str) -> _T: ... + def __contains__(self, key: str) -> bool: ... + def as_immutable(self) -> ImmutableProperties[_T]: ... + def update(self, value: Iterable[tuple[str, _T]] | SupportsKeysAndGetItem[str, _T]) -> None: ... + @overload + def get(self, key: str) -> _T | None: ... + @overload + def get(self, key: str, default: _S) -> _T | _S: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_T]: ... + def items(self) -> list[tuple[str, _T]]: ... + def has_key(self, key: str) -> bool: ... + def clear(self) -> None: ... + +class OrderedProperties(Properties[_T], Generic[_T]): + def __init__(self) -> None: ... + +class ImmutableProperties(ImmutableContainer, Properties[_T], Generic[_T]): ... + +if sys.version_info >= (3, 7): + OrderedDict = dict +else: + class OrderedDict(dict[Any, Any]): + def __reduce__(self): ... + def __init__(self, ____sequence: Any | None = ..., **kwargs) -> None: ... + def clear(self) -> None: ... + def copy(self): ... + def __copy__(self): ... + def update(self, ____sequence: Any | None = ..., **kwargs) -> None: ... + def setdefault(self, key, value): ... + def __iter__(self): ... + def keys(self): ... + def values(self): ... + def items(self): ... + def __setitem__(self, key, obj) -> None: ... + def __delitem__(self, key) -> None: ... + def pop(self, key, *default): ... + def popitem(self): ... + +def sort_dictionary(d, key: Any | None = ...): ... + +class OrderedSet(set[_T], Generic[_T]): + def __init__(self, d: Iterable[_T] | None = ...) -> None: ... + def add(self, element: _T) -> None: ... + def remove(self, element: _T) -> None: ... + def insert(self, pos: int, element: _T) -> None: ... 
+ def discard(self, element: _T) -> None: ... + def clear(self) -> None: ... + def __getitem__(self, key: int) -> _T: ... + def __iter__(self) -> Iterator[_T]: ... + def __add__(self, other: Iterable[_S]) -> OrderedSet[_S | _T]: ... + def update(self: Self, iterable: Iterable[_T]) -> Self: ... # type: ignore[override] + __ior__ = update # type: ignore[assignment] + def union(self, other: Iterable[_S]) -> OrderedSet[_S | _T]: ... # type: ignore[override] + __or__ = union # type: ignore[assignment] + def intersection(self: Self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __and__ = intersection # type: ignore[assignment] + def symmetric_difference(self, other: Iterable[_S]) -> OrderedSet[_S | _T]: ... + __xor__ = symmetric_difference # type: ignore[assignment] + def difference(self: Self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __sub__ = difference # type: ignore[assignment] + def intersection_update(self: Self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __iand__ = intersection_update # type: ignore[assignment] + def symmetric_difference_update(self: Self, other: Iterable[_T]) -> Self: ... # type: ignore[override] + __ixor__ = symmetric_difference_update # type: ignore[assignment] + def difference_update(self: Self, other: Iterable[Any]) -> Self: ... # type: ignore[override] + __isub__ = difference_update # type: ignore[assignment] + +class IdentitySet: + def __init__(self, iterable: Any | None = ...) -> None: ... + def add(self, value) -> None: ... + def __contains__(self, value): ... + def remove(self, value) -> None: ... + def discard(self, value) -> None: ... + def pop(self): ... + def clear(self) -> None: ... + def __cmp__(self, other) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def issubset(self, iterable): ... + def __le__(self, other): ... + def __lt__(self, other): ... + def issuperset(self, iterable): ... + def __ge__(self, other): ... + def __gt__(self, other): ... 
+ def union(self, iterable): ... + def __or__(self, other): ... + def update(self, iterable) -> None: ... + def __ior__(self, other): ... + def difference(self, iterable): ... + def __sub__(self, other): ... + def difference_update(self, iterable) -> None: ... + def __isub__(self, other): ... + def intersection(self, iterable): ... + def __and__(self, other): ... + def intersection_update(self, iterable) -> None: ... + def __iand__(self, other): ... + def symmetric_difference(self, iterable): ... + def __xor__(self, other): ... + def symmetric_difference_update(self, iterable) -> None: ... + def __ixor__(self, other): ... + def copy(self): ... + __copy__: Any + def __len__(self): ... + def __iter__(self): ... + def __hash__(self): ... + +class WeakSequence: + def __init__(self, __elements=...) -> None: ... + def append(self, item) -> None: ... + def __len__(self): ... + def __iter__(self): ... + def __getitem__(self, index): ... + +class OrderedIdentitySet(IdentitySet): + def __init__(self, iterable: Any | None = ...) -> None: ... + +class PopulateDict(dict[Any, Any]): + creator: Any + def __init__(self, creator) -> None: ... + def __missing__(self, key): ... + +class WeakPopulateDict(dict[Any, Any]): + creator: Any + weakself: Any + def __init__(self, creator_method) -> None: ... + def __missing__(self, key): ... + +column_set = set +column_dict = dict +ordered_column_set = OrderedSet[ColumnElement[Any]] + +def unique_list(seq: Iterable[_T], hashfunc: Callable[[_T], Any] | None = ...) -> list[_T]: ... + +class UniqueAppender: + data: Any + def __init__(self, data, via: Any | None = ...) -> None: ... + def append(self, item) -> None: ... + def __iter__(self): ... + +def coerce_generator_arg(arg): ... +def to_list(x, default: Any | None = ...): ... +def has_intersection(set_, iterable): ... +def to_set(x): ... +def to_column_set(x): ... +def update_copy(d, _new: Any | None = ..., **kw): ... +def flatten_iterator(x) -> None: ... 
+ +class LRUCache(dict[Any, Any]): + capacity: Any + threshold: Any + size_alert: Any + def __init__(self, capacity: int = ..., threshold: float = ..., size_alert: Any | None = ...) -> None: ... + def get(self, key, default: Any | None = ...): ... + def __getitem__(self, key): ... + def values(self): ... + def setdefault(self, key, value): ... + def __setitem__(self, key, value) -> None: ... + @property + def size_threshold(self): ... + +class ScopedRegistry: + createfunc: Any + scopefunc: Any + registry: Any + def __init__(self, createfunc, scopefunc) -> None: ... + def __call__(self): ... + def has(self): ... + def set(self, obj) -> None: ... + def clear(self) -> None: ... + +class ThreadLocalRegistry(ScopedRegistry): + createfunc: Any + registry: Any + def __init__(self, createfunc) -> None: ... + def __call__(self): ... + def has(self): ... + def set(self, obj) -> None: ... + def clear(self) -> None: ... + +def has_dupes(sequence, target): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_compat_py3k.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_compat_py3k.pyi new file mode 100644 index 000000000000..d23165bf6456 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_compat_py3k.pyi @@ -0,0 +1,10 @@ +from typing import Any + +class _AsyncGeneratorContextManager: + gen: Any + __doc__: Any + def __init__(self, func, args, kwds) -> None: ... + async def __aenter__(self): ... + async def __aexit__(self, typ, value, traceback): ... + +def asynccontextmanager(func): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_concurrency_py3k.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_concurrency_py3k.pyi new file mode 100644 index 000000000000..b900f1a846b2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_concurrency_py3k.pyi @@ -0,0 +1,26 @@ +import asyncio as asyncio +from collections.abc import Callable, Coroutine +from typing import Any + +from .langhelpers import memoized_property + +_greenlet = Any # actually greenlet.greenlet + +def is_exit_exception(e): ... + +class _AsyncIoGreenlet(_greenlet): + driver: Any + gr_context: Any + def __init__(self, fn, driver) -> None: ... + +def await_only(awaitable: Coroutine[Any, Any, Any]) -> Any: ... +def await_fallback(awaitable: Coroutine[Any, Any, Any]) -> Any: ... +async def greenlet_spawn(fn: Callable[..., Any], *args, _require_await: bool = ..., **kwargs) -> Any: ... + +class AsyncAdaptedLock: + @memoized_property + def mutex(self): ... + def __enter__(self): ... + def __exit__(self, *arg, **kw) -> None: ... + +def get_event_loop(): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_preloaded.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_preloaded.pyi new file mode 100644 index 000000000000..eaabad39009d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/_preloaded.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class _ModuleRegistry: + module_registry: Any + prefix: Any + def __init__(self, prefix: str = ...) -> None: ... + def preload_module(self, *deps): ... + def import_prefix(self, path) -> None: ... 
+ +preloaded: Any +preload_module: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/compat.pyi new file mode 100644 index 000000000000..6b05ce3901ea --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/compat.pyi @@ -0,0 +1,104 @@ +import builtins +import collections +import contextlib +import itertools +import operator +import pickle as pickle +import threading as threading +from abc import ABC as ABC +from datetime import timezone as timezone +from functools import reduce as reduce +from io import BytesIO as BytesIO, StringIO as StringIO +from itertools import zip_longest as zip_longest +from time import perf_counter as perf_counter +from typing import TYPE_CHECKING as TYPE_CHECKING, Any, NamedTuple +from urllib.parse import ( + parse_qsl as parse_qsl, + quote as quote, + quote_plus as quote_plus, + unquote as unquote, + unquote_plus as unquote_plus, +) + +byte_buffer = BytesIO + +py39: Any +py38: Any +py37: Any +py3k: Any +py2k: Any +pypy: Any +cpython: Any +win32: Any +osx: Any +arm: Any +has_refcount_gc: Any +contextmanager = contextlib.contextmanager +dottedgetter = operator.attrgetter +namedtuple = collections.namedtuple # noqa Y024 +next = builtins.next + +class FullArgSpec(NamedTuple): + args: Any + varargs: Any + varkw: Any + defaults: Any + kwonlyargs: Any + kwonlydefaults: Any + annotations: Any + +class nullcontext: + enter_result: Any + def __init__(self, enter_result: Any | None = ...) -> None: ... + def __enter__(self): ... + def __exit__(self, *excinfo) -> None: ... + +def inspect_getfullargspec(func): ... +def importlib_metadata_get(group): ... + +string_types: tuple[type, ...] +binary_types: tuple[type, ...] +binary_type = bytes +text_type = str +int_types: tuple[type, ...] 
+iterbytes = iter +long_type = int +itertools_filterfalse = itertools.filterfalse +itertools_filter = filter +itertools_imap = map +exec_: Any +import_: Any +print_: Any + +def b(s): ... +def b64decode(x): ... +def b64encode(x): ... +def decode_backslashreplace(text, encoding): ... +def cmp(a, b): ... +def raise_(exception, with_traceback: Any | None = ..., replace_context: Any | None = ..., from_: bool = ...) -> None: ... +def u(s): ... +def ue(s): ... + +callable = builtins.callable + +def safe_bytestring(text): ... +def inspect_formatargspec( + args, + varargs: Any | None = ..., + varkw: Any | None = ..., + defaults: Any | None = ..., + kwonlyargs=..., + kwonlydefaults=..., + annotations=..., + formatarg=..., + formatvarargs=..., + formatvarkw=..., + formatvalue=..., + formatreturns=..., + formatannotation=..., +): ... +def dataclass_fields(cls): ... +def local_dataclass_fields(cls): ... +def raise_from_cause(exception, exc_info: Any | None = ...) -> None: ... +def reraise(tp, value, tb: Any | None = ..., cause: Any | None = ...) -> None: ... +def with_metaclass(meta, *bases, **kw): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/concurrency.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/concurrency.pyi new file mode 100644 index 000000000000..40fbc57509c5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/concurrency.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ._compat_py3k import asynccontextmanager as asynccontextmanager +from ._concurrency_py3k import ( + AsyncAdaptedLock as AsyncAdaptedLock, + await_fallback as await_fallback, + await_only as await_only, + greenlet_spawn as greenlet_spawn, + is_exit_exception as is_exit_exception, +) + +have_greenlet: bool +asyncio: Any | None diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/deprecations.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/deprecations.pyi new file mode 100644 index 000000000000..73f7d1722bae --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/deprecations.pyi @@ -0,0 +1,25 @@ +from typing import Any + +from .langhelpers import ( + decorator as decorator, + inject_docstring_text as inject_docstring_text, + inject_param_text as inject_param_text, +) + +SQLALCHEMY_WARN_20: bool + +def warn_deprecated(msg, version, stacklevel: int = ..., code: Any | None = ...) -> None: ... +def warn_deprecated_limited(msg, args, version, stacklevel: int = ..., code: Any | None = ...) -> None: ... +def warn_deprecated_20(msg, stacklevel: int = ..., code: Any | None = ...) -> None: ... +def deprecated_cls(version, message, constructor: str = ...): ... +def deprecated_20_cls(clsname, alternative: Any | None = ..., constructor: str = ..., becomes_legacy: bool = ...): ... +def deprecated( + version, + message: Any | None = ..., + add_deprecation_to_docstring: bool = ..., + warning: Any | None = ..., + enable_warnings: bool = ..., +): ... 
+def moved_20(message, **kw): ... +def deprecated_20(api_name, alternative: Any | None = ..., becomes_legacy: bool = ..., **kw): ... +def deprecated_params(**specs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/langhelpers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/langhelpers.pyi new file mode 100644 index 000000000000..8a01199f938c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/langhelpers.pyi @@ -0,0 +1,163 @@ +from _typeshed import Self +from collections.abc import Callable +from typing import Any, Generic, TypeVar, overload + +from . import compat + +_R = TypeVar("_R") + +def md5_hex(x): ... + +class safe_reraise: + warn_only: Any + def __init__(self, warn_only: bool = ...) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, type_, value, traceback) -> None: ... + +def walk_subclasses(cls) -> None: ... +def string_or_unprintable(element): ... +def clsname_as_plain_name(cls): ... +def method_is_overridden(instance_or_cls, against_method): ... +def decode_slice(slc): ... +def map_bits(fn, n) -> None: ... +def decorator(target): ... +def public_factory(target, location, class_location: Any | None = ...): ... + +class PluginLoader: + group: Any + impls: Any + auto_fn: Any + def __init__(self, group, auto_fn: Any | None = ...) -> None: ... + def clear(self) -> None: ... + def load(self, name): ... + def register(self, name, modulepath, objname): ... + +def get_cls_kwargs(cls, _set: Any | None = ...): ... +def get_func_kwargs(func): ... +def get_callable_argspec(fn, no_self: bool = ..., _is_init: bool = ...): ... +def format_argspec_plus(fn, grouped: bool = ...): ... +def format_argspec_init(method, grouped: bool = ...): ... +def create_proxy_methods( + target_cls, target_cls_sphinx_name, proxy_cls_sphinx_name, classmethods=..., methods=..., attributes=... +): ... +def getargspec_init(method): ... 
+def unbound_method_to_callable(func_or_cls): ... +def generic_repr(obj, additional_kw=..., to_inspect: Any | None = ..., omit_kwarg=...): ... + +class portable_instancemethod: + target: Any + name: Any + kwargs: Any + def __init__(self, meth, kwargs=...) -> None: ... + def __call__(self, *arg, **kw): ... + +def class_hierarchy(cls): ... +def iterate_attributes(cls) -> None: ... +def monkeypatch_proxied_specials( + into_cls, from_cls, skip: Any | None = ..., only: Any | None = ..., name: str = ..., from_instance: Any | None = ... +) -> None: ... +def methods_equivalent(meth1, meth2): ... +def as_interface(obj, cls: Any | None = ..., methods: Any | None = ..., required: Any | None = ...): ... + +class memoized_property(Generic[_R]): + fget: Callable[..., _R] + __doc__: str + __name__: str + def __init__(self, fget: Callable[..., _R], doc: str | None = ...) -> None: ... + @overload + def __get__(self: Self, obj: None, cls: object) -> Self: ... + @overload + def __get__(self, obj: object, cls: object) -> _R: ... + @classmethod + def reset(cls, obj: object, name: str) -> None: ... + +def memoized_instancemethod(fn): ... + +class HasMemoized: + class memoized_attribute(Generic[_R]): + fget: Callable[..., _R] + __doc__: str + __name__: str + def __init__(self, fget: Callable[..., _R], doc: str | None = ...) -> None: ... + @overload + def __get__(self: Self, obj: None, cls: object) -> Self: ... + @overload + def __get__(self, obj: object, cls: object) -> _R: ... + + @classmethod + def memoized_instancemethod(cls, fn): ... + +class MemoizedSlots: + def __getattr__(self, key): ... + +def asbool(obj): ... +def bool_or_str(*text): ... +def asint(value): ... +def coerce_kw_type(kw, key, type_, flexi_bool: bool = ..., dest: Any | None = ...) -> None: ... +def constructor_key(obj, cls): ... +def constructor_copy(obj, cls, *args, **kw): ... +def counter(): ... +def duck_type_collection(specimen, default: Any | None = ...): ... +def assert_arg_type(arg, argtype, name): ... 
+def dictlike_iteritems(dictlike): ... + +class classproperty(property): + __doc__: Any + def __init__(self, fget, *arg, **kw) -> None: ... + def __get__(self, self_, cls): ... + +class hybridproperty(Generic[_R]): + func: Callable[..., _R] + clslevel: Callable[..., _R] + def __init__(self, func: Callable[..., _R]) -> None: ... + @overload + def __get__(self, instance: None, owner: Any) -> _R: ... + @overload + def __get__(self, instance: object, owner: object) -> _R: ... + def classlevel(self: Self, func: Callable[..., _R]) -> Self: ... + +class hybridmethod: + func: Any + clslevel: Any + def __init__(self, func) -> None: ... + def __get__(self, instance, owner): ... + def classlevel(self, func): ... + +class _symbol(int): + def __new__(cls, name, doc: Any | None = ..., canonical: Any | None = ...): ... + def __reduce__(self): ... + +class symbol: + symbols: Any + def __new__(cls, name, doc: Any | None = ..., canonical: Any | None = ...): ... + @classmethod + def parse_user_argument(cls, arg, choices, name, resolve_symbol_names: bool = ...): ... + +def set_creation_order(instance) -> None: ... +def warn_exception(func, *args, **kwargs): ... +def ellipses_string(value, len_: int = ...): ... + +class _hash_limit_string(compat.text_type): + def __new__(cls, value, num, args): ... + def __hash__(self): ... + def __eq__(self, other): ... + +def warn(msg, code: Any | None = ...) -> None: ... +def warn_limited(msg, args) -> None: ... +def only_once(fn, retry_on_exception): ... +def chop_traceback(tb, exclude_prefix=..., exclude_suffix=...): ... + +NoneType: Any + +def attrsetter(attrname): ... + +class EnsureKWArgType(type): + def __init__(cls, clsname, bases, clsdict) -> None: ... + +def wrap_callable(wrapper, fn): ... +def quoted_token_parser(value): ... +def add_parameter_text(params, text): ... +def inject_docstring_text(doctext, injecttext, pos): ... +def inject_param_text(doctext, inject_params): ... +def repr_tuple_names(names): ... +def has_compiled_ext(): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/queue.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/queue.pyi new file mode 100644 index 000000000000..d7b986a37ccb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/queue.pyi @@ -0,0 +1,34 @@ +from typing import Any + +class Empty(Exception): ... +class Full(Exception): ... + +class Queue: + mutex: Any + not_empty: Any + not_full: Any + use_lifo: Any + def __init__(self, maxsize: int = ..., use_lifo: bool = ...) -> None: ... + def qsize(self): ... + def empty(self): ... + def full(self): ... + def put(self, item, block: bool = ..., timeout: Any | None = ...) -> None: ... + def put_nowait(self, item): ... + def get(self, block: bool = ..., timeout: Any | None = ...): ... + def get_nowait(self): ... + +class AsyncAdaptedQueue: + await_: Any + use_lifo: Any + maxsize: Any + def __init__(self, maxsize: int = ..., use_lifo: bool = ...) -> None: ... + def empty(self): ... + def full(self): ... + def qsize(self): ... + def put_nowait(self, item): ... + def put(self, item, block: bool = ..., timeout: Any | None = ...): ... + def get_nowait(self): ... + def get(self, block: bool = ..., timeout: Any | None = ...): ... + +class FallbackAsyncAdaptedQueue(AsyncAdaptedQueue): + await_: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/topological.pyi b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/topological.pyi new file mode 100644 index 000000000000..04428e1ba790 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/SQLAlchemy/sqlalchemy/util/topological.pyi @@ -0,0 +1,3 @@ +def sort_as_subsets(tuples, allitems) -> None: ... +def sort(tuples, allitems, deterministic_order: bool = ...) -> None: ... +def find_cycles(tuples, allitems): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Send2Trash/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Send2Trash/METADATA.toml index 71d785350136..690242a33e3e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/Send2Trash/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/Send2Trash/METADATA.toml @@ -1 +1 @@ -version = "1.8" +version = "1.8.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/METADATA.toml deleted file mode 100644 index 699e012cc811..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/METADATA.toml +++ /dev/null @@ -1,4 +0,0 @@ -version = "1.0" -python2 = true -requires = [] -obsolete_since = "2.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/__init__.pyi deleted file mode 100644 index 8c351ab47609..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/__init__.pyi +++ /dev/null @@ -1,151 +0,0 @@ -from types import ModuleType -from typing import Any - -from werkzeug import ( - _internal, - datastructures, - debug, - exceptions, - formparser, - http, - local, - security, - serving, - test, - testapp, - urls, - useragents, - utils, - wrappers, - wsgi, -) - -class module(ModuleType): - def __getattr__(self, name): ... - def __dir__(self): ... 
- -__version__: Any - -run_simple = serving.run_simple -test_app = testapp.test_app -UserAgent = useragents.UserAgent -_easteregg = _internal._easteregg -DebuggedApplication = debug.DebuggedApplication -MultiDict = datastructures.MultiDict -CombinedMultiDict = datastructures.CombinedMultiDict -Headers = datastructures.Headers -EnvironHeaders = datastructures.EnvironHeaders -ImmutableList = datastructures.ImmutableList -ImmutableDict = datastructures.ImmutableDict -ImmutableMultiDict = datastructures.ImmutableMultiDict -TypeConversionDict = datastructures.TypeConversionDict -ImmutableTypeConversionDict = datastructures.ImmutableTypeConversionDict -Accept = datastructures.Accept -MIMEAccept = datastructures.MIMEAccept -CharsetAccept = datastructures.CharsetAccept -LanguageAccept = datastructures.LanguageAccept -RequestCacheControl = datastructures.RequestCacheControl -ResponseCacheControl = datastructures.ResponseCacheControl -ETags = datastructures.ETags -HeaderSet = datastructures.HeaderSet -WWWAuthenticate = datastructures.WWWAuthenticate -Authorization = datastructures.Authorization -FileMultiDict = datastructures.FileMultiDict -CallbackDict = datastructures.CallbackDict -FileStorage = datastructures.FileStorage -OrderedMultiDict = datastructures.OrderedMultiDict -ImmutableOrderedMultiDict = datastructures.ImmutableOrderedMultiDict -escape = utils.escape -environ_property = utils.environ_property -append_slash_redirect = utils.append_slash_redirect -redirect = utils.redirect -cached_property = utils.cached_property -import_string = utils.import_string -dump_cookie = http.dump_cookie -parse_cookie = http.parse_cookie -unescape = utils.unescape -format_string = utils.format_string -find_modules = utils.find_modules -header_property = utils.header_property -html = utils.html -xhtml = utils.xhtml -HTMLBuilder = utils.HTMLBuilder -validate_arguments = utils.validate_arguments -ArgumentValidationError = utils.ArgumentValidationError -bind_arguments = 
utils.bind_arguments -secure_filename = utils.secure_filename -BaseResponse = wrappers.BaseResponse -BaseRequest = wrappers.BaseRequest -Request = wrappers.Request -Response = wrappers.Response -AcceptMixin = wrappers.AcceptMixin -ETagRequestMixin = wrappers.ETagRequestMixin -ETagResponseMixin = wrappers.ETagResponseMixin -ResponseStreamMixin = wrappers.ResponseStreamMixin -CommonResponseDescriptorsMixin = wrappers.CommonResponseDescriptorsMixin -UserAgentMixin = wrappers.UserAgentMixin -AuthorizationMixin = wrappers.AuthorizationMixin -WWWAuthenticateMixin = wrappers.WWWAuthenticateMixin -CommonRequestDescriptorsMixin = wrappers.CommonRequestDescriptorsMixin -Local = local.Local -LocalManager = local.LocalManager -LocalProxy = local.LocalProxy -LocalStack = local.LocalStack -release_local = local.release_local -generate_password_hash = security.generate_password_hash -check_password_hash = security.check_password_hash -Client = test.Client -EnvironBuilder = test.EnvironBuilder -create_environ = test.create_environ -run_wsgi_app = test.run_wsgi_app -get_current_url = wsgi.get_current_url -get_host = wsgi.get_host -pop_path_info = wsgi.pop_path_info -peek_path_info = wsgi.peek_path_info -SharedDataMiddleware = wsgi.SharedDataMiddleware -DispatcherMiddleware = wsgi.DispatcherMiddleware -ClosingIterator = wsgi.ClosingIterator -FileWrapper = wsgi.FileWrapper -make_line_iter = wsgi.make_line_iter -LimitedStream = wsgi.LimitedStream -responder = wsgi.responder -wrap_file = wsgi.wrap_file -extract_path_info = wsgi.extract_path_info -parse_etags = http.parse_etags -parse_date = http.parse_date -http_date = http.http_date -cookie_date = http.cookie_date -parse_cache_control_header = http.parse_cache_control_header -is_resource_modified = http.is_resource_modified -parse_accept_header = http.parse_accept_header -parse_set_header = http.parse_set_header -quote_etag = http.quote_etag -unquote_etag = http.unquote_etag -generate_etag = http.generate_etag -dump_header = 
http.dump_header -parse_list_header = http.parse_list_header -parse_dict_header = http.parse_dict_header -parse_authorization_header = http.parse_authorization_header -parse_www_authenticate_header = http.parse_www_authenticate_header -remove_entity_headers = http.remove_entity_headers -is_entity_header = http.is_entity_header -remove_hop_by_hop_headers = http.remove_hop_by_hop_headers -parse_options_header = http.parse_options_header -dump_options_header = http.dump_options_header -is_hop_by_hop_header = http.is_hop_by_hop_header -unquote_header_value = http.unquote_header_value -quote_header_value = http.quote_header_value -HTTP_STATUS_CODES = http.HTTP_STATUS_CODES -url_decode = urls.url_decode -url_encode = urls.url_encode -url_quote = urls.url_quote -url_quote_plus = urls.url_quote_plus -url_unquote = urls.url_unquote -url_unquote_plus = urls.url_unquote_plus -url_fix = urls.url_fix -Href = urls.Href -iri_to_uri = urls.iri_to_uri -uri_to_iri = urls.uri_to_iri -parse_form_data = formparser.parse_form_data -abort = exceptions.Aborter -Aborter = exceptions.Aborter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_compat.pyi deleted file mode 100644 index 788f16f7737c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_compat.pyi +++ /dev/null @@ -1,53 +0,0 @@ -import sys -from typing import Any, Text - -if sys.version_info >= (3,): - from io import BytesIO as BytesIO, StringIO as StringIO - - NativeStringIO = StringIO -else: - import cStringIO - from StringIO import StringIO as StringIO - - BytesIO = cStringIO.StringIO - NativeStringIO = BytesIO - -PY2: Any -WIN: Any -unichr: Any -text_type: Any -string_types: Any -integer_types: Any -iterkeys: Any -itervalues: Any -iteritems: Any -iterlists: Any -iterlistvalues: Any -int_to_byte: Any -iter_bytes: Any - -def fix_tuple_repr(obj): ... 
-def implements_iterator(cls): ... -def implements_to_string(cls): ... -def native_string_result(func): ... -def implements_bool(cls): ... - -range_type: Any - -def make_literal_wrapper(reference): ... -def normalize_string_tuple(tup): ... -def try_coerce_native(s): ... - -wsgi_get_bytes: Any - -def wsgi_decoding_dance(s, charset: Text = ..., errors: Text = ...): ... -def wsgi_encoding_dance(s, charset: Text = ..., errors: Text = ...): ... -def to_bytes(x, charset: Text = ..., errors: Text = ...): ... -def to_native(x, charset: Text = ..., errors: Text = ...): ... -def reraise(tp, value, tb: Any | None = ...): ... - -imap: Any -izip: Any -ifilter: Any - -def to_unicode(x, charset: Text = ..., errors: Text = ..., allow_none_charset: bool = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_internal.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_internal.pyi deleted file mode 100644 index 2deec4fd3db5..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_internal.pyi +++ /dev/null @@ -1,26 +0,0 @@ -from typing import Any - -class _Missing: - def __reduce__(self): ... - -class _DictAccessorProperty: - read_only: Any - name: Any - default: Any - load_func: Any - dump_func: Any - __doc__: Any - def __init__( - self, - name, - default: Any | None = ..., - load_func: Any | None = ..., - dump_func: Any | None = ..., - read_only: Any | None = ..., - doc: Any | None = ..., - ): ... - def __get__(self, obj, type: Any | None = ...): ... - def __set__(self, obj, value): ... - def __delete__(self, obj): ... - -def _easteregg(app: Any | None = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_reloader.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_reloader.pyi deleted file mode 100644 index 5e9f95866ae3..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/_reloader.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Any - -class ReloaderLoop: - name: Any - extra_files: Any - interval: float - def __init__(self, extra_files: Any | None = ..., interval: float = ...): ... - def run(self): ... - def restart_with_reloader(self): ... - def trigger_reload(self, filename): ... - def log_reload(self, filename): ... - -class StatReloaderLoop(ReloaderLoop): - name: Any - def run(self): ... - -class WatchdogReloaderLoop(ReloaderLoop): - observable_paths: Any - name: Any - observer_class: Any - event_handler: Any - should_reload: Any - def __init__(self, *args, **kwargs): ... - def trigger_reload(self, filename): ... - def run(self): ... - -reloader_loops: Any - -def run_with_reloader(main_func, extra_files: Any | None = ..., interval: float = ..., reloader_type: str = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/atom.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/atom.pyi deleted file mode 100644 index 8c73bb94bc9c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/atom.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Any - -XHTML_NAMESPACE: Any - -def format_iso8601(obj): ... - -class AtomFeed: - default_generator: Any - title: Any - title_type: Any - url: Any - feed_url: Any - id: Any - updated: Any - author: Any - icon: Any - logo: Any - rights: Any - rights_type: Any - subtitle: Any - subtitle_type: Any - generator: Any - links: Any - entries: Any - def __init__(self, title: Any | None = ..., entries: Any | None = ..., **kwargs): ... - def add(self, *args, **kwargs): ... 
- def generate(self): ... - def to_string(self): ... - def get_response(self): ... - def __call__(self, environ, start_response): ... - -class FeedEntry: - title: Any - title_type: Any - content: Any - content_type: Any - url: Any - id: Any - updated: Any - summary: Any - summary_type: Any - author: Any - published: Any - rights: Any - links: Any - categories: Any - xml_base: Any - def __init__(self, title: Any | None = ..., content: Any | None = ..., feed_url: Any | None = ..., **kwargs): ... - def generate(self): ... - def to_string(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/cache.pyi deleted file mode 100644 index 95841456b77f..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/cache.pyi +++ /dev/null @@ -1,92 +0,0 @@ -from typing import Any - -class BaseCache: - default_timeout: float - def __init__(self, default_timeout: float = ...): ... - def get(self, key): ... - def delete(self, key): ... - def get_many(self, *keys): ... - def get_dict(self, *keys): ... - def set(self, key, value, timeout: float | None = ...): ... - def add(self, key, value, timeout: float | None = ...): ... - def set_many(self, mapping, timeout: float | None = ...): ... - def delete_many(self, *keys): ... - def has(self, key): ... - def clear(self): ... - def inc(self, key, delta=...): ... - def dec(self, key, delta=...): ... - -class NullCache(BaseCache): ... - -class SimpleCache(BaseCache): - clear: Any - def __init__(self, threshold: int = ..., default_timeout: float = ...): ... - def get(self, key): ... - def set(self, key, value, timeout: float | None = ...): ... - def add(self, key, value, timeout: float | None = ...): ... - def delete(self, key): ... - def has(self, key): ... 
- -class MemcachedCache(BaseCache): - key_prefix: Any - def __init__(self, servers: Any | None = ..., default_timeout: float = ..., key_prefix: Any | None = ...): ... - def get(self, key): ... - def get_dict(self, *keys): ... - def add(self, key, value, timeout: float | None = ...): ... - def set(self, key, value, timeout: float | None = ...): ... - def get_many(self, *keys): ... - def set_many(self, mapping, timeout: float | None = ...): ... - def delete(self, key): ... - def delete_many(self, *keys): ... - def has(self, key): ... - def clear(self): ... - def inc(self, key, delta=...): ... - def dec(self, key, delta=...): ... - def import_preferred_memcache_lib(self, servers): ... - -GAEMemcachedCache: Any - -class RedisCache(BaseCache): - key_prefix: Any - def __init__( - self, - host: str = ..., - port: int = ..., - password: Any | None = ..., - db: int = ..., - default_timeout: float = ..., - key_prefix: Any | None = ..., - **kwargs, - ): ... - def dump_object(self, value): ... - def load_object(self, value): ... - def get(self, key): ... - def get_many(self, *keys): ... - def set(self, key, value, timeout: float | None = ...): ... - def add(self, key, value, timeout: float | None = ...): ... - def set_many(self, mapping, timeout: float | None = ...): ... - def delete(self, key): ... - def delete_many(self, *keys): ... - def has(self, key): ... - def clear(self): ... - def inc(self, key, delta=...): ... - def dec(self, key, delta=...): ... - -class FileSystemCache(BaseCache): - def __init__(self, cache_dir, threshold: int = ..., default_timeout: float = ..., mode: int = ...): ... - def clear(self): ... - def get(self, key): ... - def add(self, key, value, timeout: float | None = ...): ... - def set(self, key, value, timeout: float | None = ...): ... - def delete(self, key): ... - def has(self, key): ... - -class UWSGICache(BaseCache): - cache: Any - def __init__(self, default_timeout: float = ..., cache: str = ...): ... - def get(self, key): ... 
- def delete(self, key): ... - def set(self, key, value, timeout: float | None = ...): ... - def add(self, key, value, timeout: float | None = ...): ... - def clear(self): ... - def has(self, key): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/fixers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/fixers.pyi deleted file mode 100644 index 37097f471c9b..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/fixers.pyi +++ /dev/null @@ -1,35 +0,0 @@ -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import Any, Iterable, Mapping, Set, Text - -from ..middleware.proxy_fix import ProxyFix as ProxyFix - -class CGIRootFix(object): - app: WSGIApplication - app_root: Text - def __init__(self, app: WSGIApplication, app_root: Text = ...) -> None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... - -class LighttpdCGIRootFix(CGIRootFix): ... - -class PathInfoFromRequestUriFix(object): - app: WSGIApplication - def __init__(self, app: WSGIApplication) -> None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... - -class HeaderRewriterFix(object): - app: WSGIApplication - remove_headers: Set[Text] - add_headers: list[Text] - def __init__( - self, app: WSGIApplication, remove_headers: Iterable[Text] | None = ..., add_headers: Iterable[Text] | None = ... - ) -> None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... - -class InternetExplorerFix(object): - app: WSGIApplication - fix_vary: bool - fix_attach: bool - def __init__(self, app: WSGIApplication, fix_vary: bool = ..., fix_attach: bool = ...) -> None: ... - def fix_headers(self, environ: WSGIEnvironment, headers: Mapping[str, str], status: Any | None = ...) -> None: ... 
- def run_fixed(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/iterio.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/iterio.pyi deleted file mode 100644 index 08162fdcede7..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/iterio.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from typing import Any, Text - -greenlet: Any - -class IterIO: - def __new__(cls, obj, sentinel: Text | bytes = ...): ... - def __iter__(self): ... - def tell(self): ... - def isatty(self): ... - def seek(self, pos, mode: int = ...): ... - def truncate(self, size: Any | None = ...): ... - def write(self, s): ... - def writelines(self, list): ... - def read(self, n: int = ...): ... - def readlines(self, sizehint: int = ...): ... - def readline(self, length: Any | None = ...): ... - def flush(self): ... - def __next__(self): ... - -class IterI(IterIO): - sentinel: Any - def __new__(cls, func, sentinel: Text | bytes = ...): ... - closed: Any - def close(self): ... - def write(self, s): ... - def writelines(self, list): ... - def flush(self): ... - -class IterO(IterIO): - sentinel: Any - closed: Any - pos: Any - def __new__(cls, gen, sentinel: Text | bytes = ...): ... - def __iter__(self): ... - def close(self): ... - def seek(self, pos, mode: int = ...): ... - def read(self, n: int = ...): ... - def readline(self, length: Any | None = ...): ... - def readlines(self, sizehint: int = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/jsrouting.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/jsrouting.pyi deleted file mode 100644 index 46f1972df786..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/jsrouting.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Any - -def dumps(*args): ... -def render_template(name_parts, rules, converters): ... -def generate_map(map, name: str = ...): ... -def generate_adapter(adapter, name: str = ..., map_name: str = ...): ... -def js_to_url_function(converter): ... -def NumberConverter_js_to_url(conv): ... - -js_to_url_functions: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/limiter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/limiter.pyi deleted file mode 100644 index 0734a242a13f..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/limiter.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Any - -class StreamLimitMiddleware: - app: Any - maximum_size: Any - def __init__(self, app, maximum_size=...): ... - def __call__(self, environ, start_response): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/lint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/lint.pyi deleted file mode 100644 index 9a25daf5aab7..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/lint.pyi +++ /dev/null @@ -1 +0,0 @@ -from ..middleware.lint import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/profiler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/profiler.pyi deleted file mode 100644 index eb32ea461c17..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/profiler.pyi +++ /dev/null @@ -1,9 +0,0 @@ -from _typeshed import SupportsWrite -from typing import AnyStr, Generic, Tuple - -from ..middleware.profiler import * - -class MergeStream(Generic[AnyStr]): - streams: Tuple[SupportsWrite[AnyStr], ...] - def __init__(self, *streams: SupportsWrite[AnyStr]) -> None: ... - def write(self, data: AnyStr) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/securecookie.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/securecookie.pyi deleted file mode 100644 index 30f27b316410..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/securecookie.pyi +++ /dev/null @@ -1,39 +0,0 @@ -from hashlib import sha1 as _default_hash -from hmac import new as hmac -from typing import Any - -from werkzeug.contrib.sessions import ModificationTrackingDict - -class UnquoteError(Exception): ... - -class SecureCookie(ModificationTrackingDict[Any, Any]): - hash_method: Any - serialization_method: Any - quote_base64: Any - secret_key: Any - new: Any - def __init__(self, data: Any | None = ..., secret_key: Any | None = ..., new: bool = ...): ... - @property - def should_save(self): ... 
- @classmethod - def quote(cls, value): ... - @classmethod - def unquote(cls, value): ... - def serialize(self, expires: Any | None = ...): ... - @classmethod - def unserialize(cls, string, secret_key): ... - @classmethod - def load_cookie(cls, request, key: str = ..., secret_key: Any | None = ...): ... - def save_cookie( - self, - response, - key: str = ..., - expires: Any | None = ..., - session_expires: Any | None = ..., - max_age: Any | None = ..., - path: str = ..., - domain: Any | None = ..., - secure: Any | None = ..., - httponly: bool = ..., - force: bool = ..., - ): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/sessions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/sessions.pyi deleted file mode 100644 index 2d0bba57406c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/sessions.pyi +++ /dev/null @@ -1,77 +0,0 @@ -from typing import Any, Text, TypeVar - -from werkzeug.datastructures import CallbackDict - -_K = TypeVar("_K") -_V = TypeVar("_V") - -def generate_key(salt: Any | None = ...): ... - -class ModificationTrackingDict(CallbackDict[_K, _V]): - modified: Any - def __init__(self, *args, **kwargs): ... - def copy(self): ... - def __copy__(self): ... - -class Session(ModificationTrackingDict[_K, _V]): - sid: Any - new: Any - def __init__(self, data, sid, new: bool = ...): ... - @property - def should_save(self): ... - -class SessionStore: - session_class: Any - def __init__(self, session_class: Any | None = ...): ... - def is_valid_key(self, key): ... - def generate_key(self, salt: Any | None = ...): ... - def new(self): ... - def save(self, session): ... - def save_if_modified(self, session): ... - def delete(self, session): ... - def get(self, sid): ... 
- -class FilesystemSessionStore(SessionStore): - path: Any - filename_template: str - renew_missing: Any - mode: Any - def __init__( - self, - path: Any | None = ..., - filename_template: Text = ..., - session_class: Any | None = ..., - renew_missing: bool = ..., - mode: int = ..., - ): ... - def get_session_filename(self, sid): ... - def save(self, session): ... - def delete(self, session): ... - def get(self, sid): ... - def list(self): ... - -class SessionMiddleware: - app: Any - store: Any - cookie_name: Any - cookie_age: Any - cookie_expires: Any - cookie_path: Any - cookie_domain: Any - cookie_secure: Any - cookie_httponly: Any - environ_key: Any - def __init__( - self, - app, - store, - cookie_name: str = ..., - cookie_age: Any | None = ..., - cookie_expires: Any | None = ..., - cookie_path: str = ..., - cookie_domain: Any | None = ..., - cookie_secure: Any | None = ..., - cookie_httponly: bool = ..., - environ_key: str = ..., - ): ... - def __call__(self, environ, start_response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/testtools.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/testtools.pyi deleted file mode 100644 index da0f92d15565..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/testtools.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from werkzeug.wrappers import Response - -class ContentAccessors: - def xml(self): ... - def lxml(self): ... - def json(self): ... - -class TestResponse(Response, ContentAccessors): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/wrappers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/wrappers.pyi deleted file mode 100644 index 683eda0f22c7..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/contrib/wrappers.pyi +++ /dev/null @@ -1,27 +0,0 @@ -from typing import Any - -def is_known_charset(charset): ... - -class JSONRequestMixin: - def json(self): ... - -class ProtobufRequestMixin: - protobuf_check_initialization: Any - def parse_protobuf(self, proto_type): ... - -class RoutingArgsRequestMixin: - routing_args: Any - routing_vars: Any - -class ReverseSlashBehaviorRequestMixin: - def path(self): ... - def script_root(self): ... - -class DynamicCharsetRequestMixin: - default_charset: Any - def unknown_charset(self, charset): ... - def charset(self): ... - -class DynamicCharsetResponseMixin: - default_charset: Any - charset: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/datastructures.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/datastructures.pyi deleted file mode 100644 index bd35c652ec11..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/datastructures.pyi +++ /dev/null @@ -1,474 +0,0 @@ -import sys -from _typeshed import SupportsWrite -from typing import ( - IO, - Any, - Callable, - Container, - Dict, - Generic, - Iterable, - Iterator, - List, - Mapping, - MutableSet, - NoReturn, - Text, - Tuple, - Type, - TypeVar, - overload, -) - -if sys.version_info >= (3, 8): - from typing import SupportsIndex -else: - from typing_extensions import SupportsIndex - -_K = TypeVar("_K") -_V = TypeVar("_V") -_R = TypeVar("_R") -_D = TypeVar("_D") - -def is_immutable(self) -> NoReturn: ... -def iter_multi_items(mapping): ... -def native_itermethods(names): ... - -class ImmutableListMixin(Generic[_V]): - def __hash__(self) -> int: ... 
- def __reduce_ex__(self: _D, protocol) -> Tuple[Type[_D], list[_V]]: ... - def __delitem__(self, key: _V) -> NoReturn: ... - def __iadd__(self, other: Any) -> NoReturn: ... - def __imul__(self, other: Any) -> NoReturn: ... - def __setitem__(self, key: str, value: Any) -> NoReturn: ... - def append(self, item: Any) -> NoReturn: ... - def remove(self, item: Any) -> NoReturn: ... - def extend(self, iterable: Any) -> NoReturn: ... - def insert(self, pos: int, value: Any) -> NoReturn: ... - def pop(self, index: int = ...) -> NoReturn: ... - def reverse(self) -> NoReturn: ... - def sort(self, cmp: Any | None = ..., key: Any | None = ..., reverse: Any | None = ...) -> NoReturn: ... - -class ImmutableList(ImmutableListMixin[_V], List[_V]): ... # type: ignore - -class ImmutableDictMixin(object): - @classmethod - def fromkeys(cls, *args, **kwargs): ... - def __reduce_ex__(self, protocol): ... - def __hash__(self) -> int: ... - def setdefault(self, key, default: Any | None = ...): ... - def update(self, *args, **kwargs): ... - def pop(self, key, default: Any | None = ...): ... - def popitem(self): ... - def __setitem__(self, key, value): ... - def __delitem__(self, key): ... - def clear(self): ... - -class ImmutableMultiDictMixin(ImmutableDictMixin): - def __reduce_ex__(self, protocol): ... - def add(self, key, value): ... - def popitemlist(self): ... - def poplist(self, key): ... - def setlist(self, key, new_list): ... - def setlistdefault(self, key, default_list: Any | None = ...): ... - -class UpdateDictMixin(object): - on_update: Any - def setdefault(self, key, default: Any | None = ...): ... - def pop(self, key, default=...): ... - __setitem__: Any - __delitem__: Any - clear: Any - popitem: Any - update: Any - -class TypeConversionDict(Dict[_K, _V]): - @overload - def get(self, key: _K, *, type: None = ...) -> _V | None: ... - @overload - def get(self, key: _K, default: _D, type: None = ...) -> _V | _D: ... 
- @overload - def get(self, key: _K, *, type: Callable[[_V], _R]) -> _R | None: ... - @overload - def get(self, key: _K, default: _D, type: Callable[[_V], _R]) -> _R | _D: ... - -class ImmutableTypeConversionDict(ImmutableDictMixin, TypeConversionDict[_K, _V]): # type: ignore - def copy(self) -> TypeConversionDict[_K, _V]: ... - def __copy__(self) -> ImmutableTypeConversionDict[_K, _V]: ... - -class ViewItems: - def __init__(self, multi_dict, method, repr_name, *a, **kw): ... - def __iter__(self): ... - -class MultiDict(TypeConversionDict[_K, _V]): - def __init__(self, mapping: Any | None = ...): ... - def __getitem__(self, key): ... - def __setitem__(self, key, value): ... - def add(self, key, value): ... - def getlist(self, key, type: Any | None = ...): ... - def setlist(self, key, new_list): ... - def setdefault(self, key, default: Any | None = ...): ... - def setlistdefault(self, key, default_list: Any | None = ...): ... - def items(self, multi: bool = ...): ... - def lists(self): ... - def keys(self): ... - __iter__: Any - def values(self): ... - def listvalues(self): ... - def copy(self): ... - def deepcopy(self, memo: Any | None = ...): ... - def to_dict(self, flat: bool = ...): ... - def update(self, other_dict): ... - def pop(self, key, default=...): ... - def popitem(self): ... - def poplist(self, key): ... - def popitemlist(self): ... - def __copy__(self): ... - def __deepcopy__(self, memo): ... - -class _omd_bucket: - prev: Any - key: Any - value: Any - next: Any - def __init__(self, omd, key, value): ... - def unlink(self, omd): ... - -class OrderedMultiDict(MultiDict[_K, _V]): - def __init__(self, mapping: Any | None = ...): ... - def __eq__(self, other): ... - def __ne__(self, other): ... - def __reduce_ex__(self, protocol): ... - def __getitem__(self, key): ... - def __setitem__(self, key, value): ... - def __delitem__(self, key): ... - def keys(self): ... - __iter__: Any - def values(self): ... - def items(self, multi: bool = ...): ... 
- def lists(self): ... - def listvalues(self): ... - def add(self, key, value): ... - def getlist(self, key, type: Any | None = ...): ... - def setlist(self, key, new_list): ... - def setlistdefault(self, key, default_list: Any | None = ...): ... - def update(self, mapping): ... - def poplist(self, key): ... - def pop(self, key, default=...): ... - def popitem(self): ... - def popitemlist(self): ... - -class Headers(object): - def __init__(self, defaults: Any | None = ...): ... - def __getitem__(self, key, _get_mode: bool = ...): ... - def __eq__(self, other): ... - def __ne__(self, other): ... - @overload - def get(self, key: str, *, type: None = ...) -> str | None: ... - @overload - def get(self, key: str, default: _D, type: None = ...) -> str | _D: ... - @overload - def get(self, key: str, *, type: Callable[[str], _R]) -> _R | None: ... - @overload - def get(self, key: str, default: _D, type: Callable[[str], _R]) -> _R | _D: ... - @overload - def get(self, key: str, *, as_bytes: bool) -> Any: ... - @overload - def get(self, key: str, *, type: None, as_bytes: bool) -> Any: ... - @overload - def get(self, key: str, *, type: Callable[[Any], _R], as_bytes: bool) -> _R | None: ... - @overload - def get(self, key: str, default: Any, type: None, as_bytes: bool) -> Any: ... - @overload - def get(self, key: str, default: _D, type: Callable[[Any], _R], as_bytes: bool) -> _R | _D: ... - def getlist(self, key, type: Any | None = ..., as_bytes: bool = ...): ... - def get_all(self, name): ... - def items(self, lower: bool = ...): ... - def keys(self, lower: bool = ...): ... - def values(self): ... - def extend(self, iterable): ... - def __delitem__(self, key: Any) -> None: ... - def remove(self, key): ... - @overload - def pop(self, key: int | None = ...) -> str: ... # default is ignored, using it is an error - @overload - def pop(self, key: str) -> str: ... - @overload - def pop(self, key: str, default: str) -> str: ... 
- @overload - def pop(self, key: str, default: None) -> str | None: ... - def popitem(self): ... - def __contains__(self, key): ... - has_key: Any - def __iter__(self): ... - def __len__(self): ... - def add(self, _key, _value, **kw): ... - def add_header(self, _key, _value, **_kw): ... - def clear(self): ... - def set(self, _key, _value, **kw): ... - def setdefault(self, key, value): ... - def __setitem__(self, key, value): ... - def to_list(self, charset: Text = ...): ... - def to_wsgi_list(self): ... - def copy(self): ... - def __copy__(self): ... - -class ImmutableHeadersMixin: - def __delitem__(self, key: str) -> None: ... - def __setitem__(self, key, value): ... - set: Any - def add(self, *args, **kwargs): ... - remove: Any - add_header: Any - def extend(self, iterable): ... - def insert(self, pos, value): ... - @overload - def pop(self, key: int | None = ...) -> str: ... # default is ignored, using it is an error - @overload - def pop(self, key: str) -> str: ... - @overload - def pop(self, key: str, default: str) -> str: ... - @overload - def pop(self, key: str, default: None) -> str | None: ... - def popitem(self): ... - def setdefault(self, key, default): ... - -class EnvironHeaders(ImmutableHeadersMixin, Headers): - environ: Any - def __init__(self, environ): ... - def __eq__(self, other): ... - def __getitem__(self, key, _get_mode: bool = ...): ... - def __len__(self): ... - def __iter__(self): ... - def copy(self): ... - -class CombinedMultiDict(ImmutableMultiDictMixin, MultiDict[_K, _V]): # type: ignore - def __reduce_ex__(self, protocol): ... - dicts: Any - def __init__(self, dicts: Any | None = ...): ... - @classmethod - def fromkeys(cls): ... - def __getitem__(self, key): ... - def get(self, key, default: Any | None = ..., type: Any | None = ...): ... - def getlist(self, key, type: Any | None = ...): ... - def keys(self): ... - __iter__: Any - def items(self, multi: bool = ...): ... - def values(self): ... - def lists(self): ... 
- def listvalues(self): ... - def copy(self): ... - def to_dict(self, flat: bool = ...): ... - def __len__(self): ... - def __contains__(self, key): ... - has_key: Any - -class FileMultiDict(MultiDict[_K, _V]): - def add_file(self, name, file, filename: Any | None = ..., content_type: Any | None = ...): ... - -class ImmutableDict(ImmutableDictMixin, Dict[_K, _V]): # type: ignore - def copy(self): ... - def __copy__(self): ... - -class ImmutableMultiDict(ImmutableMultiDictMixin, MultiDict[_K, _V]): # type: ignore - def copy(self): ... - def __copy__(self): ... - -class ImmutableOrderedMultiDict(ImmutableMultiDictMixin, OrderedMultiDict[_K, _V]): # type: ignore - def copy(self): ... - def __copy__(self): ... - -class Accept(ImmutableList[Tuple[str, float]]): - provided: bool - def __init__(self, values: None | Accept | Iterable[Tuple[str, float]] = ...) -> None: ... - @overload - def __getitem__(self, key: SupportsIndex) -> Tuple[str, float]: ... - @overload - def __getitem__(self, s: slice) -> list[Tuple[str, float]]: ... - @overload - def __getitem__(self, key: str) -> float: ... - def quality(self, key: str) -> float: ... - def __contains__(self, value: str) -> bool: ... # type: ignore - def index(self, key: str | Tuple[str, float]) -> int: ... # type: ignore - def find(self, key: str | Tuple[str, float]) -> int: ... - def values(self) -> Iterator[str]: ... - def to_header(self) -> str: ... - @overload - def best_match(self, matches: Iterable[str], default: None = ...) -> str | None: ... - @overload - def best_match(self, matches: Iterable[str], default: _D) -> str | _D: ... - @property - def best(self) -> str | None: ... - -class MIMEAccept(Accept): - @property - def accept_html(self) -> bool: ... - @property - def accept_xhtml(self) -> bool: ... - @property - def accept_json(self) -> bool: ... - -class LanguageAccept(Accept): ... -class CharsetAccept(Accept): ... - -def cache_property(key, empty, type): ... 
- -class _CacheControl(UpdateDictMixin, Dict[str, Any]): - no_cache: Any - no_store: Any - max_age: Any - no_transform: Any - on_update: Any - provided: Any - def __init__(self, values=..., on_update: Any | None = ...): ... - def to_header(self): ... - -class RequestCacheControl(ImmutableDictMixin, _CacheControl): # type: ignore - max_stale: Any - min_fresh: Any - no_transform: Any - only_if_cached: Any - -class ResponseCacheControl(_CacheControl): - public: Any - private: Any - must_revalidate: Any - proxy_revalidate: Any - s_maxage: Any - -class CallbackDict(UpdateDictMixin, Dict[_K, _V]): - on_update: Any - def __init__(self, initial: Any | None = ..., on_update: Any | None = ...): ... - -class HeaderSet(MutableSet[str]): - on_update: Any - def __init__(self, headers: Any | None = ..., on_update: Any | None = ...): ... - def add(self, header): ... - def remove(self, header): ... - def update(self, iterable): ... - def discard(self, header): ... - def find(self, header): ... - def index(self, header): ... - def clear(self): ... - def as_set(self, preserve_casing: bool = ...): ... - def to_header(self): ... - def __getitem__(self, idx): ... - def __delitem__(self, idx): ... - def __setitem__(self, idx, value): ... - def __contains__(self, header): ... - def __len__(self): ... - def __iter__(self): ... - def __nonzero__(self): ... - -class ETags(Container[str], Iterable[str]): - star_tag: Any - def __init__(self, strong_etags: Any | None = ..., weak_etags: Any | None = ..., star_tag: bool = ...): ... - def as_set(self, include_weak: bool = ...): ... - def is_weak(self, etag): ... - def contains_weak(self, etag): ... - def contains(self, etag): ... - def contains_raw(self, etag): ... - def to_header(self): ... - def __call__(self, etag: Any | None = ..., data: Any | None = ..., include_weak: bool = ...): ... - def __bool__(self): ... - __nonzero__: Any - def __iter__(self): ... - def __contains__(self, etag): ... 
- -class IfRange: - etag: Any - date: Any - def __init__(self, etag: Any | None = ..., date: Any | None = ...): ... - def to_header(self): ... - -class Range: - units: Any - ranges: Any - def __init__(self, units, ranges): ... - def range_for_length(self, length): ... - def make_content_range(self, length): ... - def to_header(self): ... - def to_content_range_header(self, length): ... - -class ContentRange: - on_update: Any - units: str | None - start: Any - stop: Any - length: Any - def __init__(self, units: str | None, start, stop, length: Any | None = ..., on_update: Any | None = ...): ... - def set(self, start, stop, length: Any | None = ..., units: str | None = ...): ... - def unset(self) -> None: ... - def to_header(self): ... - def __nonzero__(self): ... - __bool__: Any - -class Authorization(ImmutableDictMixin, Dict[str, Any]): # type: ignore - type: str - def __init__(self, auth_type: str, data: Mapping[str, Any] | None = ...) -> None: ... - @property - def username(self) -> str | None: ... - @property - def password(self) -> str | None: ... - @property - def realm(self) -> str | None: ... - @property - def nonce(self) -> str | None: ... - @property - def uri(self) -> str | None: ... - @property - def nc(self) -> str | None: ... - @property - def cnonce(self) -> str | None: ... - @property - def response(self) -> str | None: ... - @property - def opaque(self) -> str | None: ... - @property - def qop(self) -> str | None: ... - -class WWWAuthenticate(UpdateDictMixin, Dict[str, Any]): - on_update: Any - def __init__(self, auth_type: Any | None = ..., values: Any | None = ..., on_update: Any | None = ...): ... - def set_basic(self, realm: str = ...): ... - def set_digest(self, realm, nonce, qop=..., opaque: Any | None = ..., algorithm: Any | None = ..., stale: bool = ...): ... - def to_header(self): ... - @staticmethod - def auth_property(name, doc: Any | None = ...): ... 
- type: Any - realm: Any - domain: Any - nonce: Any - opaque: Any - algorithm: Any - qop: Any - stale: Any - -class FileStorage(object): - name: Text | None - stream: IO[bytes] - filename: Text | None - headers: Headers - def __init__( - self, - stream: IO[bytes] | None = ..., - filename: None | Text | bytes = ..., - name: Text | None = ..., - content_type: Text | None = ..., - content_length: int | None = ..., - headers: Headers | None = ..., - ): ... - @property - def content_type(self) -> Text | None: ... - @property - def content_length(self) -> int: ... - @property - def mimetype(self) -> str: ... - @property - def mimetype_params(self) -> dict[str, str]: ... - def save(self, dst: Text | SupportsWrite[bytes], buffer_size: int = ...): ... - def close(self) -> None: ... - def __nonzero__(self) -> bool: ... - def __bool__(self) -> bool: ... - def __getattr__(self, name: Text) -> Any: ... - def __iter__(self) -> Iterator[bytes]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/__init__.pyi deleted file mode 100644 index 87a62c339f54..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/__init__.pyi +++ /dev/null @@ -1,51 +0,0 @@ -from typing import Any - -from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response - -PIN_TIME: Any - -def hash_pin(pin): ... -def get_machine_id(): ... - -class _ConsoleFrame: - console: Any - id: Any - def __init__(self, namespace): ... - -def get_pin_and_cookie_name(app): ... 
- -class DebuggedApplication: - app: Any - evalex: Any - frames: Any - tracebacks: Any - request_key: Any - console_path: Any - console_init_func: Any - show_hidden_frames: Any - secret: Any - pin_logging: Any - pin: Any - def __init__( - self, - app, - evalex: bool = ..., - request_key: str = ..., - console_path: str = ..., - console_init_func: Any | None = ..., - show_hidden_frames: bool = ..., - lodgeit_url: Any | None = ..., - pin_security: bool = ..., - pin_logging: bool = ..., - ): ... - @property - def pin_cookie_name(self): ... - def debug_application(self, environ, start_response): ... - def execute_command(self, request, command, frame): ... - def display_console(self, request): ... - def paste_traceback(self, request, traceback): ... - def get_resource(self, request, filename): ... - def check_pin_trust(self, environ): ... - def pin_auth(self, request): ... - def log_pin_request(self): ... - def __call__(self, environ, start_response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/console.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/console.pyi deleted file mode 100644 index e1a121973670..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/console.pyi +++ /dev/null @@ -1,44 +0,0 @@ -import code -from typing import Any - -class HTMLStringO: - def __init__(self): ... - def isatty(self): ... - def close(self): ... - def flush(self): ... - def seek(self, n, mode: int = ...): ... - def readline(self): ... - def reset(self): ... - def write(self, x): ... - def writelines(self, x): ... - -class ThreadedStream: - @staticmethod - def push(): ... - @staticmethod - def fetch(): ... - @staticmethod - def displayhook(obj): ... - def __setattr__(self, name, value): ... - def __dir__(self): ... - def __getattribute__(self, name): ... - -class _ConsoleLoader: - def __init__(self): ... - def register(self, code, source): ... 
- def get_source_by_code(self, code): ... - -class _InteractiveConsole(code.InteractiveInterpreter): - globals: Any - more: Any - buffer: Any - def __init__(self, globals, locals): ... - def runsource(self, source): ... - def runcode(self, code): ... - def showtraceback(self): ... - def showsyntaxerror(self, filename: Any | None = ...): ... - def write(self, data): ... - -class Console: - def __init__(self, globals: Any | None = ..., locals: Any | None = ...): ... - def eval(self, code): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/repr.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/repr.pyi deleted file mode 100644 index 073fd26128cb..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/repr.pyi +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Any - -deque: Any -missing: Any -RegexType: Any -HELP_HTML: Any -OBJECT_DUMP_HTML: Any - -def debug_repr(obj): ... -def dump(obj=...): ... - -class _Helper: - def __call__(self, topic: Any | None = ...): ... - -helper: Any - -class DebugReprGenerator: - def __init__(self): ... - list_repr: Any - tuple_repr: Any - set_repr: Any - frozenset_repr: Any - deque_repr: Any - def regex_repr(self, obj): ... - def string_repr(self, obj, limit: int = ...): ... - def dict_repr(self, d, recursive, limit: int = ...): ... - def object_repr(self, obj): ... - def dispatch_repr(self, obj, recursive): ... - def fallback_repr(self): ... - def repr(self, obj): ... - def dump_object(self, obj): ... - def dump_locals(self, d): ... - def render_object_dump(self, items, title, repr: Any | None = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/tbtools.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/tbtools.pyi deleted file mode 100644 index 45bcf7048a7b..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/debug/tbtools.pyi +++ /dev/null @@ -1,63 +0,0 @@ -from typing import Any - -UTF8_COOKIE: Any -system_exceptions: Any -HEADER: Any -FOOTER: Any -PAGE_HTML: Any -CONSOLE_HTML: Any -SUMMARY_HTML: Any -FRAME_HTML: Any -SOURCE_LINE_HTML: Any - -def render_console_html(secret, evalex_trusted: bool = ...): ... -def get_current_traceback(ignore_system_exceptions: bool = ..., show_hidden_frames: bool = ..., skip: int = ...): ... - -class Line: - lineno: Any - code: Any - in_frame: Any - current: Any - def __init__(self, lineno, code): ... - def classes(self): ... - def render(self): ... - -class Traceback: - exc_type: Any - exc_value: Any - exception_type: Any - frames: Any - def __init__(self, exc_type, exc_value, tb): ... - def filter_hidden_frames(self): ... - def is_syntax_error(self): ... - def exception(self): ... - def log(self, logfile: Any | None = ...): ... - def paste(self): ... - def render_summary(self, include_title: bool = ...): ... - def render_full(self, evalex: bool = ..., secret: Any | None = ..., evalex_trusted: bool = ...): ... - def generate_plaintext_traceback(self): ... - def plaintext(self): ... - id: Any - -class Frame: - lineno: Any - function_name: Any - locals: Any - globals: Any - filename: Any - module: Any - loader: Any - code: Any - hide: Any - info: Any - def __init__(self, exc_type, exc_value, tb): ... - def render(self): ... - def render_line_context(self): ... - def get_annotated_lines(self): ... - def eval(self, code, mode: str = ...): ... - def sourcelines(self): ... - def get_context_lines(self, context: int = ...): ... - @property - def current_line(self): ... - def console(self): ... 
- id: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/exceptions.pyi deleted file mode 100644 index 53b0a0c05626..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/exceptions.pyi +++ /dev/null @@ -1,180 +0,0 @@ -import datetime -from _typeshed.wsgi import StartResponse, WSGIEnvironment -from typing import Any, Iterable, NoReturn, Protocol, Text, Tuple, Type - -from werkzeug.wrappers import Response - -class _EnvironContainer(Protocol): - @property - def environ(self) -> WSGIEnvironment: ... - -class HTTPException(Exception): - code: int | None - description: Text | None - response: Response | None - def __init__(self, description: Text | None = ..., response: Response | None = ...) -> None: ... - @classmethod - def wrap(cls, exception: Type[Exception], name: str | None = ...) -> Any: ... - @property - def name(self) -> str: ... - def get_description(self, environ: WSGIEnvironment | None = ...) -> Text: ... - def get_body(self, environ: WSGIEnvironment | None = ...) -> Text: ... - def get_headers(self, environ: WSGIEnvironment | None = ...) -> list[Tuple[str, str]]: ... - def get_response(self, environ: WSGIEnvironment | _EnvironContainer | None = ...) -> Response: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... - -default_exceptions: dict[int, Type[HTTPException]] - -class BadRequest(HTTPException): - code: int - description: Text - -class ClientDisconnected(BadRequest): ... -class SecurityError(BadRequest): ... -class BadHost(BadRequest): ... - -class Unauthorized(HTTPException): - code: int - description: Text - www_authenticate: Iterable[object] | None - def __init__( - self, - description: Text | None = ..., - response: Response | None = ..., - www_authenticate: None | Tuple[object, ...] | list[object] | object = ..., - ) -> None: ... 
- -class Forbidden(HTTPException): - code: int - description: Text - -class NotFound(HTTPException): - code: int - description: Text - -class MethodNotAllowed(HTTPException): - code: int - description: Text - valid_methods: Any - def __init__(self, valid_methods: Any | None = ..., description: Any | None = ...): ... - -class NotAcceptable(HTTPException): - code: int - description: Text - -class RequestTimeout(HTTPException): - code: int - description: Text - -class Conflict(HTTPException): - code: int - description: Text - -class Gone(HTTPException): - code: int - description: Text - -class LengthRequired(HTTPException): - code: int - description: Text - -class PreconditionFailed(HTTPException): - code: int - description: Text - -class RequestEntityTooLarge(HTTPException): - code: int - description: Text - -class RequestURITooLarge(HTTPException): - code: int - description: Text - -class UnsupportedMediaType(HTTPException): - code: int - description: Text - -class RequestedRangeNotSatisfiable(HTTPException): - code: int - description: Text - length: Any - units: str - def __init__(self, length: Any | None = ..., units: str = ..., description: Any | None = ...): ... - -class ExpectationFailed(HTTPException): - code: int - description: Text - -class ImATeapot(HTTPException): - code: int - description: Text - -class UnprocessableEntity(HTTPException): - code: int - description: Text - -class Locked(HTTPException): - code: int - description: Text - -class FailedDependency(HTTPException): - code: int - description: Text - -class PreconditionRequired(HTTPException): - code: int - description: Text - -class _RetryAfter(HTTPException): - retry_after: None | int | datetime.datetime - def __init__( - self, description: Text | None = ..., response: Response | None = ..., retry_after: None | int | datetime.datetime = ... - ) -> None: ... 
- -class TooManyRequests(_RetryAfter): - code: int - description: Text - -class RequestHeaderFieldsTooLarge(HTTPException): - code: int - description: Text - -class UnavailableForLegalReasons(HTTPException): - code: int - description: Text - -class InternalServerError(HTTPException): - def __init__( - self, description: Text | None = ..., response: Response | None = ..., original_exception: Exception | None = ... - ) -> None: ... - code: int - description: Text - -class NotImplemented(HTTPException): - code: int - description: Text - -class BadGateway(HTTPException): - code: int - description: Text - -class ServiceUnavailable(_RetryAfter): - code: int - description: Text - -class GatewayTimeout(HTTPException): - code: int - description: Text - -class HTTPVersionNotSupported(HTTPException): - code: int - description: Text - -class Aborter: - mapping: Any - def __init__(self, mapping: Any | None = ..., extra: Any | None = ...) -> None: ... - def __call__(self, code: int | Response, *args: Any, **kwargs: Any) -> NoReturn: ... - -def abort(status: int | Response, *args: Any, **kwargs: Any) -> NoReturn: ... - -class BadRequestKeyError(BadRequest, KeyError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/filesystem.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/filesystem.pyi deleted file mode 100644 index 58695fa28f1b..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/filesystem.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from typing import Any - -has_likely_buggy_unicode_filesystem: Any - -class BrokenFilesystemWarning(RuntimeWarning, UnicodeWarning): ... - -def get_filesystem_encoding(): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/formparser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/formparser.pyi deleted file mode 100644 index 1820a1213323..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/formparser.pyi +++ /dev/null @@ -1,87 +0,0 @@ -from _typeshed.wsgi import WSGIEnvironment -from typing import IO, Any, Callable, Generator, Iterable, Mapping, NoReturn, Optional, Protocol, Text, Tuple, TypeVar - -from .datastructures import Headers - -_Dict = Any -_ParseFunc = Callable[[IO[bytes], str, Optional[int], Mapping[str, str]], Tuple[IO[bytes], _Dict, _Dict]] - -_F = TypeVar("_F", bound=Callable[..., Any]) - -class _StreamFactory(Protocol): - def __call__( - self, total_content_length: int | None, filename: str, content_type: str, content_length: int | None = ... - ) -> IO[bytes]: ... - -def default_stream_factory( - total_content_length: int | None, filename: str, content_type: str, content_length: int | None = ... -) -> IO[bytes]: ... -def parse_form_data( - environ: WSGIEnvironment, - stream_factory: _StreamFactory | None = ..., - charset: Text = ..., - errors: Text = ..., - max_form_memory_size: int | None = ..., - max_content_length: int | None = ..., - cls: Callable[[], _Dict] | None = ..., - silent: bool = ..., -) -> Tuple[IO[bytes], _Dict, _Dict]: ... -def exhaust_stream(f: _F) -> _F: ... - -class FormDataParser(object): - stream_factory: _StreamFactory - charset: Text - errors: Text - max_form_memory_size: int | None - max_content_length: int | None - cls: Callable[[], _Dict] - silent: bool - def __init__( - self, - stream_factory: _StreamFactory | None = ..., - charset: Text = ..., - errors: Text = ..., - max_form_memory_size: int | None = ..., - max_content_length: int | None = ..., - cls: Callable[[], _Dict] | None = ..., - silent: bool = ..., - ) -> None: ... 
- def get_parse_func(self, mimetype: str, options: Any) -> _ParseFunc | None: ... - def parse_from_environ(self, environ: WSGIEnvironment) -> Tuple[IO[bytes], _Dict, _Dict]: ... - def parse( - self, stream: IO[bytes], mimetype: Text, content_length: int | None, options: Mapping[str, str] | None = ... - ) -> Tuple[IO[bytes], _Dict, _Dict]: ... - parse_functions: dict[Text, _ParseFunc] - -def is_valid_multipart_boundary(boundary: str) -> bool: ... -def parse_multipart_headers(iterable: Iterable[Text | bytes]) -> Headers: ... - -class MultiPartParser(object): - charset: Text - errors: Text - max_form_memory_size: int | None - stream_factory: _StreamFactory - cls: Callable[[], _Dict] - buffer_size: int - def __init__( - self, - stream_factory: _StreamFactory | None = ..., - charset: Text = ..., - errors: Text = ..., - max_form_memory_size: int | None = ..., - cls: Callable[[], _Dict] | None = ..., - buffer_size: int = ..., - ) -> None: ... - def fail(self, message: Text) -> NoReturn: ... - def get_part_encoding(self, headers: Mapping[str, str]) -> str | None: ... - def get_part_charset(self, headers: Mapping[str, str]) -> Text: ... - def start_file_streaming( - self, filename: Text | bytes, headers: Mapping[str, str], total_content_length: int | None - ) -> Tuple[Text, IO[bytes]]: ... - def in_memory_threshold_reached(self, bytes: Any) -> NoReturn: ... - def validate_boundary(self, boundary: str | None) -> None: ... - def parse_lines( - self, file: Any, boundary: bytes, content_length: int, cap_at_buffer: bool = ... - ) -> Generator[Tuple[str, Any], None, None]: ... - def parse_parts(self, file: Any, boundary: bytes, content_length: int) -> Generator[Tuple[str, Any], None, None]: ... - def parse(self, file: Any, boundary: bytes, content_length: int) -> Tuple[_Dict, _Dict]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/http.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/http.pyi deleted file mode 100644 index 187b2a52f20e..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/http.pyi +++ /dev/null @@ -1,120 +0,0 @@ -import sys -from _typeshed.wsgi import WSGIEnvironment -from datetime import datetime, timedelta -from typing import Any, Callable, Iterable, Mapping, SupportsInt, Text, Tuple, Type, TypeVar, Union, overload - -from .datastructures import ( - Accept, - Authorization, - ContentRange, - ETags, - Headers, - HeaderSet, - IfRange, - Range, - RequestCacheControl, - TypeConversionDict, - WWWAuthenticate, -) - -if sys.version_info >= (3, 0): - _Str = str - _ToBytes = Union[bytes, bytearray, memoryview, str] - _ETagData = Union[bytes, bytearray, memoryview] -else: - _Str = TypeVar("_Str", str, unicode) - _ToBytes = Union[bytes, bytearray, buffer, unicode] - _ETagData = Union[str, unicode, bytearray, buffer, memoryview] - -_T = TypeVar("_T") -_U = TypeVar("_U") - -HTTP_STATUS_CODES: dict[int, str] - -def wsgi_to_bytes(data: bytes | Text) -> bytes: ... -def bytes_to_wsgi(data: bytes) -> str: ... -def quote_header_value(value: Any, extra_chars: str = ..., allow_token: bool = ...) -> str: ... -def unquote_header_value(value: _Str, is_filename: bool = ...) -> _Str: ... -def dump_options_header(header: _Str | None, options: Mapping[_Str, Any]) -> _Str: ... -def dump_header(iterable: Iterable[Any] | dict[_Str, Any], allow_token: bool = ...) -> _Str: ... -def parse_list_header(value: _Str) -> list[_Str]: ... -@overload -def parse_dict_header(value: bytes | Text) -> dict[Text, Text | None]: ... -@overload -def parse_dict_header(value: bytes | Text, cls: Type[_T]) -> _T: ... -@overload -def parse_options_header(value: None, multiple: bool = ...) -> Tuple[str, dict[str, str | None]]: ... 
-@overload -def parse_options_header(value: _Str) -> Tuple[_Str, dict[_Str, _Str | None]]: ... - -# actually returns Tuple[_Str, dict[_Str, _Str | None], ...] -@overload -def parse_options_header(value: _Str, multiple: bool = ...) -> Tuple[Any, ...]: ... -@overload -def parse_accept_header(value: Text | None) -> Accept: ... -@overload -def parse_accept_header(value: _Str | None, cls: Callable[[list[Tuple[str, float]] | None], _T]) -> _T: ... -@overload -def parse_cache_control_header( - value: None | bytes | Text, on_update: Callable[[RequestCacheControl], Any] | None = ... -) -> RequestCacheControl: ... -@overload -def parse_cache_control_header( - value: None | bytes | Text, on_update: _T, cls: Callable[[dict[Text, Text | None], _T], _U] -) -> _U: ... -@overload -def parse_cache_control_header(value: None | bytes | Text, *, cls: Callable[[dict[Text, Text | None], None], _U]) -> _U: ... -def parse_set_header(value: Text, on_update: Callable[[HeaderSet], Any] | None = ...) -> HeaderSet: ... -def parse_authorization_header(value: None | bytes | Text) -> Authorization | None: ... -def parse_www_authenticate_header( - value: None | bytes | Text, on_update: Callable[[WWWAuthenticate], Any] | None = ... -) -> WWWAuthenticate: ... -def parse_if_range_header(value: Text | None) -> IfRange: ... -def parse_range_header(value: Text | None, make_inclusive: bool = ...) -> Range | None: ... -def parse_content_range_header( - value: Text | None, on_update: Callable[[ContentRange], Any] | None = ... -) -> ContentRange | None: ... -def quote_etag(etag: _Str, weak: bool = ...) -> _Str: ... -def unquote_etag(etag: _Str | None) -> Tuple[_Str | None, _Str | None]: ... -def parse_etags(value: Text | None) -> ETags: ... -def generate_etag(data: _ETagData) -> str: ... -def parse_date(value: str | None) -> datetime | None: ... -def cookie_date(expires: None | float | datetime = ...) -> str: ... -def http_date(timestamp: None | float | datetime = ...) -> str: ... 
-def parse_age(value: SupportsInt | None = ...) -> timedelta | None: ... -def dump_age(age: None | timedelta | SupportsInt) -> str | None: ... -def is_resource_modified( - environ: WSGIEnvironment, - etag: Text | None = ..., - data: _ETagData | None = ..., - last_modified: None | Text | datetime = ..., - ignore_if_range: bool = ..., -) -> bool: ... -def remove_entity_headers(headers: list[Tuple[Text, Text]] | Headers, allowed: Iterable[Text] = ...) -> None: ... -def remove_hop_by_hop_headers(headers: list[Tuple[Text, Text]] | Headers) -> None: ... -def is_entity_header(header: Text) -> bool: ... -def is_hop_by_hop_header(header: Text) -> bool: ... -@overload -def parse_cookie( - header: None | WSGIEnvironment | Text | bytes, charset: Text = ..., errors: Text = ... -) -> TypeConversionDict[Any, Any]: ... -@overload -def parse_cookie( - header: None | WSGIEnvironment | Text | bytes, - charset: Text = ..., - errors: Text = ..., - cls: Callable[[Iterable[Tuple[Text, Text]]], _T] | None = ..., -) -> _T: ... -def dump_cookie( - key: _ToBytes, - value: _ToBytes = ..., - max_age: None | float | timedelta = ..., - expires: None | Text | float | datetime = ..., - path: None | Tuple[Any, ...] | str | bytes = ..., - domain: None | str | bytes = ..., - secure: bool = ..., - httponly: bool = ..., - charset: Text = ..., - sync_expires: bool = ..., -) -> str: ... -def is_byte_range_valid(start: int | None, stop: int | None, length: int | None) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/local.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/local.pyi deleted file mode 100644 index 11d133b725bf..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/local.pyi +++ /dev/null @@ -1,100 +0,0 @@ -from typing import Any - -def release_local(local): ... - -class Local: - def __init__(self): ... - def __iter__(self): ... - def __call__(self, proxy): ... 
- def __release_local__(self): ... - def __getattr__(self, name): ... - def __setattr__(self, name, value): ... - def __delattr__(self, name): ... - -class LocalStack: - def __init__(self): ... - def __release_local__(self): ... - def _get__ident_func__(self): ... - def _set__ident_func__(self, value): ... - __ident_func__: Any - def __call__(self): ... - def push(self, obj): ... - def pop(self): ... - @property - def top(self): ... - -class LocalManager: - locals: Any - ident_func: Any - def __init__(self, locals: Any | None = ..., ident_func: Any | None = ...): ... - def get_ident(self): ... - def cleanup(self): ... - def make_middleware(self, app): ... - def middleware(self, func): ... - -class LocalProxy: - def __init__(self, local, name: Any | None = ...): ... - @property - def __dict__(self): ... - def __bool__(self): ... - def __unicode__(self): ... - def __dir__(self): ... - def __getattr__(self, name): ... - def __setitem__(self, key, value): ... - def __delitem__(self, key): ... - __getslice__: Any - def __setslice__(self, i, j, seq): ... - def __delslice__(self, i, j): ... 
- __setattr__: Any - __delattr__: Any - __lt__: Any - __le__: Any - __eq__: Any - __ne__: Any - __gt__: Any - __ge__: Any - __cmp__: Any - __hash__: Any - __call__: Any - __len__: Any - __getitem__: Any - __iter__: Any - __contains__: Any - __add__: Any - __sub__: Any - __mul__: Any - __floordiv__: Any - __mod__: Any - __divmod__: Any - __pow__: Any - __lshift__: Any - __rshift__: Any - __and__: Any - __xor__: Any - __or__: Any - __div__: Any - __truediv__: Any - __neg__: Any - __pos__: Any - __abs__: Any - __invert__: Any - __complex__: Any - __int__: Any - __long__: Any - __float__: Any - __oct__: Any - __hex__: Any - __index__: Any - __coerce__: Any - __enter__: Any - __exit__: Any - __radd__: Any - __rsub__: Any - __rmul__: Any - __rdiv__: Any - __rtruediv__: Any - __rfloordiv__: Any - __rmod__: Any - __rdivmod__: Any - __copy__: Any - __deepcopy__: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/dispatcher.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/dispatcher.pyi deleted file mode 100644 index 1b8a28a45fa7..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/dispatcher.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import Iterable, Mapping, Text - -class DispatcherMiddleware(object): - app: WSGIApplication - mounts: Mapping[Text, WSGIApplication] - def __init__(self, app: WSGIApplication, mounts: Mapping[Text, WSGIApplication] | None = ...) -> None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/http_proxy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/http_proxy.pyi deleted file mode 100644 index 0285b67401e1..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/http_proxy.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import Any, Iterable, Mapping, MutableMapping, Text - -_Opts = Mapping[Text, Any] -_MutableOpts = MutableMapping[Text, Any] - -class ProxyMiddleware(object): - app: WSGIApplication - targets: dict[Text, _MutableOpts] - def __init__( - self, app: WSGIApplication, targets: Mapping[Text, _MutableOpts], chunk_size: int = ..., timeout: int = ... - ) -> None: ... - def proxy_to(self, opts: _Opts, path: Text, prefix: Text) -> WSGIApplication: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/lint.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/lint.pyi deleted file mode 100644 index f308b2fdf4a1..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/lint.pyi +++ /dev/null @@ -1,62 +0,0 @@ -import sys -from _typeshed import SupportsWrite -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import Any, Iterable, Iterator, Mapping, Protocol, Tuple - -from ..datastructures import Headers - -class WSGIWarning(Warning): ... -class HTTPWarning(Warning): ... - -def check_string(context: str, obj: object, stacklevel: int = ...) -> None: ... - -class _SupportsReadEtc(Protocol): - def read(self, __size: int = ...) -> bytes: ... - def readline(self, __size: int = ...) -> bytes: ... - def __iter__(self) -> Iterator[bytes]: ... 
- def close(self) -> Any: ... - -class InputStream(object): - def __init__(self, stream: _SupportsReadEtc) -> None: ... - def read(self, __size: int = ...) -> bytes: ... - def readline(self, __size: int = ...) -> bytes: ... - def __iter__(self) -> Iterator[bytes]: ... - def close(self) -> None: ... - -class _SupportsWriteEtc(Protocol): - def write(self, __s: str) -> Any: ... - def flush(self) -> Any: ... - def close(self) -> Any: ... - -class ErrorStream(object): - def __init__(self, stream: _SupportsWriteEtc) -> None: ... - def write(self, s: str) -> None: ... - def flush(self) -> None: ... - def writelines(self, seq: Iterable[str]) -> None: ... - def close(self) -> None: ... - -class GuardedWrite(object): - def __init__(self, write: SupportsWrite[str], chunks: list[int]) -> None: ... - def __call__(self, s: str) -> None: ... - -class GuardedIterator(object): - closed: bool - headers_set: bool - chunks: list[int] - def __init__(self, iterator: Iterable[str], headers_set: bool, chunks: list[int]) -> None: ... - def __iter__(self) -> GuardedIterator: ... - if sys.version_info >= (3, 0): - def __next__(self) -> str: ... - else: - def next(self) -> str: ... - def close(self) -> None: ... - -class LintMiddleware(object): - def __init__(self, app: WSGIApplication) -> None: ... - def check_environ(self, environ: WSGIEnvironment) -> None: ... - def check_start_response( - self, status: str, headers: list[Tuple[str, str]], exc_info: Tuple[Any, ...] | None - ) -> Tuple[int, Headers]: ... - def check_headers(self, headers: Mapping[str, str]) -> None: ... - def check_iterator(self, app_iter: Iterable[bytes]) -> None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> GuardedIterator: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/profiler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/profiler.pyi deleted file mode 100644 index 10b073dd1a83..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/profiler.pyi +++ /dev/null @@ -1,14 +0,0 @@ -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import IO, Iterable, Text, Tuple - -class ProfilerMiddleware(object): - def __init__( - self, - app: WSGIApplication, - stream: IO[str] = ..., - sort_by: Tuple[Text, Text] = ..., - restrictions: Iterable[str | float] = ..., - profile_dir: Text | None = ..., - filename_format: Text = ..., - ) -> None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> list[bytes]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/proxy_fix.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/proxy_fix.pyi deleted file mode 100644 index 3d166e6960d0..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/proxy_fix.pyi +++ /dev/null @@ -1,23 +0,0 @@ -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import Iterable - -class ProxyFix(object): - app: WSGIApplication - x_for: int - x_proto: int - x_host: int - x_port: int - x_prefix: int - num_proxies: int - def __init__( - self, - app: WSGIApplication, - num_proxies: int | None = ..., - x_for: int = ..., - x_proto: int = ..., - x_host: int = ..., - x_port: int = ..., - x_prefix: int = ..., - ) -> None: ... - def get_remote_addr(self, forwarded_for: Iterable[str]) -> str | None: ... - def __call__(self, environ: WSGIEnvironment, start_response: StartResponse) -> Iterable[bytes]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/shared_data.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/shared_data.pyi deleted file mode 100644 index 46ad68782dc8..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/middleware/shared_data.pyi +++ /dev/null @@ -1,29 +0,0 @@ -import datetime -from _typeshed.wsgi import StartResponse, WSGIApplication, WSGIEnvironment -from typing import IO, Callable, Iterable, Mapping, Optional, Text, Tuple, Union - -_V = Union[Tuple[Text, Text], Text] - -_Opener = Callable[[], Tuple[IO[bytes], datetime.datetime, int]] -_Loader = Callable[[Optional[Text]], Union[Tuple[None, None], Tuple[Text, _Opener]]] - -class SharedDataMiddleware(object): - app: WSGIApplication - exports: list[Tuple[Text, _Loader]] - cache: bool - cache_timeout: float - def __init__( - self, - app: WSGIApplication, - exports: Mapping[Text, _V] | Iterable[Tuple[Text, _V]], - disallow: Text | None = ..., - cache: bool = ..., - cache_timeout: float = ..., - fallback_mimetype: Text = ..., - ) -> None: ... - def is_allowed(self, filename: Text) -> bool: ... - def get_file_loader(self, filename: Text) -> _Loader: ... - def get_package_loader(self, package: Text, package_path: Text) -> _Loader: ... - def get_directory_loader(self, directory: Text) -> _Loader: ... - def generate_etag(self, mtime: datetime.datetime, file_size: int, real_filename: Text | bytes) -> str: ... - def __call__(self, environment: WSGIEnvironment, start_response: StartResponse) -> WSGIApplication: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/posixemulation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/posixemulation.pyi deleted file mode 100644 index 334cb3d0afa7..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/posixemulation.pyi +++ /dev/null @@ -1,8 +0,0 @@ -from typing import Any - -from ._compat import to_unicode as to_unicode -from .filesystem import get_filesystem_encoding as get_filesystem_encoding - -can_rename_open_file: Any - -def rename(src, dst): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/routing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/routing.pyi deleted file mode 100644 index 51bd18363bc0..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/routing.pyi +++ /dev/null @@ -1,219 +0,0 @@ -from typing import Any, Text - -from werkzeug.exceptions import HTTPException - -def parse_converter_args(argstr): ... -def parse_rule(rule): ... - -class RoutingException(Exception): ... - -class RequestRedirect(HTTPException, RoutingException): - code: Any - new_url: Any - def __init__(self, new_url): ... - def get_response(self, environ): ... - -class RequestSlash(RoutingException): ... - -class RequestAliasRedirect(RoutingException): - matched_values: Any - def __init__(self, matched_values): ... - -class BuildError(RoutingException, LookupError): - endpoint: Any - values: Any - method: Any - adapter: MapAdapter | None - def __init__(self, endpoint, values, method, adapter: MapAdapter | None = ...) -> None: ... - @property - def suggested(self) -> Rule | None: ... - def closest_rule(self, adapter: MapAdapter | None) -> Rule | None: ... - -class ValidationError(ValueError): ... - -class RuleFactory: - def get_rules(self, map): ... - -class Subdomain(RuleFactory): - subdomain: Any - rules: Any - def __init__(self, subdomain, rules): ... 
- def get_rules(self, map): ... - -class Submount(RuleFactory): - path: Any - rules: Any - def __init__(self, path, rules): ... - def get_rules(self, map): ... - -class EndpointPrefix(RuleFactory): - prefix: Any - rules: Any - def __init__(self, prefix, rules): ... - def get_rules(self, map): ... - -class RuleTemplate: - rules: Any - def __init__(self, rules): ... - def __call__(self, *args, **kwargs): ... - -class RuleTemplateFactory(RuleFactory): - rules: Any - context: Any - def __init__(self, rules, context): ... - def get_rules(self, map): ... - -class Rule(RuleFactory): - rule: Any - is_leaf: Any - map: Any - strict_slashes: Any - subdomain: Any - host: Any - defaults: Any - build_only: Any - alias: Any - methods: Any - endpoint: Any - redirect_to: Any - arguments: Any - def __init__( - self, - string, - defaults: Any | None = ..., - subdomain: Any | None = ..., - methods: Any | None = ..., - build_only: bool = ..., - endpoint: Any | None = ..., - strict_slashes: Any | None = ..., - redirect_to: Any | None = ..., - alias: bool = ..., - host: Any | None = ..., - ): ... - def empty(self): ... - def get_empty_kwargs(self): ... - def get_rules(self, map): ... - def refresh(self): ... - def bind(self, map, rebind: bool = ...): ... - def get_converter(self, variable_name, converter_name, args, kwargs): ... - def compile(self): ... - def match(self, path, method: Any | None = ...): ... - def build(self, values, append_unknown: bool = ...): ... - def provides_defaults_for(self, rule): ... - def suitable_for(self, values, method: Any | None = ...): ... - def match_compare_key(self): ... - def build_compare_key(self): ... - def __eq__(self, other): ... - def __ne__(self, other): ... - -class BaseConverter: - regex: Any - weight: Any - map: Any - def __init__(self, map): ... - def to_python(self, value): ... - def to_url(self, value) -> str: ... 
- -class UnicodeConverter(BaseConverter): - regex: Any - def __init__(self, map, minlength: int = ..., maxlength: Any | None = ..., length: Any | None = ...): ... - -class AnyConverter(BaseConverter): - regex: Any - def __init__(self, map, *items): ... - -class PathConverter(BaseConverter): - regex: Any - weight: Any - -class NumberConverter(BaseConverter): - weight: Any - fixed_digits: Any - min: Any - max: Any - def __init__(self, map, fixed_digits: int = ..., min: Any | None = ..., max: Any | None = ...): ... - def to_python(self, value): ... - def to_url(self, value) -> str: ... - -class IntegerConverter(NumberConverter): - regex: Any - num_convert: Any - -class FloatConverter(NumberConverter): - regex: Any - num_convert: Any - def __init__(self, map, min: Any | None = ..., max: Any | None = ...): ... - -class UUIDConverter(BaseConverter): - regex: Any - def to_python(self, value): ... - def to_url(self, value) -> str: ... - -DEFAULT_CONVERTERS: Any - -class Map: - default_converters: Any - default_subdomain: Any - charset: Text - encoding_errors: Text - strict_slashes: Any - redirect_defaults: Any - host_matching: Any - converters: Any - sort_parameters: Any - sort_key: Any - def __init__( - self, - rules: Any | None = ..., - default_subdomain: str = ..., - charset: Text = ..., - strict_slashes: bool = ..., - redirect_defaults: bool = ..., - converters: Any | None = ..., - sort_parameters: bool = ..., - sort_key: Any | None = ..., - encoding_errors: Text = ..., - host_matching: bool = ..., - ): ... - def is_endpoint_expecting(self, endpoint, *arguments): ... - def iter_rules(self, endpoint: Any | None = ...): ... - def add(self, rulefactory): ... - def bind( - self, - server_name, - script_name: Any | None = ..., - subdomain: Any | None = ..., - url_scheme: str = ..., - default_method: str = ..., - path_info: Any | None = ..., - query_args: Any | None = ..., - ): ... 
- def bind_to_environ(self, environ, server_name: Any | None = ..., subdomain: Any | None = ...): ... - def update(self): ... - -class MapAdapter: - map: Any - server_name: Any - script_name: Any - subdomain: Any - url_scheme: Any - path_info: Any - default_method: Any - query_args: Any - def __init__( - self, map, server_name, script_name, subdomain, url_scheme, path_info, default_method, query_args: Any | None = ... - ): ... - def dispatch(self, view_func, path_info: Any | None = ..., method: Any | None = ..., catch_http_exceptions: bool = ...): ... - def match( - self, path_info: Any | None = ..., method: Any | None = ..., return_rule: bool = ..., query_args: Any | None = ... - ): ... - def test(self, path_info: Any | None = ..., method: Any | None = ...): ... - def allowed_methods(self, path_info: Any | None = ...): ... - def get_host(self, domain_part): ... - def get_default_redirect(self, rule, method, values, query_args): ... - def encode_query_args(self, query_args): ... - def make_redirect_url(self, path_info, query_args: Any | None = ..., domain_part: Any | None = ...): ... - def make_alias_redirect_url(self, path, endpoint, values, method, query_args): ... - def build( - self, endpoint, values: Any | None = ..., method: Any | None = ..., force_external: bool = ..., append_unknown: bool = ... - ): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/script.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/script.pyi deleted file mode 100644 index 697be6727f3b..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/script.pyi +++ /dev/null @@ -1,24 +0,0 @@ -from typing import Any - -argument_types: Any -converters: Any - -def run(namespace: Any | None = ..., action_prefix: str = ..., args: Any | None = ...): ... -def fail(message, code: int = ...): ... -def find_actions(namespace, action_prefix): ... -def print_usage(actions): ... -def analyse_action(func): ... 
-def make_shell(init_func: Any | None = ..., banner: Any | None = ..., use_ipython: bool = ...): ... -def make_runserver( - app_factory, - hostname: str = ..., - port: int = ..., - use_reloader: bool = ..., - use_debugger: bool = ..., - use_evalex: bool = ..., - threaded: bool = ..., - processes: int = ..., - static_files: Any | None = ..., - extra_files: Any | None = ..., - ssl_context: Any | None = ..., -): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/security.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/security.pyi deleted file mode 100644 index 7e9e90763cea..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/security.pyi +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any - -SALT_CHARS: Any -DEFAULT_PBKDF2_ITERATIONS: Any - -def pbkdf2_hex(data, salt, iterations=..., keylen: Any | None = ..., hashfunc: Any | None = ...): ... -def pbkdf2_bin(data, salt, iterations=..., keylen: Any | None = ..., hashfunc: Any | None = ...): ... -def safe_str_cmp(a, b): ... -def gen_salt(length): ... -def generate_password_hash(password, method: str = ..., salt_length: int = ...): ... -def check_password_hash(pwhash, password): ... -def safe_join(directory, filename): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/serving.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/serving.pyi deleted file mode 100644 index b27ae56d3eba..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/serving.pyi +++ /dev/null @@ -1,140 +0,0 @@ -import sys -from typing import Any - -if sys.version_info >= (3, 0): - from http.server import BaseHTTPRequestHandler, HTTPServer - from socketserver import ThreadingMixIn -else: - from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer - from SocketServer import ThreadingMixIn - -if sys.platform == "win32": - class ForkingMixIn(object): ... 
- -else: - if sys.version_info >= (3, 0): - from socketserver import ForkingMixIn as ForkingMixIn - else: - from SocketServer import ForkingMixIn as ForkingMixIn - -class _SslDummy: - def __getattr__(self, name): ... - -ssl: Any -LISTEN_QUEUE: Any -can_open_by_fd: Any - -class WSGIRequestHandler(BaseHTTPRequestHandler): - @property - def server_version(self): ... - def make_environ(self): ... - environ: Any - close_connection: Any - def run_wsgi(self): ... - def handle(self): ... - def initiate_shutdown(self): ... - def connection_dropped(self, error, environ: Any | None = ...): ... - raw_requestline: Any - def handle_one_request(self): ... - def send_response(self, code, message: Any | None = ...): ... - def version_string(self): ... - def address_string(self): ... - def port_integer(self): ... - def log_request(self, code: object = ..., size: object = ...) -> None: ... - def log_error(self, *args): ... - def log_message(self, format, *args): ... - def log(self, type, message, *args): ... - -BaseRequestHandler: Any - -def generate_adhoc_ssl_pair(cn: Any | None = ...): ... -def make_ssl_devcert(base_path, host: Any | None = ..., cn: Any | None = ...): ... -def generate_adhoc_ssl_context(): ... -def load_ssl_context(cert_file, pkey_file: Any | None = ..., protocol: Any | None = ...): ... - -class _SSLContext: - def __init__(self, protocol): ... - def load_cert_chain(self, certfile, keyfile: Any | None = ..., password: Any | None = ...): ... - def wrap_socket(self, sock, **kwargs): ... - -def is_ssl_error(error: Any | None = ...): ... -def select_ip_version(host, port): ... 
- -class BaseWSGIServer(HTTPServer): - multithread: Any - multiprocess: Any - request_queue_size: Any - address_family: Any - app: Any - passthrough_errors: Any - shutdown_signal: Any - host: Any - port: Any - socket: Any - server_address: Any - ssl_context: Any - def __init__( - self, - host, - port, - app, - handler: Any | None = ..., - passthrough_errors: bool = ..., - ssl_context: Any | None = ..., - fd: Any | None = ..., - ): ... - def log(self, type, message, *args): ... - def serve_forever(self): ... - def handle_error(self, request, client_address): ... - def get_request(self): ... - -class ThreadedWSGIServer(ThreadingMixIn, BaseWSGIServer): - multithread: Any - daemon_threads: Any - -class ForkingWSGIServer(ForkingMixIn, BaseWSGIServer): - multiprocess: Any - max_children: Any - def __init__( - self, - host, - port, - app, - processes: int = ..., - handler: Any | None = ..., - passthrough_errors: bool = ..., - ssl_context: Any | None = ..., - fd: Any | None = ..., - ): ... - -def make_server( - host: Any | None = ..., - port: Any | None = ..., - app: Any | None = ..., - threaded: bool = ..., - processes: int = ..., - request_handler: Any | None = ..., - passthrough_errors: bool = ..., - ssl_context: Any | None = ..., - fd: Any | None = ..., -): ... -def is_running_from_reloader(): ... -def run_simple( - hostname, - port, - application, - use_reloader: bool = ..., - use_debugger: bool = ..., - use_evalex: bool = ..., - extra_files: Any | None = ..., - reloader_interval: int = ..., - reloader_type: str = ..., - threaded: bool = ..., - processes: int = ..., - request_handler: Any | None = ..., - static_files: Any | None = ..., - passthrough_errors: bool = ..., - ssl_context: Any | None = ..., -): ... -def run_with_reloader(*args, **kwargs): ... -def main(): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/test.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/test.pyi deleted file mode 100644 index 79b93fb51cbb..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/test.pyi +++ /dev/null @@ -1,169 +0,0 @@ -import sys -from _typeshed.wsgi import WSGIEnvironment -from typing import Any, Generic, Text, Tuple, Type, TypeVar, overload -from typing_extensions import Literal - -if sys.version_info >= (3, 0): - from http.cookiejar import CookieJar - from urllib.request import Request as U2Request -else: - from cookielib import CookieJar - from urllib2 import Request as U2Request - -def stream_encode_multipart(values, use_tempfile: int = ..., threshold=..., boundary: Any | None = ..., charset: Text = ...): ... -def encode_multipart(values, boundary: Any | None = ..., charset: Text = ...): ... -def File(fd, filename: Any | None = ..., mimetype: Any | None = ...): ... - -class _TestCookieHeaders: - headers: Any - def __init__(self, headers): ... - def getheaders(self, name): ... - def get_all(self, name, default: Any | None = ...): ... - -class _TestCookieResponse: - headers: Any - def __init__(self, headers): ... - def info(self): ... - -class _TestCookieJar(CookieJar): - def inject_wsgi(self, environ): ... - def extract_wsgi(self, environ, headers): ... 
- -class EnvironBuilder: - server_protocol: Any - wsgi_version: Any - request_class: Any - charset: Text - path: Any - base_url: Any - query_string: Any - args: Any - method: Any - headers: Any - content_type: Any - errors_stream: Any - multithread: Any - multiprocess: Any - run_once: Any - environ_base: Any - environ_overrides: Any - input_stream: Any - content_length: Any - closed: Any - def __init__( - self, - path: str = ..., - base_url: Any | None = ..., - query_string: Any | None = ..., - method: str = ..., - input_stream: Any | None = ..., - content_type: Any | None = ..., - content_length: Any | None = ..., - errors_stream: Any | None = ..., - multithread: bool = ..., - multiprocess: bool = ..., - run_once: bool = ..., - headers: Any | None = ..., - data: Any | None = ..., - environ_base: Any | None = ..., - environ_overrides: Any | None = ..., - charset: Text = ..., - ): ... - form: Any - files: Any - @property - def server_name(self) -> str: ... - @property - def server_port(self) -> int: ... - def __del__(self) -> None: ... - def close(self) -> None: ... - def get_environ(self) -> WSGIEnvironment: ... - def get_request(self, cls: Any | None = ...): ... - -class ClientRedirectError(Exception): ... - -# Response type for the client below. -# By default _R is Tuple[Iterable[Any], Text | int, datastructures.Headers] -_R = TypeVar("_R") - -class Client(Generic[_R]): - application: Any - response_wrapper: Type[_R] | None - cookie_jar: Any - allow_subdomain_redirects: Any - def __init__( - self, application, response_wrapper: Type[_R] | None = ..., use_cookies: bool = ..., allow_subdomain_redirects: bool = ... - ): ... - def set_cookie( - self, - server_name, - key, - value: str = ..., - max_age: Any | None = ..., - expires: Any | None = ..., - path: str = ..., - domain: Any | None = ..., - secure: Any | None = ..., - httponly: bool = ..., - charset: Text = ..., - ): ... - def delete_cookie(self, server_name, key, path: str = ..., domain: Any | None = ...): ... 
- def run_wsgi_app(self, environ, buffered: bool = ...): ... - def resolve_redirect(self, response, new_location, environ, buffered: bool = ...): ... - @overload - def open(self, *args, as_tuple: Literal[True], **kwargs) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def open(self, *args, as_tuple: Literal[False] = ..., **kwargs) -> _R: ... - @overload - def open(self, *args, as_tuple: bool, **kwargs) -> Any: ... - @overload - def get(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def get(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def get(self, *args, as_tuple: bool, **kw) -> Any: ... - @overload - def patch(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def patch(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def patch(self, *args, as_tuple: bool, **kw) -> Any: ... - @overload - def post(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def post(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def post(self, *args, as_tuple: bool, **kw) -> Any: ... - @overload - def head(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def head(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def head(self, *args, as_tuple: bool, **kw) -> Any: ... - @overload - def put(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def put(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def put(self, *args, as_tuple: bool, **kw) -> Any: ... - @overload - def delete(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def delete(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def delete(self, *args, as_tuple: bool, **kw) -> Any: ... 
- @overload - def options(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def options(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def options(self, *args, as_tuple: bool, **kw) -> Any: ... - @overload - def trace(self, *args, as_tuple: Literal[True], **kw) -> Tuple[WSGIEnvironment, _R]: ... - @overload - def trace(self, *args, as_tuple: Literal[False] = ..., **kw) -> _R: ... - @overload - def trace(self, *args, as_tuple: bool, **kw) -> Any: ... - -def create_environ(*args, **kwargs): ... -def run_wsgi_app(app, environ, buffered: bool = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/testapp.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/testapp.pyi deleted file mode 100644 index a074482bd8e5..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/testapp.pyi +++ /dev/null @@ -1,10 +0,0 @@ -from typing import Any - -from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response - -logo: Any -TEMPLATE: Any - -def iter_sys_path(): ... -def render_testapp(req): ... -def test_app(environ, start_response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/urls.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/urls.pyi deleted file mode 100644 index 62f4d374b313..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/urls.pyi +++ /dev/null @@ -1,94 +0,0 @@ -from typing import Any, NamedTuple, Text - -class _URLTuple(NamedTuple): - scheme: Any - netloc: Any - path: Any - query: Any - fragment: Any - -class BaseURL(_URLTuple): - def replace(self, **kwargs): ... - @property - def host(self): ... - @property - def ascii_host(self): ... - @property - def port(self): ... - @property - def auth(self): ... - @property - def username(self): ... - @property - def raw_username(self): ... 
- @property - def password(self): ... - @property - def raw_password(self): ... - def decode_query(self, *args, **kwargs): ... - def join(self, *args, **kwargs): ... - def to_url(self): ... - def decode_netloc(self): ... - def to_uri_tuple(self): ... - def to_iri_tuple(self): ... - def get_file_location(self, pathformat: Any | None = ...): ... - -class URL(BaseURL): - def encode_netloc(self): ... - def encode(self, charset: Text = ..., errors: Text = ...): ... - -class BytesURL(BaseURL): - def encode_netloc(self): ... - def decode(self, charset: Text = ..., errors: Text = ...): ... - -def url_parse(url, scheme: Any | None = ..., allow_fragments: bool = ...): ... -def url_quote(string, charset: Text = ..., errors: Text = ..., safe: str = ..., unsafe: str = ...): ... -def url_quote_plus(string, charset: Text = ..., errors: Text = ..., safe: str = ...): ... -def url_unparse(components): ... -def url_unquote(string, charset: Text = ..., errors: Text = ..., unsafe: str = ...): ... -def url_unquote_plus(s, charset: Text = ..., errors: Text = ...): ... -def url_fix(s, charset: Text = ...): ... -def uri_to_iri(uri, charset: Text = ..., errors: Text = ...): ... -def iri_to_uri(iri, charset: Text = ..., errors: Text = ..., safe_conversion: bool = ...): ... -def url_decode( - s, - charset: Text = ..., - decode_keys: bool = ..., - include_empty: bool = ..., - errors: Text = ..., - separator: str = ..., - cls: Any | None = ..., -): ... -def url_decode_stream( - stream, - charset: Text = ..., - decode_keys: bool = ..., - include_empty: bool = ..., - errors: Text = ..., - separator: str = ..., - cls: Any | None = ..., - limit: Any | None = ..., - return_iterator: bool = ..., -): ... -def url_encode( - obj, charset: Text = ..., encode_keys: bool = ..., sort: bool = ..., key: Any | None = ..., separator: bytes = ... -): ... 
-def url_encode_stream( - obj, - stream: Any | None = ..., - charset: Text = ..., - encode_keys: bool = ..., - sort: bool = ..., - key: Any | None = ..., - separator: bytes = ..., -): ... -def url_join(base, url, allow_fragments: bool = ...): ... - -class Href: - base: Any - charset: Text - sort: Any - key: Any - def __init__(self, base: str = ..., charset: Text = ..., sort: bool = ..., key: Any | None = ...): ... - def __getattr__(self, name): ... - def __call__(self, *path, **query): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/useragents.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/useragents.pyi deleted file mode 100644 index 702067046911..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/useragents.pyi +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Any - -class UserAgentParser: - platforms: Any - browsers: Any - def __init__(self): ... - def __call__(self, user_agent): ... - -class UserAgent: - string: Any - platform: str | None - browser: str | None - version: str | None - language: str | None - def __init__(self, environ_or_string): ... - def to_header(self): ... - def __nonzero__(self): ... - __bool__: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/utils.pyi deleted file mode 100644 index 5b4f507dea98..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/utils.pyi +++ /dev/null @@ -1,59 +0,0 @@ -from typing import Any, Text, Type, TypeVar, overload - -from werkzeug._internal import _DictAccessorProperty -from werkzeug.wrappers import Response - -class cached_property(property): - __name__: Any - __module__: Any - __doc__: Any - func: Any - def __init__(self, func, name: Any | None = ..., doc: Any | None = ...): ... - def __set__(self, obj, value): ... 
- def __get__(self, obj, type: Any | None = ...): ... - -class environ_property(_DictAccessorProperty): - read_only: Any - def lookup(self, obj): ... - -class header_property(_DictAccessorProperty): - def lookup(self, obj): ... - -class HTMLBuilder: - def __init__(self, dialect): ... - def __call__(self, s): ... - def __getattr__(self, tag): ... - -html: Any -xhtml: Any - -def get_content_type(mimetype, charset): ... -def format_string(string, context): ... -def secure_filename(filename: Text) -> Text: ... -def escape(s, quote: Any | None = ...): ... -def unescape(s): ... - -# 'redirect' returns a werkzeug Response, unless you give it -# another Response type to use instead. -_RC = TypeVar("_RC", bound=Response) - -@overload -def redirect(location: str, code: int = ..., Response: None = ...) -> Response: ... -@overload -def redirect(location: str, code: int = ..., Response: Type[_RC] = ...) -> _RC: ... -def append_slash_redirect(environ, code: int = ...): ... -def import_string(import_name, silent: bool = ...): ... -def find_modules(import_path, include_packages: bool = ..., recursive: bool = ...): ... -def validate_arguments(func, args, kwargs, drop_extra: bool = ...): ... -def bind_arguments(func, args, kwargs): ... - -class ArgumentValidationError(ValueError): - missing: Any - extra: Any - extra_positional: Any - def __init__(self, missing: Any | None = ..., extra: Any | None = ..., extra_positional: Any | None = ...): ... - -class ImportStringError(ImportError): - import_name: Any - exception: Any - def __init__(self, import_name, exception): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/wrappers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/wrappers.pyi deleted file mode 100644 index ef11750febb2..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/wrappers.pyi +++ /dev/null @@ -1,274 +0,0 @@ -from _typeshed.wsgi import InputStream, WSGIEnvironment -from datetime import datetime, timedelta -from typing import Any, Callable, Iterable, Iterator, Mapping, MutableMapping, Sequence, Text, Tuple, Type, TypeVar, overload -from typing_extensions import Literal - -from .datastructures import ( - Accept, - Authorization, - CharsetAccept, - CombinedMultiDict, - EnvironHeaders, - Headers, - HeaderSet, - ImmutableMultiDict, - ImmutableTypeConversionDict, - LanguageAccept, - MIMEAccept, - MultiDict, -) -from .useragents import UserAgent - -class BaseRequest: - charset: str - encoding_errors: str - max_content_length: int | None - max_form_memory_size: int - parameter_storage_class: Type[Any] - list_storage_class: Type[Any] - dict_storage_class: Type[Any] - form_data_parser_class: Type[Any] - trusted_hosts: Sequence[Text] | None - disable_data_descriptor: Any - environ: WSGIEnvironment = ... - shallow: Any - def __init__(self, environ: WSGIEnvironment, populate_request: bool = ..., shallow: bool = ...) -> None: ... - @property - def url_charset(self) -> str: ... - @classmethod - def from_values(cls, *args, **kwargs) -> BaseRequest: ... - @classmethod - def application(cls, f): ... - @property - def want_form_data_parsed(self): ... - def make_form_data_parser(self): ... - def close(self) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type, exc_value, tb): ... - @property - def stream(self) -> InputStream: ... - input_stream: InputStream - args: ImmutableMultiDict[Any, Any] - @property - def data(self) -> bytes: ... 
- @overload - def get_data(self, cache: bool = ..., as_text: Literal[False] = ..., parse_form_data: bool = ...) -> bytes: ... - @overload - def get_data(self, cache: bool, as_text: Literal[True], parse_form_data: bool = ...) -> Text: ... - @overload - def get_data(self, *, as_text: Literal[True], parse_form_data: bool = ...) -> Text: ... - @overload - def get_data(self, cache: bool, as_text: bool, parse_form_data: bool = ...) -> Any: ... - @overload - def get_data(self, *, as_text: bool, parse_form_data: bool = ...) -> Any: ... - form: ImmutableMultiDict[Any, Any] - values: CombinedMultiDict[Any, Any] - files: MultiDict[Any, Any] - @property - def cookies(self) -> ImmutableTypeConversionDict[str, str]: ... - headers: EnvironHeaders - path: Text - full_path: Text - script_root: Text - url: Text - base_url: Text - url_root: Text - host_url: Text - host: Text - query_string: bytes - method: Text - @property - def access_route(self) -> Sequence[str]: ... - @property - def remote_addr(self) -> str: ... - remote_user: Text - scheme: str - is_xhr: bool - is_secure: bool - is_multithread: bool - is_multiprocess: bool - is_run_once: bool - -_OnCloseT = TypeVar("_OnCloseT", bound=Callable[[], Any]) -_SelfT = TypeVar("_SelfT", bound=BaseResponse) - -class BaseResponse: - charset: str - default_status: int - default_mimetype: str | None - implicit_sequence_conversion: bool - autocorrect_location_header: bool - automatically_set_content_length: bool - headers: Headers - status_code: int - status: str - direct_passthrough: bool - response: Iterable[bytes] - def __init__( - self, - response: str | bytes | bytearray | Iterable[str] | Iterable[bytes] | None = ..., - status: Text | int | None = ..., - headers: Headers | Mapping[Text, Text] | Sequence[Tuple[Text, Text]] | None = ..., - mimetype: Text | None = ..., - content_type: Text | None = ..., - direct_passthrough: bool = ..., - ) -> None: ... - def call_on_close(self, func: _OnCloseT) -> _OnCloseT: ... 
- @classmethod - def force_type(cls: Type[_SelfT], response: object, environ: WSGIEnvironment | None = ...) -> _SelfT: ... - @classmethod - def from_app(cls: Type[_SelfT], app: Any, environ: WSGIEnvironment, buffered: bool = ...) -> _SelfT: ... - @overload - def get_data(self, as_text: Literal[False] = ...) -> bytes: ... - @overload - def get_data(self, as_text: Literal[True]) -> Text: ... - @overload - def get_data(self, as_text: bool) -> Any: ... - def set_data(self, value: bytes | Text) -> None: ... - data: Any - def calculate_content_length(self) -> int | None: ... - def make_sequence(self) -> None: ... - def iter_encoded(self) -> Iterator[bytes]: ... - def set_cookie( - self, - key: str, - value: str | bytes = ..., - max_age: float | timedelta | None = ..., - expires: int | datetime | None = ..., - path: str = ..., - domain: str | None = ..., - secure: bool = ..., - httponly: bool = ..., - samesite: str | None = ..., - ) -> None: ... - def delete_cookie(self, key, path: str = ..., domain: Any | None = ...): ... - @property - def is_streamed(self) -> bool: ... - @property - def is_sequence(self) -> bool: ... - def close(self) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type, exc_value, tb): ... - # The no_etag argument if fictional, but required for compatibility with - # ETagResponseMixin - def freeze(self, no_etag: bool = ...) -> None: ... - def get_wsgi_headers(self, environ): ... - def get_app_iter(self, environ): ... - def get_wsgi_response(self, environ): ... - def __call__(self, environ, start_response): ... - -class AcceptMixin(object): - @property - def accept_mimetypes(self) -> MIMEAccept: ... - @property - def accept_charsets(self) -> CharsetAccept: ... - @property - def accept_encodings(self) -> Accept: ... - @property - def accept_languages(self) -> LanguageAccept: ... - -class ETagRequestMixin: - @property - def cache_control(self): ... - @property - def if_match(self): ... - @property - def if_none_match(self): ... 
- @property - def if_modified_since(self): ... - @property - def if_unmodified_since(self): ... - @property - def if_range(self): ... - @property - def range(self): ... - -class UserAgentMixin: - @property - def user_agent(self) -> UserAgent: ... - -class AuthorizationMixin: - @property - def authorization(self) -> Authorization | None: ... - -class StreamOnlyMixin: - disable_data_descriptor: Any - want_form_data_parsed: Any - -class ETagResponseMixin: - @property - def cache_control(self): ... - status_code: Any - def make_conditional(self, request_or_environ, accept_ranges: bool = ..., complete_length: Any | None = ...): ... - def add_etag(self, overwrite: bool = ..., weak: bool = ...): ... - def set_etag(self, etag, weak: bool = ...): ... - def get_etag(self): ... - def freeze(self, no_etag: bool = ...) -> None: ... - accept_ranges: Any - content_range: Any - -class ResponseStream: - mode: Any - response: Any - closed: Any - def __init__(self, response): ... - def write(self, value): ... - def writelines(self, seq): ... - def close(self): ... - def flush(self): ... - def isatty(self): ... - @property - def encoding(self): ... - -class ResponseStreamMixin: - @property - def stream(self) -> ResponseStream: ... - -class CommonRequestDescriptorsMixin: - @property - def content_type(self) -> str | None: ... - @property - def content_length(self) -> int | None: ... - @property - def content_encoding(self) -> str | None: ... - @property - def content_md5(self) -> str | None: ... - @property - def referrer(self) -> str | None: ... - @property - def date(self) -> datetime | None: ... - @property - def max_forwards(self) -> int | None: ... - @property - def mimetype(self) -> str: ... - @property - def mimetype_params(self) -> Mapping[str, str]: ... - @property - def pragma(self) -> HeaderSet: ... - -class CommonResponseDescriptorsMixin: - mimetype: str | None = ... - @property - def mimetype_params(self) -> MutableMapping[str, str]: ... - location: str | None = ... 
- age: Any = ... # get: datetime.timedelta | None - content_type: str | None = ... - content_length: int | None = ... - content_location: str | None = ... - content_encoding: str | None = ... - content_md5: str | None = ... - date: Any = ... # get: datetime.datetime | None - expires: Any = ... # get: datetime.datetime | None - last_modified: Any = ... # get: datetime.datetime | None - retry_after: Any = ... # get: datetime.datetime | None - vary: str | None = ... - content_language: str | None = ... - allow: str | None = ... - -class WWWAuthenticateMixin: - @property - def www_authenticate(self): ... - -class Request(BaseRequest, AcceptMixin, ETagRequestMixin, UserAgentMixin, AuthorizationMixin, CommonRequestDescriptorsMixin): ... -class PlainRequest(StreamOnlyMixin, Request): ... -class Response(BaseResponse, ETagResponseMixin, ResponseStreamMixin, CommonResponseDescriptorsMixin, WWWAuthenticateMixin): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/wsgi.pyi b/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/wsgi.pyi deleted file mode 100644 index a0bf5ab9b0ca..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/Werkzeug/werkzeug/wsgi.pyi +++ /dev/null @@ -1,74 +0,0 @@ -from _typeshed import SupportsRead -from _typeshed.wsgi import InputStream, WSGIEnvironment -from typing import Any, Iterable, Text - -from .middleware.dispatcher import DispatcherMiddleware as DispatcherMiddleware -from .middleware.http_proxy import ProxyMiddleware as ProxyMiddleware -from .middleware.shared_data import SharedDataMiddleware as SharedDataMiddleware - -def responder(f): ... -def get_current_url( - environ, root_only: bool = ..., strip_querystring: bool = ..., host_only: bool = ..., trusted_hosts: Any | None = ... -): ... -def host_is_trusted(hostname, trusted_list): ... -def get_host(environ, trusted_hosts: Any | None = ...): ... -def get_content_length(environ: WSGIEnvironment) -> int | None: ... 
-def get_input_stream(environ: WSGIEnvironment, safe_fallback: bool = ...) -> InputStream: ... -def get_query_string(environ): ... -def get_path_info(environ, charset: Text = ..., errors: Text = ...): ... -def get_script_name(environ, charset: Text = ..., errors: Text = ...): ... -def pop_path_info(environ, charset: Text = ..., errors: Text = ...): ... -def peek_path_info(environ, charset: Text = ..., errors: Text = ...): ... -def extract_path_info( - environ_or_baseurl, path_or_url, charset: Text = ..., errors: Text = ..., collapse_http_schemes: bool = ... -): ... - -class ClosingIterator: - def __init__(self, iterable, callbacks: Any | None = ...): ... - def __iter__(self): ... - def __next__(self): ... - def close(self): ... - -def wrap_file(environ: WSGIEnvironment, file: SupportsRead[bytes], buffer_size: int = ...) -> Iterable[bytes]: ... - -class FileWrapper: - file: SupportsRead[bytes] - buffer_size: int - def __init__(self, file: SupportsRead[bytes], buffer_size: int = ...) -> None: ... - def close(self) -> None: ... - def seekable(self) -> bool: ... - def seek(self, offset: int, whence: int = ...) -> None: ... - def tell(self) -> int | None: ... - def __iter__(self) -> FileWrapper: ... - def __next__(self) -> bytes: ... - -class _RangeWrapper: - iterable: Any - byte_range: Any - start_byte: Any - end_byte: Any - read_length: Any - seekable: Any - end_reached: Any - def __init__(self, iterable, start_byte: int = ..., byte_range: Any | None = ...): ... - def __iter__(self): ... - def __next__(self): ... - def close(self): ... - -def make_line_iter(stream, limit: Any | None = ..., buffer_size=..., cap_at_buffer: bool = ...): ... -def make_chunk_iter(stream, separator, limit: Any | None = ..., buffer_size=..., cap_at_buffer: bool = ...): ... - -class LimitedStream: - limit: Any - def __init__(self, stream, limit): ... - def __iter__(self): ... - @property - def is_exhausted(self): ... - def on_exhausted(self): ... - def on_disconnect(self): ... 
- def exhaust(self, chunk_size=...): ... - def read(self, size: Any | None = ...): ... - def readline(self, size: Any | None = ...): ... - def readlines(self, size: Any | None = ...): ... - def tell(self): ... - def __next__(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/METADATA.toml index 40f3c61c10ae..29511ee7d5a7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/METADATA.toml @@ -1,2 +1 @@ -version = "0.1" -requires = [] +version = "0.8.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/base.pyi index 3859bc5ce660..38c4ae1029b7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/base.pyi @@ -1,6 +1,6 @@ from _typeshed import Self from types import CodeType, FrameType, TracebackType, coroutine -from typing import Any, Coroutine, Generator, Generic, Iterator, Type, TypeVar +from typing import Any, Coroutine, Generator, Generic, Iterator, TypeVar _T = TypeVar("_T") _T_co = TypeVar("_T_co", covariant=True) @@ -15,7 +15,7 @@ class AsyncBase(Generic[_T]): class AiofilesContextManager(Generic[_T_co, _T_contra, _V_co]): def __init__(self, coro: Coroutine[_T_co, _T_contra, _V_co]) -> None: ... def send(self, value: _T_contra) -> _T_co: ... - def throw(self, typ: Type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ...) -> _T_co: ... + def throw(self, typ: type[BaseException], val: BaseException | object = ..., tb: TracebackType | None = ...) -> _T_co: ... def close(self) -> None: ... @property def gi_frame(self) -> FrameType: ... 
@@ -30,5 +30,5 @@ class AiofilesContextManager(Generic[_T_co, _T_contra, _V_co]): async def __anext__(self) -> _V_co: ... async def __aenter__(self) -> _V_co: ... async def __aexit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/os.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/os.pyi index b48884c4430e..761d425564fb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/os.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/os.pyi @@ -1,28 +1,73 @@ import sys from _typeshed import StrOrBytesPath +from asyncio.events import AbstractEventLoop from os import stat_result -from typing import Sequence, Union, overload +from typing import Any, Sequence, Union, overload + +from . import ospath as path _FdOrAnyPath = Union[int, StrOrBytesPath] -async def stat(path: _FdOrAnyPath, *, dir_fd: int | None = ..., follow_symlinks: bool = ...) -> stat_result: ... +async def stat( + path: _FdOrAnyPath, # noqa: F811 + *, + dir_fd: int | None = ..., + follow_symlinks: bool = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., +) -> stat_result: ... async def rename( - src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = ..., dst_dir_fd: int | None = ... + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., +) -> None: ... +async def replace( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., +) -> None: ... 
+async def remove( + path: StrOrBytesPath, *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ... # noqa: F811 +) -> None: ... +async def mkdir( + path: StrOrBytesPath, # noqa: F811 + mode: int = ..., + *, + dir_fd: int | None = ..., + loop: AbstractEventLoop | None = ..., + executor: Any = ..., # noqa: F811 +) -> None: ... +async def makedirs( + name: StrOrBytesPath, mode: int = ..., exist_ok: bool = ..., *, loop: AbstractEventLoop | None = ..., executor: Any = ... ) -> None: ... -async def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... -async def mkdir(path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ...) -> None: ... -async def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +async def rmdir( + path: StrOrBytesPath, *, dir_fd: int | None = ..., loop: AbstractEventLoop | None = ..., executor: Any = ... # noqa: F811 +) -> None: ... +async def removedirs(name: StrOrBytesPath, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> None: ... if sys.platform != "win32": @overload - async def sendfile(__out_fd: int, __in_fd: int, offset: int | None, count: int) -> int: ... + async def sendfile( + out_fd: int, in_fd: int, offset: int | None, count: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ... + ) -> int: ... @overload async def sendfile( - __out_fd: int, - __in_fd: int, + out_fd: int, + in_fd: int, offset: int, count: int, headers: Sequence[bytes] = ..., trailers: Sequence[bytes] = ..., flags: int = ..., - ) -> int: ... + *, + loop: AbstractEventLoop | None = ..., + executor: Any = ..., + ) -> int: ... 
# FreeBSD and Mac OS X only diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/ospath.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/ospath.pyi new file mode 100644 index 000000000000..4b5735cfa427 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/ospath.pyi @@ -0,0 +1,15 @@ +from _typeshed import StrOrBytesPath +from asyncio.events import AbstractEventLoop +from typing import Any + +async def exists(path: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... +async def isfile(path: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... +async def isdir(s: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... +async def getsize(filename: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> int: ... +async def getmtime(filename: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... +async def getatime(filename: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... +async def getctime(filename: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> float: ... +async def samefile( + f1: StrOrBytesPath | int, f2: StrOrBytesPath | int, *, loop: AbstractEventLoop | None = ..., executor: Any = ... +) -> bool: ... +async def sameopenfile(fp1: int, fp2: int, *, loop: AbstractEventLoop | None = ..., executor: Any = ...) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/threadpool/text.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/threadpool/text.pyi index fd2a90122e2a..8711bddb4d4e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/threadpool/text.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aiofiles/aiofiles/threadpool/text.pyi @@ -1,5 +1,5 @@ from _typeshed import StrOrBytesPath -from typing import BinaryIO, Iterable, Tuple +from typing import BinaryIO, Iterable from ..base import AsyncBase @@ -31,7 +31,7 @@ class AsyncTextIOWrapper(AsyncBase[str]): @property def line_buffering(self) -> bool: ... @property - def newlines(self) -> str | Tuple[str, ...] | None: ... + def newlines(self) -> str | tuple[str, ...] | None: ... @property def name(self) -> StrOrBytesPath | int: ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/annoy/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/annoy/METADATA.toml index d84fa99e4dbe..74b2b8d57c4f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/annoy/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/annoy/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.17" +version = "1.17.*" requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/appdirs/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/appdirs/METADATA.toml index 46f5153ea243..6d3b92238f58 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/appdirs/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/appdirs/METADATA.toml @@ -1 +1 @@ -version = "1.4" +version = "1.4.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/METADATA.toml index fe057a0d9612..e20445253656 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/METADATA.toml +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.4" +version = "1.4.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/atomicwrites/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/atomicwrites/__init__.pyi index 388ac27182fe..e1629cee44af 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/atomicwrites/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/atomicwrites/atomicwrites/__init__.pyi @@ -1,5 +1,5 @@ from _typeshed import StrOrBytesPath -from typing import IO, Any, AnyStr, Callable, ContextManager, Text, Type +from typing import IO, Any, AnyStr, Callable, ContextManager, Text def replace_atomic(src: AnyStr, dst: AnyStr) -> None: ... def move_atomic(src: AnyStr, dst: AnyStr) -> None: ... @@ -13,4 +13,4 @@ class AtomicWriter(object): def commit(self, f: IO[Any]) -> None: ... def rollback(self, f: IO[Any]) -> None: ... -def atomic_write(path: StrOrBytesPath, writer_cls: Type[AtomicWriter] = ..., **cls_kwargs: object) -> ContextManager[IO[Any]]: ... +def atomic_write(path: StrOrBytesPath, writer_cls: type[AtomicWriter] = ..., **cls_kwargs: object) -> ContextManager[IO[Any]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml index 62802ab41fa8..fa0e835cbebd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/METADATA.toml @@ -1 +1 @@ -version = "2.8" +version = "2.8.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi index 490baa42dfd3..ba1d9b06a909 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/aws-xray-sdk/aws_xray_sdk/core/models/trace_header.pyi @@ -1,3 +1,4 @@ +from _typeshed import Self from typing import Any log: Any @@ -12,7 +13,7 @@ class TraceHeader: self, root: str | None = ..., parent: str | None = ..., sampled: bool | None = ..., data: dict[str, Any] | None = ... ) -> None: ... @classmethod - def from_header_str(cls, header): ... + def from_header_str(cls: type[Self], header) -> Self: ... def to_header_str(self): ... @property def root(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/babel/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/babel/METADATA.toml index d8484dc609d9..cb16b293a669 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/babel/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/babel/METADATA.toml @@ -1 +1,2 @@ -version = "2.9" \ No newline at end of file +version = "2.9.*" +requires = ["types-pytz"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/core.pyi index 9c274f73ad5e..9b936be5e1bf 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/core.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/core.pyi @@ -1,4 +1,5 @@ from typing import Any +from typing_extensions import Literal class UnknownLocaleError(Exception): identifier: Any @@ -108,3 +109,21 @@ def default_locale(category: Any | None = ..., aliases=...): ... def negotiate_locale(preferred, available, sep: str = ..., aliases=...): ... def parse_locale(identifier, sep: str = ...): ... def get_locale_identifier(tup, sep: str = ...): ... +def get_global(key: _GLOBAL_KEY): ... 
+ +_GLOBAL_KEY = Literal[ + "all_currencies", + "currency_fractions", + "language_aliases", + "likely_subtags", + "parent_exceptions", + "script_aliases", + "territory_aliases", + "territory_currencies", + "territory_languages", + "territory_zones", + "variant_aliases", + "windows_zone_mapping", + "zone_aliases", + "zone_territories", +] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/dates.pyi b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/dates.pyi index 29b3f8a80439..4e86c985b665 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/dates.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/dates.pyi @@ -1,30 +1,100 @@ -from datetime import date, datetime, time -from typing import Any +from datetime import date, datetime, time, timedelta, tzinfo +from typing import Any, overload +from typing_extensions import Literal -NO_INHERITANCE_MARKER: str -LC_TIME: Any -date_ = date -datetime_ = datetime -time_ = time +from babel.core import Locale +from babel.util import LOCALTZ as LOCALTZ, UTC as UTC +from pytz import BaseTzInfo + +# The module contents here are organized the same way they are in the API documentation at +# http://babel.pocoo.org/en/latest/api/dates.html + +# Date and Time Formatting +_Instant = date | time | datetime | float | None +_PredefinedTimeFormat = Literal["full", "long", "medium", "short"] + +def format_datetime( + datetime: _Instant = ..., format: _PredefinedTimeFormat | str = ..., tzinfo: tzinfo | None = ..., locale: str | Locale = ... +) -> str: ... +def format_date( + date: date | datetime | None = ..., format: _PredefinedTimeFormat | str = ..., locale: str | Locale = ... +) -> str: ... +def format_time( + time: time | datetime | float | None = ..., + format: _PredefinedTimeFormat | str = ..., + tzinfo: tzinfo | None = ..., + locale: str | Locale = ..., +) -> str: ... 
+def format_timedelta( + delta: timedelta | int, + granularity: Literal["year", "month", "week", "day", "hour", "minute", "second"] = ..., + threshold: float = ..., + add_direction: bool = ..., + format: Literal["narrow", "short", "medium", "long"] = ..., + locale: str | Locale = ..., +) -> str: ... +def format_skeleton( + skeleton: str, datetime: _Instant = ..., tzinfo: tzinfo | None = ..., fuzzy: bool = ..., locale: str | Locale = ... +) -> str: ... +def format_interval( + start: _Instant, + end: _Instant, + skeleton: str | None = ..., + tzinfo: tzinfo | None = ..., + fuzzy: bool = ..., + locale: str | Locale = ..., +) -> str: ... + +# Timezone Functionality +@overload +def get_timezone(zone: str | BaseTzInfo | None = ...) -> BaseTzInfo: ... +@overload +def get_timezone(zone: tzinfo) -> tzinfo: ... +def get_timezone_gmt( + datetime: _Instant = ..., + width: Literal["long", "short", "iso8601", "iso8601_short"] = ..., + locale: str | Locale = ..., + return_z: bool = ..., +) -> str: ... + +_DtOrTzinfo = datetime | tzinfo | str | int | time | None + +def get_timezone_location(dt_or_tzinfo: _DtOrTzinfo = ..., locale: str | Locale = ..., return_city: bool = ...) -> str: ... +def get_timezone_name( + dt_or_tzinfo: _DtOrTzinfo = ..., + width: Literal["long", "short"] = ..., + uncommon: bool = ..., + locale: str | Locale = ..., + zone_variant: Literal["generic", "daylight", "standard"] | None = ..., + return_zone: bool = ..., +) -> str: ... -def get_timezone(zone: Any | None = ...): ... -def get_next_timezone_transition(zone: Any | None = ..., dt: Any | None = ...): ... +# Note: While Babel accepts any tzinfo for the most part, the get_next_timeout_transition() +# function requires a tzinfo that is produced by get_timezone()/pytz AND has DST info. +# The typing here will help you with the first requirement, but will not protect against +# pytz tzinfo's without DST info, like what you get from get_timezone("UTC") for instance. 
+def get_next_timezone_transition(zone: BaseTzInfo | None = ..., dt: _Instant = ...) -> TimezoneTransition: ... class TimezoneTransition: - activates: Any - from_tzinfo: Any - to_tzinfo: Any - reference_date: Any - def __init__(self, activates, from_tzinfo, to_tzinfo, reference_date: Any | None = ...) -> None: ... + # This class itself is not included in the documentation, yet it is mentioned by name. + # See https://github.com/python-babel/babel/issues/823 + activates: datetime + from_tzinfo: tzinfo + to_tzinfo: tzinfo + reference_date: datetime | None + def __init__( + self, activates: datetime, from_tzinfo: tzinfo, to_tzinfo: tzinfo, reference_date: datetime | None = ... + ) -> None: ... @property - def from_tz(self): ... + def from_tz(self) -> str: ... @property - def to_tz(self): ... + def to_tz(self) -> str: ... @property - def from_offset(self): ... + def from_offset(self) -> int: ... @property - def to_offset(self): ... + def to_offset(self) -> int: ... +# Data Access def get_period_names(width: str = ..., context: str = ..., locale=...): ... def get_day_names(width: str = ..., context: str = ..., locale=...): ... def get_month_names(width: str = ..., context: str = ..., locale=...): ... @@ -33,30 +103,22 @@ def get_era_names(width: str = ..., locale=...): ... def get_date_format(format: str = ..., locale=...): ... def get_datetime_format(format: str = ..., locale=...): ... def get_time_format(format: str = ..., locale=...): ... -def get_timezone_gmt(datetime: Any | None = ..., width: str = ..., locale=..., return_z: bool = ...): ... -def get_timezone_location(dt_or_tzinfo: Any | None = ..., locale=..., return_city: bool = ...): ... -def get_timezone_name( - dt_or_tzinfo: Any | None = ..., - width: str = ..., - uncommon: bool = ..., - locale=..., - zone_variant: Any | None = ..., - return_zone: bool = ..., -): ... -def format_date(date: Any | None = ..., format: str = ..., locale=...): ... 
-def format_datetime(datetime: Any | None = ..., format: str = ..., tzinfo: Any | None = ..., locale=...): ... -def format_time(time: Any | None = ..., format: str = ..., tzinfo: Any | None = ..., locale=...): ... -def format_skeleton(skeleton, datetime: Any | None = ..., tzinfo: Any | None = ..., fuzzy: bool = ..., locale=...): ... + +# Basic Parsing +def parse_date(string, locale=...): ... +def parse_time(string, locale=...): ... +def parse_pattern(pattern): ... + +# Undocumented +NO_INHERITANCE_MARKER: str +LC_TIME: Any +date_ = date +datetime_ = datetime +time_ = time TIMEDELTA_UNITS: Any -def format_timedelta( - delta, granularity: str = ..., threshold: float = ..., add_direction: bool = ..., format: str = ..., locale=... -): ... -def format_interval(start, end, skeleton: Any | None = ..., tzinfo: Any | None = ..., fuzzy: bool = ..., locale=...): ... def get_period_id(time, tzinfo: Any | None = ..., type: Any | None = ..., locale=...): ... -def parse_date(string, locale=...): ... -def parse_time(string, locale=...): ... class DateTimePattern: pattern: Any @@ -91,7 +153,6 @@ class DateTimeFormat: PATTERN_CHARS: Any PATTERN_CHAR_ORDER: str -def parse_pattern(pattern): ... def tokenize_pattern(pattern): ... def untokenize_pattern(tokens): ... def split_interval_pattern(pattern): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/messages/plurals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/messages/plurals.pyi index 0f9ac943b0f5..5f75160f3d7f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/messages/plurals.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/messages/plurals.pyi @@ -1,10 +1,10 @@ -from typing import Any, Tuple +from typing import Any LC_CTYPE: Any PLURALS: Any DEFAULT_PLURAL: Any -class _PluralTuple(Tuple[int, str]): +class _PluralTuple(tuple[int, str]): num_plurals: Any plural_expr: Any plural_forms: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/support.pyi b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/support.pyi index 3755073d5cda..09ccd93abaa9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/support.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/support.pyi @@ -44,8 +44,8 @@ class LazyProxy: def __delitem__(self, key) -> None: ... def __getitem__(self, key): ... def __setitem__(self, key, value) -> None: ... - def __copy__(self): ... - def __deepcopy__(self, memo): ... + def __copy__(self) -> LazyProxy: ... + def __deepcopy__(self, memo: Any) -> LazyProxy: ... class NullTranslations(gettext.NullTranslations): DEFAULT_DOMAIN: Any @@ -79,7 +79,7 @@ class NullTranslations(gettext.NullTranslations): ugettext: Any ungettext: Any -class Translations(NullTranslations, gettext.GNUTranslations): +class Translations(NullTranslations, gettext.GNUTranslations): # type: ignore # argument disparities between base classes DEFAULT_DOMAIN: str domain: Any def __init__(self, fp: Any | None = ..., domain: Any | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/util.pyi index 8788030b825d..48b6720aa9a3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/babel/babel/util.pyi @@ -4,6 +4,7 @@ from datetime import tzinfo from typing import Any from babel import localtime as localtime +from pytz import BaseTzInfo missing: Any @@ -32,8 +33,8 @@ class FixedOffsetTimezone(tzinfo): def tzname(self, dt): ... def dst(self, dt): ... -UTC: Any -LOCALTZ: Any +UTC: BaseTzInfo +LOCALTZ: BaseTzInfo get_localzone = localtime.get_localzone STDOFFSET: Any DSTOFFSET: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/backports.ssl_match_hostname/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/backports.ssl_match_hostname/METADATA.toml index 9509ba1a7b30..839f7d3a9b8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/backports.ssl_match_hostname/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/backports.ssl_match_hostname/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.7" +version = "3.7.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/backports_abc/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/backports_abc/METADATA.toml index 255b4494223b..22313195c363 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/backports_abc/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/backports_abc/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.5" +version = "0.5.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/METADATA.toml index ff50f6cbe269..39914d078bcf 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/METADATA.toml +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/METADATA.toml @@ -1 +1 @@ -version = "4.9" +version = "4.10.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/__init__.pyi index d975dfc8f061..ebb01dbca72e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/__init__.pyi @@ -1,8 +1,8 @@ from _typeshed import Self, SupportsRead -from typing import Any, Sequence, Type +from typing import Any, Sequence from .builder import TreeBuilder -from .element import PageElement, SoupStrainer, Tag +from .element import PageElement, SoupStrainer as SoupStrainer, Tag as Tag from .formatter import Formatter class GuessedAtParserWarning(UserWarning): ... @@ -23,11 +23,11 @@ class BeautifulSoup(Tag): self, markup: str | bytes | SupportsRead[str] | SupportsRead[bytes] = ..., features: str | Sequence[str] | None = ..., - builder: TreeBuilder | Type[TreeBuilder] | None = ..., + builder: TreeBuilder | type[TreeBuilder] | None = ..., parse_only: SoupStrainer | None = ..., from_encoding: str | None = ..., exclude_encodings: Sequence[str] | None = ..., - element_classes: dict[Type[PageElement], Type[Any]] | None = ..., + element_classes: dict[type[PageElement], type[Any]] | None = ..., **kwargs, ) -> None: ... def __copy__(self: Self) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi index c990e7890bc4..bca74d70c182 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_htmlparser.pyi @@ -11,7 +11,7 @@ class BeautifulSoupHTMLParser(HTMLParser): on_duplicate_attribute: Any already_closed_empty_element: Any def __init__(self, *args, **kwargs) -> None: ... - def error(self, msg) -> None: ... + def error(self, msg: str) -> None: ... def handle_startendtag(self, name, attrs) -> None: ... def handle_starttag(self, name, attrs, handle_empty_element: bool = ...) -> None: ... def handle_endtag(self, name, check_already_closed: bool = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_lxml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_lxml.pyi index 492848911ad1..9133e6a17615 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_lxml.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/builder/_lxml.pyi @@ -37,7 +37,7 @@ class LXMLTreeBuilderForXML(TreeBuilder): def comment(self, content) -> None: ... def test_fragment_to_document(self, fragment): ... 
-class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): +class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML): # type: ignore # argument disparities between base classes NAME: Any ALTERNATE_NAMES: Any features: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/dammit.pyi b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/dammit.pyi index eaa7bc47debb..9dd69f4b4c46 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/dammit.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/dammit.pyi @@ -1,4 +1,7 @@ +from collections.abc import Iterable, Iterator +from logging import Logger from typing import Any +from typing_extensions import Literal chardet_type: Any @@ -25,44 +28,60 @@ class EntitySubstitution: def substitute_html(cls, s): ... class EncodingDetector: - override_encodings: Any - exclude_encodings: Any - chardet_encoding: Any - is_html: Any - declared_encoding: Any + known_definite_encodings: list[str] + user_encodings: list[str] + exclude_encodings: set[str] + chardet_encoding: Any | None + is_html: bool + declared_encoding: str | None + markup: Any + sniffed_encoding: str | None def __init__( - self, markup, override_encodings: Any | None = ..., is_html: bool = ..., exclude_encodings: Any | None = ... + self, + markup, + known_definite_encodings: Iterable[str] | None = ..., + is_html: bool = ..., + exclude_encodings: list[str] | None = ..., + user_encodings: list[str] | None = ..., + override_encodings: list[str] | None = ..., ) -> None: ... @property - def encodings(self) -> None: ... + def encodings(self) -> Iterator[str]: ... @classmethod def strip_byte_order_mark(cls, data): ... @classmethod - def find_declared_encoding(cls, markup, is_html: bool = ..., search_entire_document: bool = ...): ... + def find_declared_encoding(cls, markup, is_html: bool = ..., search_entire_document: bool = ...) -> str | None: ... 
class UnicodeDammit: - CHARSET_ALIASES: Any - ENCODINGS_WITH_SMART_QUOTES: Any - smart_quotes_to: Any - tried_encodings: Any + CHARSET_ALIASES: dict[str, str] + ENCODINGS_WITH_SMART_QUOTES: list[str] + smart_quotes_to: Literal["ascii", "xml", "html"] | None + tried_encodings: list[tuple[str, str]] contains_replacement_characters: bool - is_html: Any - log: Any - detector: Any + is_html: bool + log: Logger + detector: EncodingDetector markup: Any - unicode_markup: Any - original_encoding: Any + unicode_markup: str + original_encoding: Any | None def __init__( - self, markup, override_encodings=..., smart_quotes_to: Any | None = ..., is_html: bool = ..., exclude_encodings=... + self, + markup, + known_definite_encodings: list[str] | None = ..., + smart_quotes_to: Literal["ascii", "xml", "html"] | None = ..., + is_html: bool = ..., + exclude_encodings: list[str] | None = ..., + user_encodings: list[str] | None = ..., + override_encodings: list[str] | None = ..., ) -> None: ... @property - def declared_html_encoding(self): ... - def find_codec(self, charset): ... - MS_CHARS: Any - MS_CHARS_TO_ASCII: Any - WINDOWS_1252_TO_UTF8: Any - MULTIBYTE_MARKERS_AND_SIZES: Any - FIRST_MULTIBYTE_MARKER: Any - LAST_MULTIBYTE_MARKER: Any + def declared_html_encoding(self) -> str | None: ... + def find_codec(self, charset: str) -> str | None: ... + MS_CHARS: dict[bytes, str | tuple[str, ...]] + MS_CHARS_TO_ASCII: dict[bytes, str] + WINDOWS_1252_TO_UTF8: dict[int, bytes] + MULTIBYTE_MARKERS_AND_SIZES: list[tuple[int, int, int]] + FIRST_MULTIBYTE_MARKER: int + LAST_MULTIBYTE_MARKER: int @classmethod - def detwingle(cls, in_bytes, main_encoding: str = ..., embedded_encoding: str = ...): ... + def detwingle(cls, in_bytes: bytes, main_encoding: str = ..., embedded_encoding: str = ...) -> bytes: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/element.pyi b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/element.pyi index f7d6d9c6fc1b..35d31dce75d9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/element.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/element.pyi @@ -1,5 +1,6 @@ from _typeshed import Self -from typing import Any, Callable, Generic, Iterable, List, Mapping, Pattern, Set, Tuple, Type, TypeVar, Union, overload +from collections.abc import Iterator +from typing import Any, Callable, Generic, Iterable, Pattern, TypeVar, Union, overload from . import BeautifulSoup from .builder import TreeBuilder @@ -9,10 +10,10 @@ DEFAULT_OUTPUT_ENCODING: str PY3K: bool nonwhitespace_re: Pattern[str] whitespace_re: Pattern[str] -PYTHON_SPECIFIC_ENCODINGS: Set[str] +PYTHON_SPECIFIC_ENCODINGS: set[str] class NamespacedAttribute(str): - def __new__(cls: Type[Self], prefix: str, name: str | None = ..., namespace: str | None = ...) -> Self: ... + def __new__(cls: type[Self], prefix: str, name: str | None = ..., namespace: str | None = ...) -> Self: ... class AttributeValueWithCharsetSubstitution(str): ... @@ -50,7 +51,13 @@ class PageElement: def formatter_for_name(self, formatter: Formatter | str | _EntitySubstitution): ... nextSibling: PageElement | None previousSibling: PageElement | None - def replace_with(self: Self, replace_with: PageElement | str) -> Self: ... + @property + def stripped_strings(self) -> Iterator[str]: ... + def get_text(self, separator: str = ..., strip: bool = ..., types: tuple[type[NavigableString], ...] = ...) -> str: ... + getText = get_text + @property + def text(self) -> str: ... + def replace_with(self: Self, *args: PageElement | str) -> Self: ... replaceWith = replace_with def unwrap(self: Self) -> Self: ... 
replace_with_children = unwrap @@ -175,14 +182,14 @@ class NavigableString(str, PageElement): PREFIX: str SUFFIX: str known_xml: bool | None - def __new__(cls: Type[Self], value: str | bytes) -> Self: ... + def __new__(cls: type[Self], value: str | bytes) -> Self: ... def __copy__(self: Self) -> Self: ... def __getnewargs__(self) -> tuple[str]: ... - @property - def string(self) -> str: ... def output_ready(self, formatter: Formatter | str | None = ...) -> str: ... @property def name(self) -> None: ... + @property + def strings(self) -> Iterable[str]: ... class PreformattedString(NavigableString): PREFIX: str @@ -220,15 +227,15 @@ class Script(NavigableString): ... class TemplateString(NavigableString): ... class Tag(PageElement): - parser_class: Type[BeautifulSoup] | None + parser_class: type[BeautifulSoup] | None name: str namespace: str | None prefix: str | None sourceline: int | None sourcepos: int | None known_xml: bool | None - attrs: Mapping[str, str] - contents: List[PageElement] + attrs: dict[str, str] + contents: list[PageElement] hidden: bool can_be_empty_element: bool | None cdata_list_attributes: list[str] | None @@ -240,7 +247,7 @@ class Tag(PageElement): name: str | None = ..., namespace: str | None = ..., prefix: str | None = ..., - attrs: Mapping[str, str] | None = ..., + attrs: dict[str, str] | None = ..., parent: Tag | None = ..., previous: PageElement | None = ..., is_xml: bool | None = ..., @@ -249,8 +256,9 @@ class Tag(PageElement): can_be_empty_element: bool | None = ..., cdata_list_attributes: list[str] | None = ..., preserve_whitespace_tags: list[str] | None = ..., + interesting_string_types: type[NavigableString] | tuple[type[NavigableString], ...] | None = ..., ) -> None: ... - parserClass: Type[BeautifulSoup] | None + parserClass: type[BeautifulSoup] | None def __copy__(self: Self) -> Self: ... @property def is_empty_element(self) -> bool: ... @@ -259,14 +267,9 @@ class Tag(PageElement): def string(self) -> str | None: ... 
@string.setter def string(self, string: str) -> None: ... + DEFAULT_INTERESTING_STRING_TYPES: tuple[type[NavigableString], ...] @property def strings(self) -> Iterable[str]: ... - @property - def stripped_strings(self) -> Iterable[str]: ... - def get_text(self, separator: str = ..., strip: bool = ..., types: Tuple[Type[NavigableString], ...] = ...) -> str: ... - getText = get_text - @property - def text(self) -> str: ... def decompose(self) -> None: ... def clear(self, decompose: bool = ...) -> None: ... def smooth(self) -> None: ... @@ -316,7 +319,7 @@ class Tag(PageElement): text: _Strainable | None = ..., limit: int | None = ..., **kwargs: _Strainable, - ) -> ResultSet[PageElement]: ... + ) -> ResultSet[Any]: ... __call__ = find_all findAll = find_all findChildren = find_all @@ -345,6 +348,6 @@ class SoupStrainer: searchTag = search_tag def search(self, markup: PageElement | Iterable[PageElement]): ... -class ResultSet(Generic[_PageElementT], List[_PageElementT]): +class ResultSet(list[_PageElementT], Generic[_PageElementT]): source: SoupStrainer def __init__(self, source: SoupStrainer, result: Iterable[_PageElementT] = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/formatter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/formatter.pyi index e297f88a9e3e..4250419fdc47 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/formatter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/beautifulsoup4/bs4/formatter.pyi @@ -12,12 +12,14 @@ class Formatter(EntitySubstitution): entity_substitution: _EntitySubstitution void_element_close_prefix: str cdata_containing_tags: list[str] + empty_attributes_are_booleans: bool def __init__( self, language: str | None = ..., entity_substitution: _EntitySubstitution | None = ..., void_element_close_prefix: str = ..., cdata_containing_tags: list[str] | None = ..., + empty_attributes_are_booleans: bool = ..., ) -> None: ... def substitute(self, ns: str) -> str: ... def attribute_value(self, value: str) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml index 71ef530fa1a2..bb8cc449b44f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/METADATA.toml @@ -1 +1 @@ -version = "4.1" +version = "4.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi index 3d69fc0da4e8..52a028156121 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/html5lib_shim.pyi @@ -1,16 +1,16 @@ from collections.abc import Generator, Iterable from typing import Any -class HTMLParser(object): # actually html5lib.HTMLParser +class HTMLParser: # actually html5lib.HTMLParser def __getattr__(self, __name: str) -> Any: ... 
# incomplete -class Filter(object): # actually html5lib.filters.base.Filter +class Filter: # actually html5lib.filters.base.Filter def __getattr__(self, __name: str) -> Any: ... # incomplete -class SanitizerFilter(object): # actually html5lib.filters.sanitizer.Filter +class SanitizerFilter: # actually html5lib.filters.sanitizer.Filter def __getattr__(self, __name: str) -> Any: ... # incomplete -class HTMLSerializer(object): # actually html5lib.serializer.HTMLSerializer +class HTMLSerializer: # actually html5lib.serializer.HTMLSerializer def __getattr__(self, __name: str) -> Any: ... # incomplete class BleachHTMLParser(HTMLParser): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi index 05a6728383e2..cb3013cacd22 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/linkifier.pyi @@ -21,7 +21,7 @@ def build_email_re(tlds: Iterable[str] = ...) -> Pattern[str]: ... 
EMAIL_RE: Pattern[str] -class Linker(object): +class Linker: def __init__( self, callbacks: Iterable[_Callback] = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi index 0966af2096d6..76fe8f208dc9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/bleach/bleach/sanitizer.pyi @@ -1,5 +1,5 @@ from collections.abc import Callable, Container, Iterable -from typing import Any, Dict, List, Pattern, Union +from typing import Any, Pattern, Union from .html5lib_shim import BleachHTMLParser, BleachHTMLSerializer, SanitizerFilter @@ -15,7 +15,7 @@ INVISIBLE_REPLACEMENT_CHAR: str # A html5lib Filter class _Filter = Any -class Cleaner(object): +class Cleaner: tags: Container[str] attributes: _Attributes styles: Container[str] @@ -39,8 +39,8 @@ class Cleaner(object): def clean(self, text: str) -> str: ... _AttributeFilter = Callable[[str, str, str], bool] -_AttributeDict = Union[Dict[str, Union[List[str], _AttributeFilter]], Dict[str, List[str]], Dict[str, _AttributeFilter]] -_Attributes = Union[_AttributeFilter, _AttributeDict, List[str]] +_AttributeDict = Union[dict[str, Union[list[str], _AttributeFilter]], dict[str, list[str]], dict[str, _AttributeFilter]] +_Attributes = Union[_AttributeFilter, _AttributeDict, list[str]] def attribute_filter_factory(attributes: _Attributes) -> _AttributeFilter: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/boto/METADATA.toml index bc5011788db7..aff1a5fd3b7e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/METADATA.toml @@ -1,3 +1,3 @@ -version = "2.49" +version = "2.49.*" python2 = true requires = ["types-six"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/auth.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/auth.pyi index 2dc77949c110..3a588d27ed8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/auth.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/auth.pyi @@ -64,7 +64,7 @@ class HmacAuthV4Handler(AuthHandler, HmacKeys): def signature(self, http_request, string_to_sign): ... def add_auth(self, req, **kwargs): ... -class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler): +class S3HmacAuthV4Handler(HmacAuthV4Handler, AuthHandler): # type: ignore # argument disparities between base classes capability: Any region_name: Any def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/kms/layer1.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/kms/layer1.pyi index e2755233678d..5a7496ee30bb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/kms/layer1.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/kms/layer1.pyi @@ -1,4 +1,4 @@ -from typing import Any, Mapping, Type +from typing import Any, Mapping from boto.connection import AWSQueryConnection @@ -8,7 +8,7 @@ class KMSConnection(AWSQueryConnection): DefaultRegionEndpoint: str ServiceName: str TargetPrefix: str - ResponseError: Type[Exception] + ResponseError: type[Exception] region: Any def __init__(self, **kwargs) -> None: ... 
def create_alias(self, alias_name: str, target_key_id: str) -> dict[str, Any] | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/__init__.pyi index fa747956ebbe..c483c9cf498b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Text, Type +from typing import Text from boto.connection import AWSAuthConnection from boto.regioninfo import RegionInfo @@ -10,7 +10,7 @@ class S3RegionInfo(RegionInfo): self, name: Text | None = ..., endpoint: str | None = ..., - connection_cls: Type[AWSAuthConnection] | None = ..., + connection_cls: type[AWSAuthConnection] | None = ..., **kw_params, ) -> S3Connection: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/bucket.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/bucket.pyi index 741772ff3817..196ba4d2e07d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/bucket.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/bucket.pyi @@ -1,4 +1,4 @@ -from typing import Any, Text, Type +from typing import Any, Text from .bucketlistresultset import BucketListResultSet from .connection import S3Connection @@ -19,8 +19,8 @@ class Bucket: MFADeleteRE: str name: Text connection: S3Connection - key_class: Type[Key] - def __init__(self, connection: S3Connection | None = ..., name: Text | None = ..., key_class: Type[Key] = ...) -> None: ... + key_class: type[Key] + def __init__(self, connection: S3Connection | None = ..., name: Text | None = ..., key_class: type[Key] = ...) -> None: ... def __iter__(self): ... def __contains__(self, key_name) -> bool: ... def startElement(self, name, attrs, connection): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/connection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/connection.pyi index 96b8d0de134c..b0240e297721 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/connection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/connection.pyi @@ -1,4 +1,4 @@ -from typing import Any, Text, Type +from typing import Any, Text from boto.connection import AWSAuthConnection from boto.exception import BotoClientError @@ -48,7 +48,7 @@ class S3Connection(AWSAuthConnection): DefaultCallingFormat: Any QueryString: str calling_format: Any - bucket_class: Type[Bucket] + bucket_class: type[Bucket] anon: Any def __init__( self, @@ -66,7 +66,7 @@ class S3Connection(AWSAuthConnection): calling_format: Any = ..., path: str = ..., provider: str = ..., - bucket_class: Type[Bucket] = ..., + bucket_class: type[Bucket] = ..., security_token: Any | None = ..., suppress_consec_slashes: bool = ..., anon: bool = ..., @@ -75,7 +75,7 @@ class S3Connection(AWSAuthConnection): ) -> None: ... def __iter__(self): ... def __contains__(self, bucket_name): ... - def set_bucket_class(self, bucket_class: Type[Bucket]) -> None: ... + def set_bucket_class(self, bucket_class: type[Bucket]) -> None: ... def build_post_policy(self, expiration_time, conditions): ... def build_post_form_args( self, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/cors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/cors.pyi index f31e612f748d..125587f9ba0e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/cors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/cors.pyi @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any class CORSRule: allowed_method: Any @@ -20,7 +20,7 @@ class CORSRule: def endElement(self, name, value, connection): ... def to_xml(self) -> str: ... 
-class CORSConfiguration(List[CORSRule]): +class CORSConfiguration(list[CORSRule]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/lifecycle.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/lifecycle.pyi index bf750bb2e50f..7919bad712df 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/lifecycle.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/lifecycle.pyi @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any class Rule: id: Any @@ -33,7 +33,7 @@ class Transition: def __init__(self, days: Any | None = ..., date: Any | None = ..., storage_class: Any | None = ...) -> None: ... def to_xml(self): ... -class Transitions(List[Transition]): +class Transitions(list[Transition]): transition_properties: int current_transition_property: int temp_days: Any @@ -51,7 +51,7 @@ class Transitions(List[Transition]): @property def storage_class(self): ... -class Lifecycle(List[Rule]): +class Lifecycle(list[Rule]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/tagging.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/tagging.pyi index ad1bcf8039f4..98a954d5fd8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/tagging.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/tagging.pyi @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any class Tag: key: Any @@ -9,13 +9,13 @@ class Tag: def to_xml(self): ... def __eq__(self, other): ... -class TagSet(List[Tag]): +class TagSet(list[Tag]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... 
def add_tag(self, key, value): ... def to_xml(self): ... -class Tags(List[TagSet]): +class Tags(list[TagSet]): def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... def to_xml(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/website.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/website.pyi index 186afdf1fdd1..e913f6dde791 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/website.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/s3/website.pyi @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any def tag(key, value): ... @@ -33,7 +33,7 @@ class RedirectLocation(_XMLKeyValue): def __init__(self, hostname: Any | None = ..., protocol: Any | None = ...) -> None: ... def to_xml(self): ... -class RoutingRules(List[RoutingRule]): +class RoutingRules(list[RoutingRule]): def add_rule(self, rule: RoutingRule) -> RoutingRules: ... def startElement(self, name, attrs, connection): ... def endElement(self, name, value, connection): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/utils.pyi index c4549098806b..1a2aabcf8ef5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/boto/boto/utils.pyi @@ -3,7 +3,7 @@ import logging.handlers import subprocess import sys import time -from typing import IO, Any, Callable, ContextManager, Dict, Iterable, Mapping, Sequence, Tuple, Type, TypeVar +from typing import IO, Any, Callable, ContextManager, Iterable, Mapping, Sequence, TypeVar import boto.connection @@ -37,10 +37,10 @@ else: _Provider = Any # TODO replace this with boto.provider.Provider once stubs exist _LockType = Any # TODO replace this with _thread.LockType once stubs exist -JSONDecodeError: Type[ValueError] +JSONDecodeError: type[ValueError] qsa_of_interest: list[str] -def unquote_v(nv: str) -> str | Tuple[str, str]: ... +def unquote_v(nv: str) -> str | tuple[str, str]: ... def canonical_string( method: str, path: str, headers: Mapping[str, str | None], expires: int | None = ..., provider: _Provider | None = ... ) -> str: ... @@ -50,7 +50,7 @@ def merge_meta( def get_aws_metadata(headers: Mapping[str, str], provider: _Provider | None = ...) -> Mapping[str, str]: ... def retry_url(url: str, retry_on_404: bool = ..., num_retries: int = ..., timeout: int | None = ...) -> str: ... -class LazyLoadMetadata(Dict[_KT, _VT]): +class LazyLoadMetadata(dict[_KT, _VT]): def __init__(self, url: str, num_retries: int, timeout: int | None = ...) -> None: ... def get_instance_metadata( @@ -68,10 +68,10 @@ ISO8601_MS: str RFC1123: str LOCALE_LOCK: _LockType -def setlocale(name: str | Tuple[str, str]) -> ContextManager[str]: ... +def setlocale(name: str | tuple[str, str]) -> ContextManager[str]: ... def get_ts(ts: time.struct_time | None = ...) -> str: ... def parse_ts(ts: str) -> datetime.datetime: ... 
-def find_class(module_name: str, class_name: str | None = ...) -> Type[Any] | None: ... +def find_class(module_name: str, class_name: str | None = ...) -> type[Any] | None: ... def update_dme(username: str, password: str, dme_id: str, ip_address: str) -> str: ... def fetch_file( uri: str, file: IO[str] | None = ..., username: str | None = ..., password: str | None = ... @@ -101,7 +101,7 @@ class AuthSMTPHandler(logging.handlers.SMTPHandler): self, mailhost: str, username: str, password: str, fromaddr: str, toaddrs: Sequence[str], subject: str ) -> None: ... -class LRUCache(Dict[_KT, _VT]): +class LRUCache(dict[_KT, _VT]): class _Item: previous: LRUCache._Item | None next: LRUCache._Item | None @@ -122,7 +122,7 @@ class Password: str: _str | None def __init__(self, str: _str | None = ..., hashfunc: Callable[[bytes], _HashType] | None = ...) -> None: ... def set(self, value: bytes | _str) -> None: ... - def __eq__(self, other: Any) -> bool: ... + def __eq__(self, other: _str | bytes | None) -> bool: ... # type: ignore[override] def __len__(self) -> int: ... def notify( @@ -137,11 +137,11 @@ def get_utf8_value(value: str) -> bytes: ... def mklist(value: Any) -> list[Any]: ... def pythonize_name(name: str) -> str: ... def write_mime_multipart( - content: list[Tuple[str, str]], compress: bool = ..., deftype: str = ..., delimiter: str = ... + content: list[tuple[str, str]], compress: bool = ..., deftype: str = ..., delimiter: str = ... ) -> str: ... def guess_mime_type(content: str, deftype: str) -> str: ... -def compute_md5(fp: IO[Any], buf_size: int = ..., size: int | None = ...) -> Tuple[str, str, int]: ... -def compute_hash(fp: IO[Any], buf_size: int = ..., size: int | None = ..., hash_algorithm: Any = ...) -> Tuple[str, str, int]: ... +def compute_md5(fp: IO[Any], buf_size: int = ..., size: int | None = ...) -> tuple[str, str, int]: ... +def compute_hash(fp: IO[Any], buf_size: int = ..., size: int | None = ..., hash_algorithm: Any = ...) 
-> tuple[str, str, int]: ... def find_matching_headers(name: str, headers: Mapping[str, str | None]) -> list[str]: ... def merge_headers_by_name(name: str, headers: Mapping[str, str | None]) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml index 4ede6a0b859b..9c5a4bca25dc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/METADATA.toml @@ -1 +1 @@ -version = "4.11" +version = "4.11.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/credit_card.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/credit_card.pyi index f6a3738920a5..e68500363886 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/credit_card.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/credit_card.pyi @@ -25,9 +25,11 @@ class CreditCard(Resource): Switch: str Visa: str Unknown: str + class CustomerLocation: International: str US: str + class CardTypeIndicator: Yes: str No: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/dispute.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/dispute.pyi index 64a87eb045bc..edda8a919794 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/dispute.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/dispute.pyi @@ -17,6 +17,7 @@ class Dispute(AttributeGetter): Open: str Won: str Lost: str + class Reason: CancelledRecurringTransaction: str CreditNotProcessed: str @@ -29,10 +30,12 @@ class Dispute(AttributeGetter): ProductUnsatisfactory: str Retrieval: str TransactionAmountDiffers: str + class Kind: Chargeback: str PreArbitration: str Retrieval: str + class ChargebackProtectionLevel: Effortless: str 
Standard: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi index 147f54dc4dfd..994521144eef 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/error_codes.pyi @@ -29,6 +29,7 @@ class ErrorCodes: StreetAddressIsRequired: str StreetAddressIsTooLong: str TooManyAddressesPerCustomer: str + class ApplePay: ApplePayCardsAreNotAccepted: str CustomerIdIsRequiredForVaulting: str @@ -52,6 +53,7 @@ class ErrorCodes: InvalidToken: str PrivateKeyMismatch: str KeyMismatchStoringCertificate: str + class AuthorizationFingerprint: MissingFingerprint: str InvalidFormat: str @@ -60,6 +62,7 @@ class ErrorCodes: InvalidPublicKey: str InvalidSignature: str OptionsNotAllowedWithoutCustomer: str + class ClientToken: MakeDefaultRequiresCustomerId: str VerifyCardRequiresCustomerId: str @@ -68,6 +71,7 @@ class ErrorCodes: ProxyMerchantDoesNotExist: str UnsupportedVersion: str MerchantAccountDoesNotExist: str + class CreditCard: BillingAddressConflict: str BillingAddressFormatIsInvalid: str @@ -113,6 +117,7 @@ class ErrorCodes: VerificationNotSupportedOnThisMerchantAccount: str VerificationAccountTypeIsInvald: str VerificationAccountTypeNotSupported: str + class Options: UpdateExistingTokenIsInvalid: str UpdateExistingTokenNotAllowed: str @@ -124,6 +129,7 @@ class ErrorCodes: VerificationMerchantAccountIsForbidden: str VerificationMerchantAccountIsSuspended: str VerificationMerchantAccountCannotBeSubMerchantAccount: str + class Customer: CompanyIsTooLong: str CustomFieldIsInvalid: str @@ -145,6 +151,7 @@ class ErrorCodes: WebsiteIsInvalid: str WebsiteFormatIsInvalid: str WebsiteIsTooLong: str + class Descriptor: DynamicDescriptorsDisabled: str InternationalNameFormatIsInvalid: str @@ -152,6 +159,7 @@ class ErrorCodes: 
NameFormatIsInvalid: str PhoneFormatIsInvalid: str UrlFormatIsInvalid: str + class Dispute: CanOnlyAddEvidenceToOpenDispute: str CanOnlyRemoveEvidenceFromOpenDispute: str @@ -175,6 +183,7 @@ class ErrorCodes: RecurringTransactionEvidenceMissingDate: str RecurringTransactionEvidenceMissingARN: str ValidEvidenceRequiredToFinalize: str + class DocumentUpload: KindIsInvalid: str FileIsTooLarge: str @@ -182,6 +191,7 @@ class ErrorCodes: FileIsMalformedOrEncrypted: str FileIsTooLong: str FileIsEmpty: str + class Merchant: CountryCannotBeBlank: str CountryCodeAlpha2IsInvalid: str @@ -203,6 +213,7 @@ class ErrorCodes: CurrencyIsInvalid: str NoMerchantAccounts: str MerchantAccountExistsForId: str + class MerchantAccount: IdFormatIsInvalid: str IdIsInUse: str @@ -221,6 +232,7 @@ class ErrorCodes: DeclinedFailedKYC: str DeclinedSsnInvalid: str DeclinedSsnMatchesDeceased: str + class ApplicantDetails: AccountNumberIsRequired: str CompanyNameIsInvalid: str @@ -247,6 +259,7 @@ class ErrorCodes: EmailAddressIsRequired: str AccountNumberIsInvalid: str TaxIdMustBeBlank: str + class Address: LocalityIsRequired: str PostalCodeIsInvalid: str @@ -255,6 +268,7 @@ class ErrorCodes: StreetAddressIsInvalid: str StreetAddressIsRequired: str RegionIsInvalid: str + class Individual: FirstNameIsRequired: str LastNameIsRequired: str @@ -266,6 +280,7 @@ class ErrorCodes: PhoneIsInvalid: str DateOfBirthIsInvalid: str EmailAddressIsRequired: str + class Address: StreetAddressIsRequired: str LocalityIsRequired: str @@ -274,6 +289,7 @@ class ErrorCodes: StreetAddressIsInvalid: str PostalCodeIsInvalid: str RegionIsInvalid: str + class Business: DbaNameIsInvalid: str LegalNameIsInvalid: str @@ -281,10 +297,12 @@ class ErrorCodes: TaxIdIsInvalid: str TaxIdIsRequiredWithLegalName: str TaxIdMustBeBlank: str + class Address: StreetAddressIsInvalid: str PostalCodeIsInvalid: str RegionIsInvalid: str + class Funding: RoutingNumberIsRequired: str AccountNumberIsRequired: str @@ -296,12 +314,14 @@ class 
ErrorCodes: EmailAddressIsRequired: str MobilePhoneIsInvalid: str MobilePhoneIsRequired: str + class OAuth: InvalidGrant: str InvalidCredentials: str InvalidScope: str InvalidRequest: str UnsupportedGrantType: str + class Verification: ThreeDSecureAuthenticationIdIsInvalid: str ThreeDSecureAuthenticationIdDoesntMatchNonceThreeDSecureAuthentication: str @@ -313,6 +333,7 @@ class ErrorCodes: MerchantAccountDoesNotSupport3DSecure: str MerchantAcountDoesNotMatch3DSecureMerchantAccount: str AmountDoesNotMatch3DSecureAmount: str + class ThreeDSecurePassThru: EciFlagIsRequired: str EciFlagIsInvalid: str @@ -322,6 +343,7 @@ class ErrorCodes: AuthenticationResponseIsInvalid: str DirectoryResponseIsInvalid: str CavvAlgorithmIsInvalid: str + class Options: AmountCannotBeNegative: str AmountFormatIsInvalid: str @@ -333,6 +355,7 @@ class ErrorCodes: MerchantAccountCannotBeSubMerchantAccount: str AccountTypeIsInvalid: str AccountTypeNotSupported: str + class PaymentMethod: CannotForwardPaymentMethodType: str PaymentMethodParamsAreRequired: str @@ -348,8 +371,10 @@ class ErrorCodes: CannotHaveFundingSourceWithoutAccessToken: str InvalidFundingSourceSelection: str CannotUpdatePayPalAccountUsingPaymentMethodNonce: str + class Options: UsBankAccountVerificationMethodIsInvalid: str + class PayPalAccount: CannotHaveBothAccessTokenAndConsentCode: str CannotVaultOneTimeUsePayPalAccount: str @@ -362,10 +387,12 @@ class ErrorCodes: PaymentMethodNonceLocked: str PaymentMethodNonceUnknown: str TokenIsInUse: str + class SettlementBatchSummary: CustomFieldIsInvalid: str SettlementDateIsInvalid: str SettlementDateIsRequired: str + class SEPAMandate: TypeIsRequired: str IBANInvalidCharacter: str @@ -377,10 +404,12 @@ class ErrorCodes: BillingAddressConflict: str BillingAddressIdIsInvalid: str TypeIsInvalid: str + class EuropeBankAccount: BICIsRequired: str IBANIsRequired: str AccountHolderNameIsRequired: str + class Subscription: BillingDayOfMonthCannotBeUpdated: str BillingDayOfMonthIsInvalid: 
str @@ -422,6 +451,7 @@ class ErrorCodes: TrialDurationFormatIsInvalid: str TrialDurationIsRequired: str TrialDurationUnitIsInvalid: str + class Modification: AmountCannotBeBlank: str AmountIsInvalid: str @@ -444,6 +474,7 @@ class ErrorCodes: QuantityIsInvalid: str QuantityMustBeGreaterThanZero: str IdToRemoveIsInvalid: str + class Transaction: AdjustmentAmountMustBeGreaterThanZero: str AmountCannotBeNegative: str @@ -586,24 +617,30 @@ class ErrorCodes: UnsupportedVoiceAuthorization: str UsBankAccountNonceMustBePlaidVerified: str UsBankAccountNotVerified: str + class ExternalVault: StatusIsInvalid: str StatusWithPreviousNetworkTransactionIdIsInvalid: str CardTypeIsInvalid: str PreviousNetworkTransactionIdIsInvalid: str + class Options: SubmitForSettlementIsRequiredForCloning: str SubmitForSettlementIsRequiredForPayPalUnilateral: str UseBillingForShippingDisabled: str VaultIsDisabled: str + class PayPal: CustomFieldTooLong: str + class CreditCard: AccountTypeIsInvalid: str AccountTypeNotSupported: str AccountTypeDebitDoesNotSupportAuths: str + class Industry: IndustryTypeIsInvalid: str + class Lodging: EmptyData: str FolioNumberIsInvalid: str @@ -621,6 +658,7 @@ class ErrorCodes: AdvancedDepositIndicatorIsInvalid: str FireSafetyIndicatorIsInvalid: str PropertyPhoneIsInvalid: str + class TravelCruise: EmptyData: str UnknownDataField: str @@ -628,6 +666,7 @@ class ErrorCodes: DepartureDateIsInvalid: str LodgingCheckInDateIsInvalid: str LodgingCheckOutDateIsInvalid: str + class TravelFlight: EmptyData: str UnknownDataField: str @@ -648,6 +687,7 @@ class ErrorCodes: TicketNumberIsTooLong: str LegsExpected: str TooManyLegs: str + class Leg: class TravelFlight: ArrivalAirportCodeIsTooLong: str @@ -670,6 +710,7 @@ class ErrorCodes: TaxAmountFormatIsInvalid: str TaxAmountIsTooLarge: str TicketNumberIsTooLong: str + class AdditionalCharge: KindIsInvalid: str KindMustBeUnique: str @@ -677,6 +718,7 @@ class ErrorCodes: AmountFormatIsInvalid: str AmountIsTooLarge: str 
AmountIsRequired: str + class LineItem: CommodityCodeIsTooLong: str DescriptionIsTooLong: str @@ -706,6 +748,7 @@ class ErrorCodes: TaxAmountFormatIsInvalid: str TaxAmountIsTooLarge: str TaxAmountCannotBeNegative: str + class UsBankAccountVerification: NotConfirmable: str MustBeMicroTransfersVerification: str @@ -713,6 +756,7 @@ class ErrorCodes: TooManyConfirmationAttempts: str UnableToConfirmDepositAmounts: str InvalidDepositAmounts: str + class RiskData: CustomerBrowserIsTooLong: str CustomerDeviceIdIsTooLong: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/merchant_account/merchant_account.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/merchant_account/merchant_account.pyi index 4bdb762ef00d..44729c20956b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/merchant_account/merchant_account.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/merchant_account/merchant_account.pyi @@ -13,6 +13,7 @@ class MerchantAccount(Resource): Active: str Pending: str Suspended: str + class FundingDestination: Bank: str Email: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/search.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/search.pyi index a9449ff61657..14257a8d1ea0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/search.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/search.pyi @@ -6,9 +6,11 @@ class Search: def __init__(self, name) -> None: ... def __eq__(self, value): ... def is_equal(self, value): ... + class EqualityNodeBuilder(IsNodeBuilder): def __ne__(self, value): ... def is_not_equal(self, value): ... + class KeyValueNodeBuilder: name: Any def __init__(self, name) -> None: ... @@ -16,28 +18,35 @@ class Search: def is_equal(self, value): ... def __ne__(self, value): ... def is_not_equal(self, value): ... 
+ class PartialMatchNodeBuilder(EqualityNodeBuilder): def starts_with(self, value): ... def ends_with(self, value): ... + class EndsWithNodeBuilder: name: Any def __init__(self, name) -> None: ... def ends_with(self, value): ... + class TextNodeBuilder(PartialMatchNodeBuilder): def contains(self, value): ... + class Node: name: Any dict: Any def __init__(self, name, dict) -> None: ... def to_param(self): ... + class MultipleValueNodeBuilder: name: Any whitelist: Any def __init__(self, name, whitelist=...) -> None: ... def in_list(self, *values): ... def __eq__(self, value): ... + class MultipleValueOrTextNodeBuilder(TextNodeBuilder, MultipleValueNodeBuilder): def __init__(self, name, whitelist=...) -> None: ... + class RangeNodeBuilder: name: Any def __init__(self, name) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi index 33aa2bc4493a..7819c09090fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/subscription.pyi @@ -17,10 +17,12 @@ class Subscription(Resource): class TrialDurationUnit: Day: str Month: str + class Source: Api: str ControlPanel: str Recurring: str + class Status: Active: str Canceled: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi index 59cee2fe0b24..c89b67033dae 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/transaction.pyi @@ -40,6 +40,7 @@ class Transaction(Resource): class CreatedUsing: FullInformation: str Token: str + class GatewayRejectionReason: ApplicationIncomplete: str Avs: str @@ -50,16 +51,19 
@@ class Transaction(Resource): RiskThreshold: str ThreeDSecure: str TokenIssuance: str + class Source: Api: str ControlPanel: str Recurring: str + class EscrowStatus: HoldPending: str Held: str ReleasePending: str Released: str Refunded: str + class Status: AuthorizationExpired: str Authorized: str @@ -75,13 +79,16 @@ class Transaction(Resource): Settling: str SubmittedForSettlement: str Voided: str + class Type: Credit: str Sale: str + class IndustryType: Lodging: str TravelAndCruise: str TravelAndFlight: str + class AdditionalCharge: Restaurant: str GiftShop: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/us_bank_account_verification.pyi b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/us_bank_account_verification.pyi index cf4bd42cbb8b..517d5ec404f2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/us_bank_account_verification.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/braintree/braintree/us_bank_account_verification.pyi @@ -11,6 +11,7 @@ class UsBankAccountVerification(AttributeGetter): Unrecognized: str Verified: str Pending: str + class VerificationMethod: NetworkCheck: str IndependentCheck: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/METADATA.toml index de720977e07a..cb7498d03056 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/METADATA.toml @@ -1 +1 @@ -version = "4.2" +version = "4.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/__init__.pyi index c6a9eaa95bc3..06088cadd40a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/__init__.pyi +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/__init__.pyi @@ -1,6 +1,67 @@ -from .cache import Cache as Cache -from .decorators import cached as cached, cachedmethod as cachedmethod -from .lfu import LFUCache as LFUCache -from .lru import LRUCache as LRUCache -from .rr import RRCache as RRCache -from .ttl import TTLCache as TTLCache +from _typeshed import IdentityFunction +from collections.abc import Iterator, Sequence +from contextlib import AbstractContextManager +from typing import Any, Callable, Generic, MutableMapping, TypeVar, overload + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_T = TypeVar("_T") + +class Cache(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + @overload # type: ignore[override] + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T) -> _VT | _T: ... + def setdefault(self, key: _KT, default: _VT | None = ...) -> _VT: ... + @property + def maxsize(self) -> float: ... + @property + def currsize(self) -> float: ... + @staticmethod + def getsizeof(value: _VT) -> float: ... + +class FIFOCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class LFUCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class LRUCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... + +class MRUCache(Cache[_KT, _VT]): + def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... 
+ +class RRCache(Cache[_KT, _VT]): + def __init__( + self, maxsize: float, choice: Callable[[Sequence[_KT]], _KT] | None = ..., getsizeof: Callable[[_VT], float] | None = ... + ) -> None: ... + @property + def choice(self) -> Callable[[Sequence[_KT]], _KT]: ... + +class TTLCache(Cache[_KT, _VT]): + def __init__( + self, maxsize: float, ttl: float, timer: Callable[[], float] = ..., getsizeof: Callable[[_VT], float] | None = ... + ) -> None: ... + @property + def currsize(self) -> float: ... + @property + def timer(self) -> Callable[[], float]: ... + @property + def ttl(self) -> float: ... + def expire(self, time: float | None = ...) -> None: ... + +def cached( + cache: MutableMapping[_KT, Any] | None, key: Callable[..., _KT] = ..., lock: AbstractContextManager[Any] | None = ... +) -> IdentityFunction: ... +def cachedmethod( + cache: Callable[[Any], MutableMapping[_KT, Any] | None], + key: Callable[..., _KT] = ..., + lock: Callable[[Any], AbstractContextManager[Any]] | None = ..., +) -> IdentityFunction: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/abc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/abc.pyi deleted file mode 100644 index a1f0fbbdb7e6..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/abc.pyi +++ /dev/null @@ -1,7 +0,0 @@ -from abc import ABCMeta -from typing import MutableMapping, TypeVar - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class DefaultMapping(MutableMapping[_KT, _VT], metaclass=ABCMeta): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/cache.pyi index ad5ff1473ff7..a9bd3f70f626 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/cache.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/cache.pyi @@ -1,20 +1,2 @@ -from typing import Callable, Generic, Iterator, TypeVar - -from .abc import DefaultMapping as DefaultMapping - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class Cache(DefaultMapping[_KT, _VT], Generic[_KT, _VT]): - def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... - def __getitem__(self, key: _KT) -> _VT: ... - def __setitem__(self, key: _KT, value: _VT) -> None: ... - def __delitem__(self, key: _KT) -> None: ... - def __iter__(self) -> Iterator[_KT]: ... - def __len__(self) -> int: ... - @property - def maxsize(self) -> float: ... - @property - def currsize(self) -> float: ... - @staticmethod - def getsizeof(value: _VT) -> float: ... +# this module is deprecated +from . import Cache as Cache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/decorators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/decorators.pyi deleted file mode 100644 index 426fc871d665..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/decorators.pyi +++ /dev/null @@ -1,11 +0,0 @@ -from _typeshed import IdentityFunction -from typing import Any, Callable, ContextManager, MutableMapping, TypeVar - -_KT = TypeVar("_KT") - -def cached( - cache: MutableMapping[_KT, Any] | None, key: Callable[..., _KT] = ..., lock: ContextManager[Any] | None = ... -) -> IdentityFunction: ... -def cachedmethod( - cache: Callable[[Any], MutableMapping[_KT, Any] | None], key: Callable[..., _KT] = ..., lock: ContextManager[Any] | None = ... 
-) -> IdentityFunction: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/fifo.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/fifo.pyi new file mode 100644 index 000000000000..c6b386ad455a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/fifo.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . import FIFOCache as FIFOCache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/func.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/func.pyi index 75a532a00ba8..8135f8d140f3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/func.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/func.pyi @@ -3,7 +3,13 @@ from typing import Callable, Sequence, TypeVar _T = TypeVar("_T") -def lfu_cache(maxsize: float = ..., typed: bool = ...) -> IdentityFunction: ... -def lru_cache(maxsize: float = ..., typed: bool = ...) -> IdentityFunction: ... -def rr_cache(maxsize: float = ..., choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = ...) -> IdentityFunction: ... -def ttl_cache(maxsize: float = ..., ttl: float = ..., timer: float = ..., typed: bool = ...) -> IdentityFunction: ... +def fifo_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def lfu_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def lru_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def mru_cache(maxsize: float | None = ..., typed: bool = ...) -> IdentityFunction: ... +def rr_cache( + maxsize: float | None = ..., choice: Callable[[Sequence[_T]], _T] | None = ..., typed: bool = ... +) -> IdentityFunction: ... +def ttl_cache( + maxsize: float | None = ..., ttl: float = ..., timer: Callable[[], float] = ..., typed: bool = ... +) -> IdentityFunction: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/keys.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/keys.pyi index 9effbe6e171f..ef3e1120f4f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/keys.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/keys.pyi @@ -1,4 +1,4 @@ -from typing import Hashable, Tuple +from typing import Hashable -def hashkey(*args: Hashable, **kwargs: Hashable) -> Tuple[Hashable, ...]: ... -def typedkey(*args: Hashable, **kwargs: Hashable) -> Tuple[Hashable, ...]: ... +def hashkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... +def typedkey(*args: Hashable, **kwargs: Hashable) -> tuple[Hashable, ...]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lfu.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lfu.pyi index e63aeb313628..9951e65e2841 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lfu.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lfu.pyi @@ -1,14 +1,2 @@ -from typing import Callable, Iterator, TypeVar - -from .cache import Cache - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class LFUCache(Cache[_KT, _VT]): - def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... - def __getitem__(self, key: _KT, cache_getitem: Callable[[_KT], _VT] = ...) -> _VT: ... - def __setitem__(self, key: _KT, value: _VT, cache_setitem: Callable[[_KT, _VT], None] = ...) -> None: ... - def __delitem__(self, key: _KT, cache_delitem: Callable[[_KT], None] = ...) -> None: ... - def __iter__(self) -> Iterator[_KT]: ... - def __len__(self) -> int: ... +# this module is deprecated +from . 
import LFUCache as LFUCache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lru.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lru.pyi index 128a91480aff..4ea2d6413ca6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lru.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/lru.pyi @@ -1,13 +1,2 @@ -from typing import Callable, Iterator, TypeVar - -from .cache import Cache as Cache - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class LRUCache(Cache[_KT, _VT]): - def __init__(self, maxsize: float, getsizeof: Callable[[_VT], float] | None = ...) -> None: ... - def __getitem__(self, key: _KT, cache_getitem: Callable[[_KT], _VT] = ...) -> _VT: ... - def __setitem__(self, key: _KT, value: _VT, cache_setitem: Callable[[_KT, _VT], None] = ...) -> None: ... - def __delitem__(self, key: _KT, cache_delitem: Callable[[_KT], None] = ...) -> None: ... - def __iter__(self) -> Iterator[_KT]: ... +# this module is deprecated +from . import LRUCache as LRUCache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/mru.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/mru.pyi new file mode 100644 index 000000000000..b345f5262b04 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/mru.pyi @@ -0,0 +1,2 @@ +# this module is deprecated +from . 
import MRUCache as MRUCache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/rr.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/rr.pyi index aa4190d047c2..18e2098ceb66 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/rr.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/rr.pyi @@ -1,18 +1,2 @@ -from typing import Callable, Iterator, Sequence, TypeVar - -from .cache import Cache as Cache - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class RRCache(Cache[_KT, _VT]): - def __init__( - self, maxsize: float, choice: Callable[[Sequence[_KT]], _KT] | None = ..., getsizeof: Callable[[_VT], float] | None = ... - ) -> None: ... - def __getitem__(self, key: _KT) -> _VT: ... - def __setitem__(self, key: _KT, value: _VT) -> None: ... - def __delitem__(self, key: _KT) -> None: ... - def __iter__(self) -> Iterator[_KT]: ... - def __len__(self) -> int: ... - @property - def choice(self) -> Callable[[Sequence[_KT]], _KT]: ... +# this module is deprecated +from . import RRCache as RRCache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/ttl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/ttl.pyi index 12cd6072cdfb..aee05857648a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/ttl.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cachetools/cachetools/ttl.pyi @@ -1,23 +1,2 @@ -from typing import Callable, Iterator, TypeVar - -from .cache import Cache as Cache - -_KT = TypeVar("_KT") -_VT = TypeVar("_VT") - -class TTLCache(Cache[_KT, _VT]): - def __init__( - self, maxsize: float, ttl: float, timer: Callable[[], float] = ..., getsizeof: Callable[[_VT], float] | None = ... - ) -> None: ... - def __getitem__(self, key: _KT, cache_getitem: Callable[[_KT], _VT] = ...) -> _VT: ... 
- def __setitem__(self, key: _KT, value: _VT, cache_setitem: Callable[[_KT, _VT], None] = ...) -> None: ... - def __delitem__(self, key: _KT, cache_delitem: Callable[[_KT], None] = ...) -> None: ... - def __iter__(self) -> Iterator[_KT]: ... - def __len__(self) -> int: ... - @property - def currsize(self) -> float: ... - @property - def timer(self) -> Callable[[], float]: ... - @property - def ttl(self) -> float: ... - def expire(self, time: float | None = ...) -> None: ... +# this module is deprecated +from . import TTLCache as TTLCache diff --git a/packages/pyright-internal/typeshed-fallback/stubs/caldav/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/caldav/METADATA.toml index 50463eef97dd..a8d20041e01c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/caldav/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/caldav/METADATA.toml @@ -1,3 +1,3 @@ -version = "0.8" +version = "0.8.*" # also types-lxml and types-icalendar when those stubs are added requires = ["types-requests", "types-vobject"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/lib/error.pyi b/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/lib/error.pyi index 7e3ed16e6bc4..66de08c19f5d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/lib/error.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/lib/error.pyi @@ -1,14 +1,13 @@ -from typing import Any, Type - def assert_(condition: object) -> None: ... ERR_FRAGMENT: str -class AuthorizationError(Exception): - url: Any +class DAVError(Exception): + url: str | None reason: str + def __init__(self, url: str | None = ..., reason: str | None = ...) -> None: ... -class DAVError(Exception): ... +class AuthorizationError(DAVError): ... class PropsetError(DAVError): ... class ProppatchError(DAVError): ... class PropfindError(DAVError): ... @@ -19,6 +18,6 @@ class PutError(DAVError): ... class DeleteError(DAVError): ... 
class NotFoundError(DAVError): ... class ConsistencyError(DAVError): ... -class ReponseError(DAVError): ... +class ResponseError(DAVError): ... -exception_by_method: dict[str, Type[DAVError]] +exception_by_method: dict[str, type[DAVError]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/objects.pyi b/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/objects.pyi index 6f2f94ff1dc1..1774ef8c04c0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/objects.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/caldav/caldav/objects.pyi @@ -1,7 +1,7 @@ import datetime from _typeshed import Self from collections.abc import Iterable, Iterator, Mapping -from typing import Any, Type, TypeVar, overload +from typing import Any, TypeVar, overload from typing_extensions import Literal from urllib.parse import ParseResult, SplitResult @@ -101,7 +101,7 @@ class Calendar(DAVObject): @overload def search(self, xml, comp_class: None = ...) -> list[CalendarObjectResource]: ... @overload - def search(self, xml, comp_class: Type[_CC]) -> list[_CC]: ... + def search(self, xml, comp_class: type[_CC]) -> list[_CC]: ... def freebusy_request(self, start: datetime.datetime, end: datetime.datetime) -> FreeBusy: ... def todos(self, sort_keys: Iterable[str] = ..., include_completed: bool = ..., sort_key: str | None = ...) -> list[Todo]: ... def event_by_url(self, href, data: Any | None = ...) -> Event: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/certifi/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/certifi/METADATA.toml index 3f3b249a9d9c..a75d69563a04 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/certifi/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/certifi/METADATA.toml @@ -1,2 +1,2 @@ -version = "2020.4" +version = "2021.10.8" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/characteristic/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/characteristic/METADATA.toml index f14c99e0c7f6..199895dd573d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/characteristic/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/characteristic/METADATA.toml @@ -1,2 +1,2 @@ -version = "14.3" +version = "14.3.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/characteristic/characteristic/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/characteristic/characteristic/__init__.pyi index 08056c3e6c07..d60ff1a35f25 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/characteristic/characteristic/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/characteristic/characteristic/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any, AnyStr, Callable, Sequence, Type, TypeVar +from typing import Any, AnyStr, Callable, Sequence, TypeVar def with_repr(attrs: Sequence[AnyStr | Attribute]) -> Callable[..., Any]: ... def with_cmp(attrs: Sequence[AnyStr | Attribute]) -> Callable[..., Any]: ... @@ -18,7 +18,7 @@ def attributes( apply_immutable: bool = ..., store_attributes: Callable[[type, Attribute], Any] | None = ..., **kw: dict[Any, Any] | None, -) -> Callable[[Type[_T]], Type[_T]]: ... +) -> Callable[[type[_T]], type[_T]]: ... 
class Attribute: def __init__( diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/chardet/METADATA.toml index f52bb86f70f7..5df22726ce72 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/METADATA.toml @@ -1,3 +1,3 @@ -version = "4.0" +version = "4.0.*" python2 = true requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/__init__.pyi index 54e48f5c7233..7f883dabaf7b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/__init__.pyi @@ -1,5 +1,5 @@ import sys -from typing import Any, Tuple +from typing import Any from .universaldetector import UniversalDetector as UniversalDetector @@ -11,16 +11,16 @@ else: from typing_extensions import TypedDict class _LangModelType(TypedDict): - char_to_order_map: Tuple[int, ...] - precedence_matrix: Tuple[int, ...] + char_to_order_map: tuple[int, ...] + precedence_matrix: tuple[int, ...] typical_positive_ratio: float keep_english_letter: bool charset_name: str language: str class _SMModelType(TypedDict): - class_table: Tuple[int, ...] + class_table: tuple[int, ...] class_factor: int - state_table: Tuple[int, ...] - char_len_table: Tuple[int, ...] + state_table: tuple[int, ...] + char_len_table: tuple[int, ...] 
name: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langbulgarianmodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langbulgarianmodel.pyi index de07cfa7b1b4..07344de5c895 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langbulgarianmodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langbulgarianmodel.pyi @@ -1,9 +1,7 @@ -from typing import Tuple - from . import _LangModelType -Latin5_BulgarianCharToOrderMap: Tuple[int, ...] -win1251BulgarianCharToOrderMap: Tuple[int, ...] -BulgarianLangModel: Tuple[int, ...] +Latin5_BulgarianCharToOrderMap: tuple[int, ...] +win1251BulgarianCharToOrderMap: tuple[int, ...] +BulgarianLangModel: tuple[int, ...] Latin5BulgarianModel: _LangModelType Win1251BulgarianModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langcyrillicmodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langcyrillicmodel.pyi index 40a7044b1398..22e7c52dc20a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langcyrillicmodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langcyrillicmodel.pyi @@ -1,14 +1,12 @@ -from typing import Tuple - from . import _LangModelType -KOI8R_char_to_order_map: Tuple[int, ...] -win1251_char_to_order_map: Tuple[int, ...] -latin5_char_to_order_map: Tuple[int, ...] -macCyrillic_char_to_order_map: Tuple[int, ...] -IBM855_char_to_order_map: Tuple[int, ...] -IBM866_char_to_order_map: Tuple[int, ...] -RussianLangModel: Tuple[int, ...] +KOI8R_char_to_order_map: tuple[int, ...] +win1251_char_to_order_map: tuple[int, ...] +latin5_char_to_order_map: tuple[int, ...] +macCyrillic_char_to_order_map: tuple[int, ...] +IBM855_char_to_order_map: tuple[int, ...] +IBM866_char_to_order_map: tuple[int, ...] +RussianLangModel: tuple[int, ...] 
Koi8rModel: _LangModelType Win1251CyrillicModel: _LangModelType Latin5CyrillicModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langgreekmodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langgreekmodel.pyi index f0fa3e8c21d3..ceee125a2341 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langgreekmodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langgreekmodel.pyi @@ -1,9 +1,7 @@ -from typing import Tuple - from . import _LangModelType -Latin7_char_to_order_map: Tuple[int, ...] -win1253_char_to_order_map: Tuple[int, ...] -GreekLangModel: Tuple[int, ...] +Latin7_char_to_order_map: tuple[int, ...] +win1253_char_to_order_map: tuple[int, ...] +GreekLangModel: tuple[int, ...] Latin7GreekModel: _LangModelType Win1253GreekModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhebrewmodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhebrewmodel.pyi index 08bfbc91bf26..a17e10de3023 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhebrewmodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhebrewmodel.pyi @@ -1,7 +1,5 @@ -from typing import Tuple - from . import _LangModelType -WIN1255_CHAR_TO_ORDER_MAP: Tuple[int, ...] -HEBREW_LANG_MODEL: Tuple[int, ...] +WIN1255_CHAR_TO_ORDER_MAP: tuple[int, ...] +HEBREW_LANG_MODEL: tuple[int, ...] 
Win1255HebrewModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhungarianmodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhungarianmodel.pyi index 01e4a44380c2..498c7da58a9d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhungarianmodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langhungarianmodel.pyi @@ -1,9 +1,7 @@ -from typing import Tuple - from . import _LangModelType -Latin2_HungarianCharToOrderMap: Tuple[int, ...] -win1250HungarianCharToOrderMap: Tuple[int, ...] -HungarianLangModel: Tuple[int, ...] +Latin2_HungarianCharToOrderMap: tuple[int, ...] +win1250HungarianCharToOrderMap: tuple[int, ...] +HungarianLangModel: tuple[int, ...] Latin2HungarianModel: _LangModelType Win1250HungarianModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langthaimodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langthaimodel.pyi index 93149e72b16c..eee2356e8ead 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langthaimodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langthaimodel.pyi @@ -1,7 +1,5 @@ -from typing import Tuple - from . import _LangModelType -TIS620CharToOrderMap: Tuple[int, ...] -ThaiLangModel: Tuple[int, ...] +TIS620CharToOrderMap: tuple[int, ...] +ThaiLangModel: tuple[int, ...] TIS620ThaiModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langturkishmodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langturkishmodel.pyi index 65b1bdcbbe2d..6686f262d619 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langturkishmodel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/chardet/chardet/langturkishmodel.pyi @@ -1,7 +1,5 @@ -from typing import Tuple - from . 
import _LangModelType -Latin5_TurkishCharToOrderMap: Tuple[int, ...] -TurkishLangModel: Tuple[int, ...] +Latin5_TurkishCharToOrderMap: tuple[int, ...] +TurkishLangModel: tuple[int, ...] Latin5TurkishModel: _LangModelType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/chevron/METADATA.toml new file mode 100755 index 000000000000..c562b663336b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/chevron/METADATA.toml @@ -0,0 +1 @@ +version = "0.14.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/__init__.pyi new file mode 100755 index 000000000000..472809791640 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/__init__.pyi @@ -0,0 +1,5 @@ +from .main import cli_main as cli_main, main as main +from .renderer import render as render +from .tokenizer import ChevronError as ChevronError + +__all__ = ["main", "render", "cli_main", "ChevronError"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/main.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/main.pyi new file mode 100755 index 000000000000..8b0f75583a3e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/main.pyi @@ -0,0 +1,7 @@ +from _typeshed import StrOrBytesPath +from typing import Any + +_OpenFile = StrOrBytesPath | int + +def main(template: _OpenFile, data: _OpenFile | None = ..., **kwargs: Any) -> str: ... +def cli_main() -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/metadata.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/metadata.pyi new file mode 100755 index 000000000000..c2ee2cab489b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/metadata.pyi @@ -0,0 +1 @@ +version: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/renderer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/renderer.pyi new file mode 100755 index 000000000000..232253d916ba --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/renderer.pyi @@ -0,0 +1,17 @@ +from _typeshed import StrPath, SupportsRead +from typing import Any, MutableSequence, Sequence + +g_token_cache: dict[str, list[tuple[str, str]]] # undocumented + +def render( + template: SupportsRead[str] | str | Sequence[tuple[str, str]] = ..., + data: dict[str, Any] = ..., + partials_path: StrPath | None = ..., + partials_ext: str = ..., + partials_dict: dict[str, str] = ..., + padding: str = ..., + def_ldel: str | None = ..., + def_rdel: str | None = ..., + scopes: MutableSequence[int] | None = ..., + warn: bool = ..., +) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/tokenizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/tokenizer.pyi new file mode 100755 index 000000000000..cb362e61d62e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/chevron/chevron/tokenizer.pyi @@ -0,0 +1,11 @@ +from typing import Iterator + +class ChevronError(SyntaxError): ... + +def grab_literal(template: str, l_del: str | None) -> tuple[str, str]: ... # undocumented +def l_sa_check(template: str, literal: str, is_standalone: bool) -> bool | None: ... # undocumented +def r_sa_check(template: str, tag_type: str, is_standalone: bool) -> bool: ... 
# undocumented +def parse_tag(template: str, l_del: str | None, r_del: str | None) -> tuple[tuple[str, str], str]: ... # undocumented +def tokenize( + template: str, def_ldel: str | None = ..., def_rdel: str | None = ... +) -> Iterator[tuple[str, str]]: ... # undocumented diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/METADATA.toml index e43ccaae3fb9..f511d7da692f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/METADATA.toml @@ -1,3 +1,3 @@ -version = "0.1" +version = "0.1.*" python2 = true requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/click_spinner/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/click_spinner/__init__.pyi index 5ec098ed3421..d16f3f73f25e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/click_spinner/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/click-spinner/click_spinner/__init__.pyi @@ -1,7 +1,8 @@ import threading +from _typeshed import Self from types import TracebackType -from typing import Iterator, Type -from typing_extensions import Literal, Protocol +from typing import Iterator, Protocol +from typing_extensions import Literal __version__: str @@ -22,9 +23,9 @@ class Spinner(object): def start(self) -> None: ... def stop(self) -> None: ... def init_spin(self) -> None: ... - def __enter__(self) -> Spinner: ... + def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None ) -> Literal[False]: ... def spinner(beep: bool, disable: bool, force: bool, stream: _Stream) -> Spinner: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/click/METADATA.toml deleted file mode 100644 index 7b088028d1c5..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/METADATA.toml +++ /dev/null @@ -1,3 +0,0 @@ -version = "7.1" -python2 = true -obsolete_since = "8.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/README.md b/packages/pyright-internal/typeshed-fallback/stubs/click/README.md deleted file mode 100644 index aec5fa9110c4..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# click 7.0 - -`__init__.pyi` is almost a copy of `click/__init__.py`. It's a shortcut module -anyway in the actual sources so it works well with minimal changes. - -The types are pretty complete but they were created mostly for public API use -so some internal modules (`_compat`) or functions (`core._bashcomplete`) are -deliberately missing. If you feel the need to add those, pull requests accepted. - -Speaking of pull requests, it would be great if the option decorators informed -the type checker on what types the command callback should accept. 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/__init__.pyi deleted file mode 100644 index b588e6cefa26..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/__init__.pyi +++ /dev/null @@ -1,84 +0,0 @@ -from .core import ( - Argument as Argument, - BaseCommand as BaseCommand, - Command as Command, - CommandCollection as CommandCollection, - Context as Context, - Group as Group, - MultiCommand as MultiCommand, - Option as Option, - Parameter as Parameter, -) -from .decorators import ( - argument as argument, - command as command, - confirmation_option as confirmation_option, - group as group, - help_option as help_option, - make_pass_decorator as make_pass_decorator, - option as option, - pass_context as pass_context, - pass_obj as pass_obj, - password_option as password_option, - version_option as version_option, -) -from .exceptions import ( - Abort as Abort, - BadArgumentUsage as BadArgumentUsage, - BadOptionUsage as BadOptionUsage, - BadParameter as BadParameter, - ClickException as ClickException, - FileError as FileError, - MissingParameter as MissingParameter, - NoSuchOption as NoSuchOption, - UsageError as UsageError, -) -from .formatting import HelpFormatter as HelpFormatter, wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .parser import OptionParser as OptionParser -from .termui import ( - clear as clear, - confirm as confirm, - echo_via_pager as echo_via_pager, - edit as edit, - get_terminal_size as get_terminal_size, - getchar as getchar, - launch as launch, - pause as pause, - progressbar as progressbar, - prompt as prompt, - secho as secho, - style as style, - unstyle as unstyle, -) -from .types import ( - BOOL as BOOL, - FLOAT as FLOAT, - INT as INT, - STRING as STRING, - UNPROCESSED as UNPROCESSED, - UUID as UUID, - Choice as Choice, - DateTime as DateTime, - File as File, - 
FloatRange as FloatRange, - IntRange as IntRange, - ParamType as ParamType, - Path as Path, - Tuple as Tuple, -) -from .utils import ( - echo as echo, - format_filename as format_filename, - get_app_dir as get_app_dir, - get_binary_stream as get_binary_stream, - get_os_args as get_os_args, - get_text_stream as get_text_stream, - open_file as open_file, -) - -# Controls if click should emit the warning about the use of unicode -# literals. -disable_unicode_literals_warning: bool - -__version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/_termui_impl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/_termui_impl.pyi deleted file mode 100644 index 9e373295d8fd..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/_termui_impl.pyi +++ /dev/null @@ -1,17 +0,0 @@ -from types import TracebackType -from typing import Generic, Type, TypeVar - -_T = TypeVar("_T") - -class ProgressBar(Generic[_T]): - def update(self, n_steps: int) -> None: ... - def finish(self) -> None: ... - def __enter__(self) -> ProgressBar[_T]: ... - def __exit__( - self, exctype: Type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None - ) -> None: ... - def __iter__(self) -> ProgressBar[_T]: ... - def next(self) -> _T: ... - def __next__(self) -> _T: ... 
- length: int | None - label: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/core.pyi deleted file mode 100644 index b19e8aeb8ec6..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/core.pyi +++ /dev/null @@ -1,272 +0,0 @@ -from typing import Any, Callable, ContextManager, Iterable, Mapping, NoReturn, Optional, Sequence, Set, Tuple, TypeVar, Union - -from click.formatting import HelpFormatter -from click.parser import OptionParser - -_CC = TypeVar("_CC", bound=Callable[[], Any]) - -def invoke_param_callback( - callback: Callable[[Context, Parameter, str | None], Any], ctx: Context, param: Parameter, value: str | None -) -> Any: ... -def augment_usage_errors(ctx: Context, param: Parameter | None = ...) -> ContextManager[None]: ... -def iter_params_for_processing( - invocation_order: Sequence[Parameter], declaration_order: Iterable[Parameter] -) -> Iterable[Parameter]: ... 
- -class Context: - parent: Context | None - command: Command - info_name: str | None - params: dict[Any, Any] - args: list[str] - protected_args: list[str] - obj: Any - default_map: Mapping[str, Any] | None - invoked_subcommand: str | None - terminal_width: int | None - max_content_width: int | None - allow_extra_args: bool - allow_interspersed_args: bool - ignore_unknown_options: bool - help_option_names: list[str] - token_normalize_func: Callable[[str], str] | None - resilient_parsing: bool - auto_envvar_prefix: str | None - color: bool | None - _meta: dict[str, Any] - _close_callbacks: list[Any] - _depth: int - def __init__( - self, - command: Command, - parent: Context | None = ..., - info_name: str | None = ..., - obj: Any | None = ..., - auto_envvar_prefix: str | None = ..., - default_map: Mapping[str, Any] | None = ..., - terminal_width: int | None = ..., - max_content_width: int | None = ..., - resilient_parsing: bool = ..., - allow_extra_args: bool | None = ..., - allow_interspersed_args: bool | None = ..., - ignore_unknown_options: bool | None = ..., - help_option_names: list[str] | None = ..., - token_normalize_func: Callable[[str], str] | None = ..., - color: bool | None = ..., - ) -> None: ... - @property - def meta(self) -> dict[str, Any]: ... - @property - def command_path(self) -> str: ... - def scope(self, cleanup: bool = ...) -> ContextManager[Context]: ... - def make_formatter(self) -> HelpFormatter: ... - def call_on_close(self, f: _CC) -> _CC: ... - def close(self) -> None: ... - def find_root(self) -> Context: ... - def find_object(self, object_type: type) -> Any: ... - def ensure_object(self, object_type: type) -> Any: ... - def lookup_default(self, name: str) -> Any: ... - def fail(self, message: str) -> NoReturn: ... - def abort(self) -> NoReturn: ... - def exit(self, code: int | str = ...) -> NoReturn: ... - def get_usage(self) -> str: ... - def get_help(self) -> str: ... 
- def invoke(self, callback: Command | Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ... - def forward(self, callback: Command | Callable[..., Any], *args: Any, **kwargs: Any) -> Any: ... - -class BaseCommand: - allow_extra_args: bool - allow_interspersed_args: bool - ignore_unknown_options: bool - name: str - context_settings: dict[Any, Any] - def __init__(self, name: str, context_settings: dict[Any, Any] | None = ...) -> None: ... - def get_usage(self, ctx: Context) -> str: ... - def get_help(self, ctx: Context) -> str: ... - def make_context(self, info_name: str, args: list[str], parent: Context | None = ..., **extra: Any) -> Context: ... - def parse_args(self, ctx: Context, args: list[str]) -> list[str]: ... - def invoke(self, ctx: Context) -> Any: ... - def main( - self, - args: list[str] | None = ..., - prog_name: str | None = ..., - complete_var: str | None = ..., - standalone_mode: bool = ..., - **extra: Any, - ) -> Any: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... - -class Command(BaseCommand): - callback: Callable[..., Any] | None - params: list[Parameter] - help: str | None - epilog: str | None - short_help: str | None - options_metavar: str - add_help_option: bool - no_args_is_help: bool - hidden: bool - deprecated: bool - def __init__( - self, - name: str, - context_settings: dict[Any, Any] | None = ..., - callback: Callable[..., Any] | None = ..., - params: list[Parameter] | None = ..., - help: str | None = ..., - epilog: str | None = ..., - short_help: str | None = ..., - options_metavar: str = ..., - add_help_option: bool = ..., - no_args_is_help: bool = ..., - hidden: bool = ..., - deprecated: bool = ..., - ) -> None: ... - def get_params(self, ctx: Context) -> list[Parameter]: ... - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: ... - def collect_usage_pieces(self, ctx: Context) -> list[str]: ... - def get_help_option_names(self, ctx: Context) -> Set[str]: ... 
- def get_help_option(self, ctx: Context) -> Option | None: ... - def make_parser(self, ctx: Context) -> OptionParser: ... - def get_short_help_str(self, limit: int = ...) -> str: ... - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: ... - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: ... - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: ... - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: ... - -_T = TypeVar("_T") -_F = TypeVar("_F", bound=Callable[..., Any]) - -class MultiCommand(Command): - no_args_is_help: bool - invoke_without_command: bool - subcommand_metavar: str - chain: bool - result_callback: Callable[..., Any] - def __init__( - self, - name: str | None = ..., - invoke_without_command: bool = ..., - no_args_is_help: bool | None = ..., - subcommand_metavar: str | None = ..., - chain: bool = ..., - result_callback: Callable[..., Any] | None = ..., - **attrs: Any, - ) -> None: ... - def resultcallback(self, replace: bool = ...) -> Callable[[_F], _F]: ... - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: ... - def resolve_command(self, ctx: Context, args: list[str]) -> Tuple[str, Command, list[str]]: ... - def get_command(self, ctx: Context, cmd_name: str) -> Command | None: ... - def list_commands(self, ctx: Context) -> Iterable[str]: ... - -class Group(MultiCommand): - commands: dict[str, Command] - def __init__(self, name: str | None = ..., commands: dict[str, Command] | None = ..., **attrs: Any) -> None: ... - def add_command(self, cmd: Command, name: str | None = ...) -> None: ... - def command(self, *args: Any, **kwargs: Any) -> Callable[[Callable[..., Any]], Command]: ... - def group(self, *args: Any, **kwargs: Any) -> Callable[[Callable[..., Any]], Group]: ... 
- -class CommandCollection(MultiCommand): - sources: list[MultiCommand] - def __init__(self, name: str | None = ..., sources: list[MultiCommand] | None = ..., **attrs: Any) -> None: ... - def add_source(self, multi_cmd: MultiCommand) -> None: ... - -class _ParamType: - name: str - is_composite: bool - envvar_list_splitter: str | None - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> Any: ... - def get_metavar(self, param: Parameter) -> str: ... - def get_missing_message(self, param: Parameter) -> str: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> Any: ... - def split_envvar_value(self, rv: str) -> list[str]: ... - def fail(self, message: str, param: Parameter | None = ..., ctx: Context | None = ...) -> NoReturn: ... - -# This type is here to resolve https://github.com/python/mypy/issues/5275 -_ConvertibleType = Union[ - type, _ParamType, Tuple[Union[type, _ParamType], ...], Callable[[str], Any], Callable[[Optional[str]], Any] -] - -class Parameter: - param_type_name: str - name: str - opts: list[str] - secondary_opts: list[str] - type: _ParamType - required: bool - callback: Callable[[Context, Parameter, str], Any] | None - nargs: int - multiple: bool - expose_value: bool - default: Any - is_eager: bool - metavar: str | None - envvar: str | list[str] | None - def __init__( - self, - param_decls: Iterable[str] | None = ..., - type: _ConvertibleType | None = ..., - required: bool = ..., - default: Any | None = ..., - callback: Callable[[Context, Parameter, str], Any] | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., - ) -> None: ... - @property - def human_readable_name(self) -> str: ... - def make_metavar(self) -> str: ... - def get_default(self, ctx: Context) -> Any: ... - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: ... 
- def consume_value(self, ctx: Context, opts: dict[str, Any]) -> Any: ... - def type_cast_value(self, ctx: Context, value: Any) -> Any: ... - def process_value(self, ctx: Context, value: Any) -> Any: ... - def value_is_missing(self, value: Any) -> bool: ... - def full_process_value(self, ctx: Context, value: Any) -> Any: ... - def resolve_envvar_value(self, ctx: Context) -> str: ... - def value_from_envvar(self, ctx: Context) -> str | list[str]: ... - def handle_parse_result(self, ctx: Context, opts: dict[str, Any], args: list[str]) -> Tuple[Any, list[str]]: ... - def get_help_record(self, ctx: Context) -> Tuple[str, str]: ... - def get_usage_pieces(self, ctx: Context) -> list[str]: ... - def get_error_hint(self, ctx: Context) -> str: ... - -class Option(Parameter): - prompt: str # sic - confirmation_prompt: bool - hide_input: bool - is_flag: bool - flag_value: Any - is_bool_flag: bool - count: bool - multiple: bool - allow_from_autoenv: bool - help: str | None - hidden: bool - show_default: bool - show_choices: bool - show_envvar: bool - def __init__( - self, - param_decls: Iterable[str] | None = ..., - show_default: bool = ..., - prompt: bool | str = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool | None = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _ConvertibleType | None = ..., - help: str | None = ..., - hidden: bool = ..., - show_choices: bool = ..., - show_envvar: bool = ..., - **attrs: Any, - ) -> None: ... - def prompt_for_value(self, ctx: Context) -> Any: ... - -class Argument(Parameter): - def __init__(self, param_decls: Iterable[str] | None = ..., required: bool | None = ..., **attrs: Any) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/decorators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/decorators.pyi deleted file mode 100644 index 2401e060e868..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/decorators.pyi +++ /dev/null @@ -1,292 +0,0 @@ -from _typeshed import IdentityFunction -from distutils.version import Version -from typing import Any, Callable, Iterable, Text, Tuple, Type, TypeVar, Union, overload - -from click.core import Argument, Command, Context, Group, Option, Parameter, _ConvertibleType - -_T = TypeVar("_T") -_F = TypeVar("_F", bound=Callable[..., Any]) - -_Callback = Callable[[Context, Union[Option, Parameter], Any], Any] - -def pass_context(__f: _T) -> _T: ... -def pass_obj(__f: _T) -> _T: ... -def make_pass_decorator(object_type: type, ensure: bool = ...) -> IdentityFunction: ... - -# NOTE: Decorators below have **attrs converted to concrete constructor -# arguments from core.pyi to help with type checking. - -def command( - name: str | None = ..., - cls: Type[Command] | None = ..., - # Command - context_settings: dict[Any, Any] | None = ..., - help: str | None = ..., - epilog: str | None = ..., - short_help: str | None = ..., - options_metavar: str = ..., - add_help_option: bool = ..., - no_args_is_help: bool = ..., - hidden: bool = ..., - deprecated: bool = ..., -) -> Callable[[Callable[..., Any]], Command]: ... - -# This inherits attrs from Group, MultiCommand and Command. 
- -def group( - name: str | None = ..., - cls: Type[Command] = ..., - # Group - commands: dict[str, Command] | None = ..., - # MultiCommand - invoke_without_command: bool = ..., - no_args_is_help: bool | None = ..., - subcommand_metavar: str | None = ..., - chain: bool = ..., - result_callback: Callable[..., Any] | None = ..., - # Command - help: str | None = ..., - epilog: str | None = ..., - short_help: str | None = ..., - options_metavar: str = ..., - add_help_option: bool = ..., - hidden: bool = ..., - deprecated: bool = ..., - # User-defined - **kwargs: Any, -) -> Callable[[Callable[..., Any]], Group]: ... -def argument( - *param_decls: Text, - cls: Type[Argument] = ..., - # Argument - required: bool | None = ..., - # Parameter - type: _ConvertibleType | None = ..., - default: Any | None = ..., - callback: _Callback | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., - autocompletion: Callable[[Context, list[str], str], Iterable[str | Tuple[str, str]]] | None = ..., -) -> IdentityFunction: ... -@overload -def option( - *param_decls: Text, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool | None = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _ConvertibleType | None = ..., - help: Text | None = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - required: bool = ..., - callback: _Callback | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., - # User-defined - **kwargs: Any, -) -> IdentityFunction: ... 
-@overload -def option( - *param_decls: str, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool | None = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _T = ..., - help: str | None = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - required: bool = ..., - callback: Callable[[Context, Option | Parameter, bool | int | str], _T] | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., - # User-defined - **kwargs: Any, -) -> IdentityFunction: ... -@overload -def option( - *param_decls: str, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool | None = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: Type[str] = ..., - help: str | None = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - required: bool = ..., - callback: Callable[[Context, Option | Parameter, str], Any] = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., - # User-defined - **kwargs: Any, -) -> IdentityFunction: ... 
-@overload -def option( - *param_decls: str, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool | None = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: Type[int] = ..., - help: str | None = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - required: bool = ..., - callback: Callable[[Context, Option | Parameter, int], Any] = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., - # User-defined - **kwargs: Any, -) -> IdentityFunction: ... -def confirmation_option( - *param_decls: str, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _ConvertibleType | None = ..., - help: str = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - callback: _Callback | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., -) -> IdentityFunction: ... 
-def password_option( - *param_decls: str, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool | None = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _ConvertibleType | None = ..., - help: str | None = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - callback: _Callback | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., -) -> IdentityFunction: ... -def version_option( - version: str | Version | None = ..., - *param_decls: str, - cls: Type[Option] = ..., - # Option - prog_name: str | None = ..., - message: str | None = ..., - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _ConvertibleType | None = ..., - help: str = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - callback: _Callback | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., -) -> IdentityFunction: ... 
-def help_option( - *param_decls: str, - cls: Type[Option] = ..., - # Option - show_default: bool | Text = ..., - prompt: bool | Text = ..., - confirmation_prompt: bool = ..., - hide_input: bool = ..., - is_flag: bool = ..., - flag_value: Any | None = ..., - multiple: bool = ..., - count: bool = ..., - allow_from_autoenv: bool = ..., - type: _ConvertibleType | None = ..., - help: str = ..., - show_choices: bool = ..., - # Parameter - default: Any | None = ..., - callback: _Callback | None = ..., - nargs: int | None = ..., - metavar: str | None = ..., - expose_value: bool = ..., - is_eager: bool = ..., - envvar: str | list[str] | None = ..., -) -> IdentityFunction: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/exceptions.pyi deleted file mode 100644 index fa1a2c3e7911..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/exceptions.pyi +++ /dev/null @@ -1,59 +0,0 @@ -from typing import IO, Any - -from click.core import Command, Context, Parameter - -class ClickException(Exception): - exit_code: int - message: str - def __init__(self, message: str) -> None: ... - def format_message(self) -> str: ... - def show(self, file: Any | None = ...) -> None: ... - -class UsageError(ClickException): - ctx: Context | None - cmd: Command | None - def __init__(self, message: str, ctx: Context | None = ...) -> None: ... - def show(self, file: IO[Any] | None = ...) -> None: ... - -class BadParameter(UsageError): - param: Parameter | None - param_hint: str | None - def __init__( - self, message: str, ctx: Context | None = ..., param: Parameter | None = ..., param_hint: str | None = ... - ) -> None: ... 
- -class MissingParameter(BadParameter): - param_type: str # valid values: 'parameter', 'option', 'argument' - def __init__( - self, - message: str | None = ..., - ctx: Context | None = ..., - param: Parameter | None = ..., - param_hint: str | None = ..., - param_type: str | None = ..., - ) -> None: ... - -class NoSuchOption(UsageError): - option_name: str - possibilities: list[str] | None - def __init__( - self, option_name: str, message: str | None = ..., possibilities: list[str] | None = ..., ctx: Context | None = ... - ) -> None: ... - -class BadOptionUsage(UsageError): - option_name: str - def __init__(self, option_name: str, message: str, ctx: Context | None = ...) -> None: ... - -class BadArgumentUsage(UsageError): - def __init__(self, message: str, ctx: Context | None = ...) -> None: ... - -class FileError(ClickException): - ui_filename: str - filename: str - def __init__(self, filename: str, hint: str | None = ...) -> None: ... - -class Abort(RuntimeError): ... - -class Exit(RuntimeError): - exit_code: int - def __init__(self, code: int = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/formatting.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/formatting.pyi deleted file mode 100644 index 08c607a66837..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/formatting.pyi +++ /dev/null @@ -1,29 +0,0 @@ -from typing import ContextManager, Generator, Iterable, Tuple - -FORCED_WIDTH: int | None - -def measure_table(rows: Iterable[Iterable[str]]) -> Tuple[int, ...]: ... -def iter_rows(rows: Iterable[Iterable[str]], col_count: int) -> Generator[Tuple[str, ...], None, None]: ... -def wrap_text( - text: str, width: int = ..., initial_indent: str = ..., subsequent_indent: str = ..., preserve_paragraphs: bool = ... -) -> str: ... 
- -class HelpFormatter: - indent_increment: int - width: int | None - current_indent: int - buffer: list[str] - def __init__(self, indent_increment: int = ..., width: int | None = ..., max_width: int | None = ...) -> None: ... - def write(self, string: str) -> None: ... - def indent(self) -> None: ... - def dedent(self) -> None: ... - def write_usage(self, prog: str, args: str = ..., prefix: str = ...) -> None: ... - def write_heading(self, heading: str) -> None: ... - def write_paragraph(self) -> None: ... - def write_text(self, text: str) -> None: ... - def write_dl(self, rows: Iterable[Iterable[str]], col_max: int = ..., col_spacing: int = ...) -> None: ... - def section(self, name: str) -> ContextManager[None]: ... - def indentation(self) -> ContextManager[None]: ... - def getvalue(self) -> str: ... - -def join_options(options: list[str]) -> Tuple[str, bool]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/globals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/globals.pyi deleted file mode 100644 index bdf259936476..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/globals.pyi +++ /dev/null @@ -1,6 +0,0 @@ -from click.core import Context - -def get_current_context(silent: bool = ...) -> Context: ... -def push_context(ctx: Context) -> None: ... -def pop_context() -> None: ... -def resolve_color_default(color: bool | None = ...) -> bool | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/parser.pyi deleted file mode 100644 index 70324011b50c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/parser.pyi +++ /dev/null @@ -1,65 +0,0 @@ -from typing import Any, Iterable, Set, Tuple - -from click.core import Context - -def _unpack_args(args: Iterable[str], nargs_spec: Iterable[int]) -> Tuple[Tuple[Tuple[str, ...] | None, ...], list[str]]: ... 
-def split_opt(opt: str) -> Tuple[str, str]: ... -def normalize_opt(opt: str, ctx: Context) -> str: ... -def split_arg_string(string: str) -> list[str]: ... - -class Option: - dest: str - action: str - nargs: int - const: Any - obj: Any - prefixes: Set[str] - _short_opts: list[str] - _long_opts: list[str] - def __init__( - self, - opts: Iterable[str], - dest: str, - action: str | None = ..., - nargs: int = ..., - const: Any | None = ..., - obj: Any | None = ..., - ) -> None: ... - @property - def takes_value(self) -> bool: ... - def process(self, value: Any, state: ParsingState) -> None: ... - -class Argument: - dest: str - nargs: int - obj: Any - def __init__(self, dest: str, nargs: int = ..., obj: Any | None = ...) -> None: ... - def process(self, value: Any, state: ParsingState) -> None: ... - -class ParsingState: - opts: dict[str, Any] - largs: list[str] - rargs: list[str] - order: list[Any] - def __init__(self, rargs: list[str]) -> None: ... - -class OptionParser: - ctx: Context | None - allow_interspersed_args: bool - ignore_unknown_options: bool - _short_opt: dict[str, Option] - _long_opt: dict[str, Option] - _opt_prefixes: Set[str] - _args: list[Argument] - def __init__(self, ctx: Context | None = ...) -> None: ... - def add_option( - self, - opts: Iterable[str], - dest: str, - action: str | None = ..., - nargs: int = ..., - const: Any | None = ..., - obj: Any | None = ..., - ) -> None: ... - def add_argument(self, dest: str, nargs: int = ..., obj: Any | None = ...) -> None: ... - def parse_args(self, args: list[str]) -> Tuple[dict[str, Any], list[str], list[Any]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/termui.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/termui.pyi deleted file mode 100644 index 8345c47fe3ad..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/termui.pyi +++ /dev/null @@ -1,104 +0,0 @@ -from typing import IO, Any, Callable, Generator, Iterable, Text, Tuple, TypeVar, overload - -from click._termui_impl import ProgressBar as _ProgressBar -from click.core import _ConvertibleType - -def hidden_prompt_func(prompt: str) -> str: ... -def _build_prompt(text: str, suffix: str, show_default: bool = ..., default: str | None = ...) -> str: ... -def prompt( - text: str, - default: str | None = ..., - hide_input: bool = ..., - confirmation_prompt: bool = ..., - type: _ConvertibleType | None = ..., - value_proc: Callable[[str | None], Any] | None = ..., - prompt_suffix: str = ..., - show_default: bool = ..., - err: bool = ..., - show_choices: bool = ..., -) -> Any: ... -def confirm( - text: str, default: bool = ..., abort: bool = ..., prompt_suffix: str = ..., show_default: bool = ..., err: bool = ... -) -> bool: ... -def get_terminal_size() -> Tuple[int, int]: ... -def echo_via_pager( - text_or_generator: str | Iterable[str] | Callable[[], Generator[str, None, None]], color: bool | None = ... -) -> None: ... - -_T = TypeVar("_T") - -@overload -def progressbar( - iterable: Iterable[_T], - length: int | None = ..., - label: str | None = ..., - show_eta: bool = ..., - show_percent: bool | None = ..., - show_pos: bool = ..., - item_show_func: Callable[[_T], str] | None = ..., - fill_char: str = ..., - empty_char: str = ..., - bar_template: str = ..., - info_sep: str = ..., - width: int = ..., - file: IO[Any] | None = ..., - color: bool | None = ..., -) -> _ProgressBar[_T]: ... 
-@overload -def progressbar( - iterable: None = ..., - length: int | None = ..., - label: str | None = ..., - show_eta: bool = ..., - show_percent: bool | None = ..., - show_pos: bool = ..., - item_show_func: Callable[[Any], str] | None = ..., - fill_char: str = ..., - empty_char: str = ..., - bar_template: str = ..., - info_sep: str = ..., - width: int = ..., - file: IO[Any] | None = ..., - color: bool | None = ..., -) -> _ProgressBar[int]: ... -def clear() -> None: ... -def style( - text: Text, - fg: Text | None = ..., - bg: Text | None = ..., - bold: bool | None = ..., - dim: bool | None = ..., - underline: bool | None = ..., - blink: bool | None = ..., - reverse: bool | None = ..., - reset: bool = ..., -) -> str: ... -def unstyle(text: Text) -> str: ... - -# Styling options copied from style() for nicer type checking. -def secho( - message: str | None = ..., - file: IO[Any] | None = ..., - nl: bool = ..., - err: bool = ..., - color: bool | None = ..., - fg: str | None = ..., - bg: str | None = ..., - bold: bool | None = ..., - dim: bool | None = ..., - underline: bool | None = ..., - blink: bool | None = ..., - reverse: bool | None = ..., - reset: bool = ..., -) -> None: ... -def edit( - text: str | None = ..., - editor: str | None = ..., - env: str | None = ..., - require_save: bool = ..., - extension: str = ..., - filename: str | None = ..., -) -> str: ... -def launch(url: str, wait: bool = ..., locate: bool = ...) -> int: ... -def getchar(echo: bool = ...) -> Text: ... -def pause(info: str = ..., err: bool = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/testing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/testing.pyi deleted file mode 100644 index d83881b59bdf..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/testing.pyi +++ /dev/null @@ -1,65 +0,0 @@ -import io -from typing import IO, Any, BinaryIO, ContextManager, Iterable, Mapping, Text, Tuple -from typing_extensions import Literal - -from .core import BaseCommand - -clickpkg: Any - -class EchoingStdin: - def __init__(self, input: BinaryIO, output: BinaryIO) -> None: ... - def __getattr__(self, x: str) -> Any: ... - def read(self, n: int = ...) -> bytes: ... - def readline(self, n: int = ...) -> bytes: ... - def readlines(self) -> list[bytes]: ... - def __iter__(self) -> Iterable[bytes]: ... - -def make_input_stream(input: bytes | Text | IO[Any] | None, charset: Text) -> BinaryIO: ... - -class Result: - runner: CliRunner - exit_code: int - exception: Any - exc_info: Any | None - stdout_bytes: bytes - stderr_bytes: bytes - def __init__( - self, - runner: CliRunner, - stdout_bytes: bytes, - stderr_bytes: bytes, - exit_code: int, - exception: Any, - exc_info: Any | None = ..., - ) -> None: ... - @property - def output(self) -> Text: ... - @property - def stdout(self) -> Text: ... - @property - def stderr(self) -> Text: ... - -class CliRunner: - charset: str - env: Mapping[str, str] - echo_stdin: bool - mix_stderr: bool - def __init__( - self, charset: Text | None = ..., env: Mapping[str, str] | None = ..., echo_stdin: bool = ..., mix_stderr: bool = ... - ) -> None: ... - def get_default_prog_name(self, cli: BaseCommand) -> str: ... - def make_env(self, overrides: Mapping[str, str] | None = ...) -> dict[str, str]: ... - def isolation( - self, input: bytes | Text | IO[Any] | None = ..., env: Mapping[str, str] | None = ..., color: bool = ... - ) -> ContextManager[Tuple[io.BytesIO, io.BytesIO | Literal[False]]]: ... 
- def invoke( - self, - cli: BaseCommand, - args: str | Iterable[str] | None = ..., - input: bytes | Text | IO[Any] | None = ..., - env: Mapping[str, str] | None = ..., - catch_exceptions: bool = ..., - color: bool = ..., - **extra: Any, - ) -> Result: ... - def isolated_filesystem(self) -> ContextManager[str]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/types.pyi deleted file mode 100644 index f1e9b9ca078c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/types.pyi +++ /dev/null @@ -1,125 +0,0 @@ -import datetime -import uuid -from typing import IO, Any, Callable, Generic, Iterable, Optional, Sequence, Text, Tuple as _PyTuple, Type, TypeVar, Union - -from click.core import Context, Parameter, _ConvertibleType, _ParamType - -ParamType = _ParamType - -class BoolParamType(ParamType): - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> bool: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> bool: ... - -class CompositeParamType(ParamType): - arity: int - -class Choice(ParamType): - choices: Iterable[str] - case_sensitive: bool - def __init__(self, choices: Iterable[str], case_sensitive: bool = ...) -> None: ... - -class DateTime(ParamType): - formats: Sequence[str] - def __init__(self, formats: Sequence[str] | None = ...) -> None: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> datetime.datetime: ... - -class FloatParamType(ParamType): - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> float: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> float: ... 
- -class FloatRange(FloatParamType): - min: float | None - max: float | None - clamp: bool - def __init__(self, min: float | None = ..., max: float | None = ..., clamp: bool = ...) -> None: ... - -class File(ParamType): - mode: str - encoding: str | None - errors: str | None - lazy: bool | None - atomic: bool - def __init__( - self, - mode: Text = ..., - encoding: str | None = ..., - errors: str | None = ..., - lazy: bool | None = ..., - atomic: bool | None = ..., - ) -> None: ... - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> IO[Any]: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> IO[Any]: ... - def resolve_lazy_flag(self, value: str) -> bool: ... - -_F = TypeVar("_F") # result of the function -_Func = Callable[[Optional[str]], _F] - -class FuncParamType(ParamType, Generic[_F]): - func: _Func[_F] - def __init__(self, func: _Func[_F]) -> None: ... - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> _F: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> _F: ... - -class IntParamType(ParamType): - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> int: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> int: ... - -class IntRange(IntParamType): - min: int | None - max: int | None - clamp: bool - def __init__(self, min: int | None = ..., max: int | None = ..., clamp: bool = ...) -> None: ... 
- -_PathType = TypeVar("_PathType", str, bytes) -_PathTypeBound = Union[Type[str], Type[bytes]] - -class Path(ParamType): - exists: bool - file_okay: bool - dir_okay: bool - writable: bool - readable: bool - resolve_path: bool - allow_dash: bool - type: _PathTypeBound | None - def __init__( - self, - exists: bool = ..., - file_okay: bool = ..., - dir_okay: bool = ..., - writable: bool = ..., - readable: bool = ..., - resolve_path: bool = ..., - allow_dash: bool = ..., - path_type: Type[_PathType] | None = ..., - ) -> None: ... - def coerce_path_result(self, rv: str | bytes) -> _PathType: ... - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> _PathType: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> _PathType: ... - -class StringParamType(ParamType): - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> str: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> str: ... - -class Tuple(CompositeParamType): - types: list[ParamType] - def __init__(self, types: Iterable[Any]) -> None: ... - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> Tuple: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> Tuple: ... - -class UnprocessedParamType(ParamType): ... - -class UUIDParameterType(ParamType): - def __call__(self, value: str | None, param: Parameter | None = ..., ctx: Context | None = ...) -> uuid.UUID: ... - def convert(self, value: str, param: Parameter | None, ctx: Context | None) -> uuid.UUID: ... - -def convert_type(ty: _ConvertibleType | None, default: Any | None = ...) -> ParamType: ... 
- -# parameter type shortcuts - -BOOL: BoolParamType -FLOAT: FloatParamType -INT: IntParamType -STRING: StringParamType -UNPROCESSED: UnprocessedParamType -UUID: UUIDParameterType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/click/click/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/click/click/utils.pyi deleted file mode 100644 index f97dd8e81b8a..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/click/click/utils.pyi +++ /dev/null @@ -1,48 +0,0 @@ -from types import TracebackType -from typing import IO, Any, AnyStr, Generic, Iterator, Text, Type, TypeVar - -_T = TypeVar("_T") - -def _posixify(name: str) -> str: ... -def safecall(func: _T) -> _T: ... -def make_str(value: Any) -> str: ... -def make_default_short_help(help: str, max_length: int = ...) -> str: ... - -class LazyFile(object): - name: str - mode: str - encoding: str | None - errors: str - atomic: bool - def __init__( - self, filename: str, mode: str = ..., encoding: str | None = ..., errors: str = ..., atomic: bool = ... - ) -> None: ... - def open(self) -> IO[Any]: ... - def close(self) -> None: ... - def close_intelligently(self) -> None: ... - def __enter__(self) -> LazyFile: ... - def __exit__( - self, exctype: Type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None - ) -> None: ... - def __iter__(self) -> Iterator[Any]: ... - -class KeepOpenFile(Generic[AnyStr]): - _file: IO[AnyStr] - def __init__(self, file: IO[AnyStr]) -> None: ... - def __enter__(self) -> KeepOpenFile[AnyStr]: ... - def __exit__( - self, exctype: Type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None - ) -> None: ... - def __iter__(self) -> Iterator[AnyStr]: ... - -def echo( - message: object = ..., file: IO[Text] | None = ..., nl: bool = ..., err: bool = ..., color: bool | None = ... -) -> None: ... -def get_binary_stream(name: str) -> IO[bytes]: ... 
-def get_text_stream(name: str, encoding: str | None = ..., errors: str = ...) -> IO[str]: ... -def open_file( - filename: str, mode: str = ..., encoding: str | None = ..., errors: str = ..., lazy: bool = ..., atomic: bool = ... -) -> Any: ... # really IO | LazyFile | KeepOpenFile -def get_os_args() -> list[str]: ... -def format_filename(filename: str, shorten: bool = ...) -> str: ... -def get_app_dir(app_name: str, roaming: bool = ..., force_posix: bool = ...) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorama/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/colorama/METADATA.toml index 337e55f69ea6..cea6c7b21c6f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/colorama/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorama/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.4" +version = "0.4.*" python2 = true \ No newline at end of file diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/ansitowin32.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/ansitowin32.pyi index 117fe8f6265c..9cf3008e86b2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/ansitowin32.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/ansitowin32.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import SupportsWrite -from typing import Any, Callable, Dict, Optional, Pattern, Sequence, TextIO, Tuple, Union +from typing import Any, Callable, Optional, Pattern, Sequence, TextIO, Union if sys.platform == "win32": from .winterm import WinTerm @@ -20,7 +20,7 @@ class StreamWrapper: def closed(self) -> bool: ... 
_WinTermCall = Callable[[Optional[int], bool, bool], None] -_WinTermCallDict = Dict[int, Union[Tuple[_WinTermCall], Tuple[_WinTermCall, int], Tuple[_WinTermCall, int, bool]]] +_WinTermCallDict = dict[int, Union[tuple[_WinTermCall], tuple[_WinTermCall, int], tuple[_WinTermCall, int, bool]]] class AnsiToWin32: ANSI_CSI_RE: Pattern[str] = ... @@ -40,6 +40,6 @@ class AnsiToWin32: def write_and_convert(self, text: str) -> None: ... def write_plain_text(self, text: str, start: int, end: int) -> None: ... def convert_ansi(self, paramstring: str, command: str) -> None: ... - def extract_params(self, command: str, paramstring: str) -> Tuple[int, ...]: ... + def extract_params(self, command: str, paramstring: str) -> tuple[int, ...]: ... def call_win32(self, command: str, params: Sequence[int]) -> None: ... def convert_osc(self, text: str) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/win32.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/win32.pyi index 37e6a0b99f9d..0a6f4ca23f2f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/win32.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/win32.pyi @@ -10,13 +10,13 @@ if sys.platform == "win32": windll: LibraryLoader[WinDLL] COORD = wintypes._COORD + class CONSOLE_SCREEN_BUFFER_INFO(Structure): dwSize: COORD dwCursorPosition: COORD wAttributes: wintypes.WORD srWindow: wintypes.SMALL_RECT dwMaximumWindowSize: COORD - def __str__(self) -> str: ... def winapi_test() -> bool: ... def GetConsoleScreenBufferInfo(stream_id: int = ...) -> CONSOLE_SCREEN_BUFFER_INFO: ... def SetConsoleTextAttribute(stream_id: int, attrs: wintypes.WORD) -> wintypes.BOOL: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/winterm.pyi b/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/winterm.pyi index a827463c000c..af1dd3db132a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/winterm.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/colorama/colorama/winterm.pyi @@ -3,6 +3,7 @@ import sys if sys.platform == "win32": from . import win32 + class WinColor: BLACK: int = ... BLUE: int = ... @@ -12,10 +13,12 @@ if sys.platform == "win32": MAGENTA: int = ... YELLOW: int = ... GREY: int = ... + class WinStyle: NORMAL: int = ... BRIGHT: int = ... BRIGHT_BACKGROUND: int = ... + class WinTerm: def __init__(self) -> None: ... def get_attrs(self) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/commonmark/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/commonmark/METADATA.toml index 16fd217d5d8e..51e869b47983 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/commonmark/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/commonmark/METADATA.toml @@ -1 +1 @@ -version = "0.9" +version = "0.9.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/contextvars/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/contextvars/METADATA.toml index 5f1541084942..ea07e8d318f6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/contextvars/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/contextvars/METADATA.toml @@ -1 +1 @@ -version = "0.1" +version = "2.4" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/contextvars/contextvars.pyi b/packages/pyright-internal/typeshed-fallback/stubs/contextvars/contextvars.pyi index 429d2037afb2..49e6e35b49ac 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/contextvars/contextvars.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/contextvars/contextvars.pyi @@ -1,11 
+1,14 @@ import sys from typing import Any, Callable, ClassVar, Generic, Iterator, Mapping, TypeVar +from typing_extensions import ParamSpec, final if sys.version_info >= (3, 9): from types import GenericAlias _T = TypeVar("_T") +_P = ParamSpec("_P") +@final class ContextVar(Generic[_T]): def __init__(self, name: str, *, default: _T = ...) -> None: ... @property @@ -16,6 +19,7 @@ class ContextVar(Generic[_T]): if sys.version_info >= (3, 9): def __class_getitem__(cls, item: Any) -> GenericAlias: ... +@final class Token(Generic[_T]): @property def var(self) -> ContextVar[_T]: ... @@ -29,10 +33,11 @@ def copy_context() -> Context: ... # It doesn't make sense to make this generic, because for most Contexts each ContextVar will have # a different value. +@final class Context(Mapping[ContextVar[Any], Any]): def __init__(self) -> None: ... - def run(self, callable: Callable[..., _T], *args: Any, **kwargs: Any) -> _T: ... + def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... def copy(self) -> Context: ... - def __getitem__(self, key: ContextVar[Any]) -> Any: ... + def __getitem__(self, __key: ContextVar[_T]) -> _T: ... def __iter__(self) -> Iterator[ContextVar[Any]]: ... def __len__(self) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/croniter/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/croniter/METADATA.toml index f339dc7fdf26..e29f8e7f8ef1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/croniter/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/croniter/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.0" +version = "1.0.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/croniter/croniter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/croniter/croniter.pyi index 22cd88b343ce..c55b83672fd7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/croniter/croniter.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/croniter/croniter.pyi @@ -1,8 +1,9 @@ import datetime -from typing import Any, Iterator, Text, Tuple, Type, TypeVar, Union +from _typeshed import Self +from typing import Any, Iterator, Text, Union +from typing_extensions import Literal -_RetType = Union[Type[float], Type[datetime.datetime]] -_SelfT = TypeVar("_SelfT", bound=croniter) +_RetType = Union[type[float], type[datetime.datetime]] class CroniterError(ValueError): ... class CroniterBadCronError(CroniterError): ... @@ -10,11 +11,25 @@ class CroniterBadDateError(CroniterError): ... class CroniterNotAlphaError(CroniterError): ... class croniter(Iterator[Any]): - MONTHS_IN_YEAR: int - RANGES: Tuple[Tuple[int, int], ...] - DAYS: Tuple[int, ...] - ALPHACONV: Tuple[dict[str, Any], ...] - LOWMAP: Tuple[dict[int, Any], ...] + MONTHS_IN_YEAR: Literal[12] + RANGES: tuple[tuple[int, int], ...] + DAYS: tuple[ + Literal[31], + Literal[28], + Literal[31], + Literal[30], + Literal[31], + Literal[30], + Literal[31], + Literal[31], + Literal[30], + Literal[31], + Literal[30], + Literal[31], + ] + ALPHACONV: tuple[dict[str, Any], ...] + LOWMAP: tuple[dict[int, Any], ...] + LEN_MEANS_ALL: tuple[int, ...] 
bad_length: str tzinfo: datetime.tzinfo | None cur: float @@ -34,10 +49,11 @@ class croniter(Iterator[Any]): ) -> None: ... # Most return value depend on ret_type, which can be passed in both as a method argument and as # a constructor argument. - def get_next(self, ret_type: _RetType | None = ...) -> Any: ... + def get_next(self, ret_type: _RetType | None = ..., start_time: float | datetime.datetime | None = ...) -> Any: ... def get_prev(self, ret_type: _RetType | None = ...) -> Any: ... def get_current(self, ret_type: _RetType | None = ...) -> Any: ... - def __iter__(self: _SelfT) -> _SelfT: ... + def set_current(self, start_time: float | datetime.datetime) -> float: ... + def __iter__(self: Self) -> Self: ... def __next__(self, ret_type: _RetType | None = ...) -> Any: ... def next(self, ret_type: _RetType | None = ...) -> Any: ... def all_next(self, ret_type: _RetType | None = ...) -> Iterator[Any]: ... @@ -45,6 +61,18 @@ class croniter(Iterator[Any]): def iter(self, ret_type: _RetType | None = ...) -> Iterator[Any]: ... def is_leap(self, year: int) -> bool: ... @classmethod - def expand(cls, expr_format: Text) -> Tuple[list[list[str]], dict[str, Any]]: ... + def expand(cls, expr_format: Text, hash_id: str | bytes | None = ...) -> tuple[list[list[str]], dict[str, Any]]: ... @classmethod - def is_valid(cls, expression: Text) -> bool: ... + def is_valid(cls, expression: Text, hash_id: str | bytes | None = ...) -> bool: ... + @classmethod + def match(cls, cron_expression: Text, testdate: float | datetime.datetime | None, day_or: bool = ...) -> bool: ... + +def croniter_range( + start: float | datetime.datetime, + stop: float | datetime.datetime, + expr_format: Text, + ret_type: _RetType | None = ..., + day_or: bool = ..., + exclude_ends: bool = ..., + _croniter: type[croniter] | None = ..., +) -> Iterator[Any]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/METADATA.toml index a1932bbe5537..67fb5034d2cd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/METADATA.toml @@ -1,4 +1,4 @@ -version = "3.3" +version = "3.3.*" python2 = true requires = ["types-enum34", "types-ipaddress"] obsolete_since = "3.4.4" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/ec.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/ec.pyi index 33fb4cb9a931..a04af595f8b1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/ec.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/ec.pyi @@ -1,3 +1,4 @@ +from _typeshed import Self from abc import ABCMeta, abstractmethod from typing import ClassVar @@ -174,7 +175,7 @@ class EllipticCurvePublicNumbers(object): def y(self) -> int: ... def __init__(self, x: int, y: int, curve: EllipticCurve) -> None: ... @classmethod - def from_encoded_point(cls, curve: EllipticCurve, data: bytes) -> EllipticCurvePublicNumbers: ... + def from_encoded_point(cls: type[Self], curve: EllipticCurve, data: bytes) -> Self: ... def public_key(self, backend: EllipticCurveBackend | None = ...) -> EllipticCurvePublicKey: ... 
class EllipticCurveSignatureAlgorithm(metaclass=ABCMeta): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/rsa.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/rsa.pyi index 3b5f0ca0caad..958fc71f808c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/rsa.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/rsa.pyi @@ -1,5 +1,4 @@ from abc import ABCMeta, abstractmethod -from typing import Tuple from cryptography.hazmat.backends.interfaces import RSABackend from cryptography.hazmat.primitives.asymmetric import AsymmetricVerificationContext @@ -52,7 +51,7 @@ def generate_private_key( def rsa_crt_iqmp(p: int, q: int) -> int: ... def rsa_crt_dmp1(private_exponent: int, p: int) -> int: ... def rsa_crt_dmq1(private_exponent: int, q: int) -> int: ... -def rsa_recover_prime_factors(n: int, e: int, d: int) -> Tuple[int, int]: ... +def rsa_recover_prime_factors(n: int, e: int, d: int) -> tuple[int, int]: ... class RSAPrivateNumbers(object): def __init__(self, p: int, q: int, d: int, dmp1: int, dmq1: int, iqmp: int, public_numbers: RSAPublicNumbers) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/utils.pyi index 5bd9f53a8a8d..199bfc8ea4bb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/asymmetric/utils.pyi @@ -1,8 +1,6 @@ -from typing import Tuple - from cryptography.hazmat.primitives.hashes import HashAlgorithm -def decode_dss_signature(signature: bytes) -> Tuple[int, int]: ... +def decode_dss_signature(signature: bytes) -> tuple[int, int]: ... def encode_dss_signature(r: int, s: int) -> bytes: ... class Prehashed(object): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/algorithms.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/algorithms.pyi index 78e44b2c329b..466c70b055b5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/algorithms.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/algorithms.pyi @@ -1,5 +1,3 @@ -from typing import FrozenSet - from cryptography.hazmat.primitives.ciphers import BlockCipherAlgorithm, CipherAlgorithm from cryptography.hazmat.primitives.ciphers.modes import ModeWithNonce @@ -7,7 +5,7 @@ class AES(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... @property def key_size(self) -> int: ... @@ -16,7 +14,7 @@ class ARC4(CipherAlgorithm): @property def key_size(self) -> int: ... name: str = ... - key_sizes: FrozenSet[int] = ... 
+ key_sizes: frozenset[int] = ... class Blowfish(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @@ -24,7 +22,7 @@ class Blowfish(BlockCipherAlgorithm, CipherAlgorithm): def key_size(self) -> int: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... class Camellia(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @@ -32,7 +30,7 @@ class Camellia(BlockCipherAlgorithm, CipherAlgorithm): def key_size(self) -> int: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... class CAST5(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @@ -40,14 +38,14 @@ class CAST5(BlockCipherAlgorithm, CipherAlgorithm): def key_size(self) -> int: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... class ChaCha20(CipherAlgorithm, ModeWithNonce): def __init__(self, key: bytes, nonce: bytes) -> None: ... @property def key_size(self) -> int: ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... @property def nonce(self) -> bytes: ... @@ -57,7 +55,7 @@ class IDEA(CipherAlgorithm): def key_size(self) -> int: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... class SEED(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @@ -65,7 +63,7 @@ class SEED(BlockCipherAlgorithm, CipherAlgorithm): def key_size(self) -> int: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... + key_sizes: frozenset[int] = ... class TripleDES(BlockCipherAlgorithm, CipherAlgorithm): def __init__(self, key: bytes) -> None: ... @@ -73,4 +71,4 @@ class TripleDES(BlockCipherAlgorithm, CipherAlgorithm): def key_size(self) -> int: ... block_size: int = ... name: str = ... - key_sizes: FrozenSet[int] = ... 
+ key_sizes: frozenset[int] = ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/modes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/modes.pyi index e01d2009438c..b99d7a89fc92 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/modes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/ciphers/modes.pyi @@ -67,7 +67,7 @@ class ECB(Mode): def validate_for_algorithm(self, algorithm: CipherAlgorithm) -> None: ... class GCM(Mode, ModeWithInitializationVector, ModeWithAuthenticationTag): - def __init__(self, initialization_vector: bytes, tag: bytes | None, min_tag_length: int | None) -> None: ... + def __init__(self, initialization_vector: bytes, tag: bytes | None = ..., min_tag_length: int | None = ...) -> None: ... @property def initialization_vector(self) -> bytes: ... @property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/serialization/pkcs12.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/serialization/pkcs12.pyi index 40b555bd29fa..72c91f7a69ca 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/serialization/pkcs12.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/hazmat/primitives/serialization/pkcs12.pyi @@ -1,4 +1,4 @@ -from typing import Any, Tuple +from typing import Any from cryptography.hazmat.primitives.asymmetric.dsa import DSAPrivateKeyWithSerialization from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKeyWithSerialization @@ -8,7 +8,7 @@ from cryptography.x509 import Certificate def load_key_and_certificates( data: bytes, password: bytes | None, backend: Any | None = ... 
-) -> Tuple[Any | None, Certificate | None, list[Certificate]]: ... +) -> tuple[Any | None, Certificate | None, list[Certificate]]: ... def serialize_key_and_certificates( name: bytes, key: RSAPrivateKeyWithSerialization | EllipticCurvePrivateKeyWithSerialization | DSAPrivateKeyWithSerialization, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/__init__.pyi index be3b7bdabd3f..e9db89a0faac 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/__init__.pyi @@ -1,8 +1,9 @@ import datetime +from _typeshed import Self from abc import ABCMeta, abstractmethod from enum import Enum from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network -from typing import Any, ClassVar, Generator, Generic, Iterable, Sequence, Text, Type, TypeVar +from typing import Any, ClassVar, Generator, Generic, Iterable, Sequence, Text, TypeVar from cryptography.hazmat.backends.interfaces import X509Backend from cryptography.hazmat.primitives.asymmetric.dsa import DSAPrivateKey, DSAPublicKey @@ -282,7 +283,7 @@ class UniformResourceIdentifier(GeneralName): class ExtensionType(metaclass=ABCMeta): oid: ObjectIdentifier -_T = TypeVar("_T", bound="ExtensionType") +_T = TypeVar("_T", bound=ExtensionType) class Extension(Generic[_T]): critical: bool @@ -293,7 +294,7 @@ class Extensions(object): def __init__(self, general_names: list[Extension[Any]]) -> None: ... def __iter__(self) -> Generator[Extension[Any], None, None]: ... def get_extension_for_oid(self, oid: ObjectIdentifier) -> Extension[Any]: ... - def get_extension_for_class(self, extclass: Type[_T]) -> Extension[_T]: ... + def get_extension_for_class(self, extclass: type[_T]) -> Extension[_T]: ... 
class DuplicateExtension(Exception): oid: ObjectIdentifier @@ -306,12 +307,12 @@ class ExtensionNotFound(Exception): class IssuerAlternativeName(ExtensionType): def __init__(self, general_names: list[GeneralName]) -> None: ... def __iter__(self) -> Generator[GeneralName, None, None]: ... - def get_values_for_type(self, type: Type[GeneralName]) -> list[Any]: ... + def get_values_for_type(self, type: type[GeneralName]) -> list[Any]: ... class SubjectAlternativeName(ExtensionType): def __init__(self, general_names: list[GeneralName]) -> None: ... def __iter__(self) -> Generator[GeneralName, None, None]: ... - def get_values_for_type(self, type: Type[GeneralName]) -> list[Any]: ... + def get_values_for_type(self, type: type[GeneralName]) -> list[Any]: ... class AuthorityKeyIdentifier(ExtensionType): @property @@ -325,10 +326,10 @@ class AuthorityKeyIdentifier(ExtensionType): ) -> None: ... @classmethod def from_issuer_public_key( - cls, public_key: RSAPublicKey | DSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey | Ed448PublicKey - ) -> AuthorityKeyIdentifier: ... + cls: type[Self], public_key: RSAPublicKey | DSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey | Ed448PublicKey + ) -> Self: ... @classmethod - def from_issuer_subject_key_identifier(cls, ski: SubjectKeyIdentifier) -> AuthorityKeyIdentifier: ... + def from_issuer_subject_key_identifier(cls: type[Self], ski: SubjectKeyIdentifier) -> Self: ... class SubjectKeyIdentifier(ExtensionType): @property @@ -336,8 +337,8 @@ class SubjectKeyIdentifier(ExtensionType): def __init__(self, digest: bytes) -> None: ... @classmethod def from_public_key( - cls, public_key: RSAPublicKey | DSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey | Ed448PublicKey - ) -> SubjectKeyIdentifier: ... + cls: type[Self], public_key: RSAPublicKey | DSAPublicKey | EllipticCurvePublicKey | Ed25519PublicKey | Ed448PublicKey + ) -> Self: ... 
class AccessDescription: @property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/oid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/oid.pyi index 2ca14b95ff64..43dd1e2062f0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/oid.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/cryptography/cryptography/x509/oid.pyi @@ -99,6 +99,5 @@ class CertificatePoliciesOID: CPS_USER_NOTICE: ObjectIdentifier = ... ANY_POLICY: ObjectIdentifier = ... -_OID_NAMES: dict[ObjectIdentifier, str] = ... - -_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, HashAlgorithm | None] = ... +_OID_NAMES: dict[ObjectIdentifier, str] +_SIG_OIDS_TO_HASH: dict[ObjectIdentifier, HashAlgorithm | None] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/METADATA.toml index 5f1541084942..22cbe4eb9ca3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/METADATA.toml @@ -1 +1 @@ -version = "0.1" +version = "0.6" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/dataclasses.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/dataclasses.pyi index 871ccabf1674..8f76c6be10aa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/dataclasses.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dataclasses/dataclasses.pyi @@ -1,5 +1,5 @@ import sys -from typing import Any, Callable, Generic, Iterable, Mapping, Tuple, Type, TypeVar, overload +from typing import Any, Callable, Generic, Iterable, Mapping, TypeVar, overload if sys.version_info >= (3, 9): from types import GenericAlias @@ -13,23 +13,23 @@ MISSING: _MISSING_TYPE @overload def asdict(obj: Any) -> dict[str, Any]: ... 
@overload -def asdict(obj: Any, *, dict_factory: Callable[[list[Tuple[str, Any]]], _T]) -> _T: ... +def asdict(obj: Any, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... @overload -def astuple(obj: Any) -> Tuple[Any, ...]: ... +def astuple(obj: Any) -> tuple[Any, ...]: ... @overload def astuple(obj: Any, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... @overload -def dataclass(_cls: Type[_T]) -> Type[_T]: ... +def dataclass(_cls: type[_T]) -> type[_T]: ... @overload -def dataclass(_cls: None) -> Callable[[Type[_T]], Type[_T]]: ... +def dataclass(_cls: None) -> Callable[[type[_T]], type[_T]]: ... @overload def dataclass( *, init: bool = ..., repr: bool = ..., eq: bool = ..., order: bool = ..., unsafe_hash: bool = ..., frozen: bool = ... -) -> Callable[[Type[_T]], Type[_T]]: ... +) -> Callable[[type[_T]], type[_T]]: ... class Field(Generic[_T]): name: str - type: Type[_T] + type: type[_T] default: _T default_factory: Callable[[], _T] repr: bool @@ -66,7 +66,7 @@ def field( def field( *, init: bool = ..., repr: bool = ..., hash: bool | None = ..., compare: bool = ..., metadata: Mapping[str, Any] | None = ... ) -> Any: ... -def fields(class_or_instance: Any) -> Tuple[Field[Any], ...]: ... +def fields(class_or_instance: Any) -> tuple[Field[Any], ...]: ... def is_dataclass(obj: Any) -> bool: ... class FrozenInstanceError(AttributeError): ... @@ -77,9 +77,9 @@ class InitVar(Generic[_T]): def make_dataclass( cls_name: str, - fields: Iterable[str | Tuple[str, type] | Tuple[str, type, Field[Any]]], + fields: Iterable[str | tuple[str, type] | tuple[str, type, Field[Any]]], *, - bases: Tuple[type, ...] = ..., + bases: tuple[type, ...] 
= ..., namespace: dict[str, Any] | None = ..., init: bool = ..., repr: bool = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/METADATA.toml index 346c93fc5729..c9f594bd7bbd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/METADATA.toml @@ -1 +1 @@ -version = "1.0" +version = "1.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/__init__.pyi index 2a137ef9ecff..c3bb15660728 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/__init__.pyi @@ -1,13 +1,7 @@ import datetime -import sys -from typing import Set, Tuple +from typing_extensions import Literal, TypedDict -from dateparser.date import DateDataParser - -if sys.version_info >= (3, 8): - from typing import Literal, TypedDict -else: - from typing_extensions import Literal, TypedDict +from .date import DateDataParser, _DetectLanguagesFunction __version__: str @@ -34,9 +28,10 @@ class _Settings(TypedDict, total=False): def parse( date_string: str, - date_formats: list[str] | Tuple[str] | Set[str] | None = ..., - languages: list[str] | Tuple[str] | Set[str] | None = ..., - locales: list[str] | Tuple[str] | Set[str] | None = ..., + date_formats: list[str] | tuple[str, ...] | set[str] | None = ..., + languages: list[str] | tuple[str, ...] | set[str] | None = ..., + locales: list[str] | tuple[str, ...] | set[str] | None = ..., region: str | None = ..., settings: _Settings | None = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., ) -> datetime.datetime | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/conf.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/conf.pyi index 30d86dcff9c8..7b816d9d6099 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/conf.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/conf.pyi @@ -1,7 +1,8 @@ +from _typeshed import Self from typing import Any class Settings: - def __new__(cls, *args, **kw) -> Settings: ... + def __new__(cls: type[Self], *args, **kw) -> Self: ... def __init__(self, settings: Any | None = ...) -> None: ... @classmethod def get_key(cls, settings: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/date.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/date.pyi index f39d15ffe794..fe2cea9aaa0c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/date.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/date.pyi @@ -1,21 +1,18 @@ -import sys +import collections from _typeshed import Self as Self +from collections.abc import Callable, Iterable, Iterator from datetime import datetime -from typing import Any, ClassVar, Iterable, Iterator, Type, overload +from typing import ClassVar, Pattern, overload +from typing_extensions import Literal from dateparser import _Settings from dateparser.conf import Settings from dateparser.languages.loader import LocaleDataLoader from dateparser.languages.locale import Locale -if sys.version_info >= (3, 8): - from re import Pattern - from typing import Literal -else: - from typing import Pattern - from typing_extensions import Literal - +_DetectLanguagesFunction = Callable[[str, float], list[str]] _Period = Literal["time", "day", "week", "month", "year"] + APOSTROPHE_LOOK_ALIKE_CHARS: list[str] RE_NBSP: Pattern[str] RE_SPACES: Pattern[str] @@ -38,12 +35,12 @@ def 
parse_with_formats(date_string: str, date_formats: Iterable[str], settings: class _DateLocaleParser: locale: Locale date_string: str - date_formats: list[str] | tuple[str] | set[str] | None + date_formats: list[str] | tuple[str, ...] | set[str] | None def __init__( self, locale: Locale, date_string: str, - date_formats: list[str] | tuple[str] | set[str] | None, + date_formats: list[str] | tuple[str, ...] | set[str] | None, settings: Settings | None = ..., ) -> None: ... @classmethod @@ -51,7 +48,7 @@ class _DateLocaleParser: cls, locale: Locale, date_string: str, - date_formats: list[str] | tuple[str] | set[str] | None = ..., + date_formats: list[str] | tuple[str, ...] | set[str] | None = ..., settings: Settings | None = ..., ) -> DateData: ... def _parse(self) -> DateData | None: ... @@ -88,22 +85,24 @@ class DateDataParser: locale_loader: ClassVar[LocaleDataLoader | None] try_previous_locales: bool use_given_order: bool - languages: Any - locales: Any - region: Any - previous_locales: Any + languages: list[str] | None + locales: list[str] | tuple[str, ...] | set[str] | None + region: str + detect_languages_function: _DetectLanguagesFunction | None + previous_locales: collections.OrderedDict[Locale, None] def __init__( self, - languages: list[str] | tuple[str] | set[str] | None = ..., - locales: list[str] | tuple[str] | set[str] | None = ..., + languages: list[str] | tuple[str, ...] | set[str] | None = ..., + locales: list[str] | tuple[str, ...] | set[str] | None = ..., region: str | None = ..., try_previous_locales: bool = ..., use_given_order: bool = ..., settings: _Settings | None = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., ) -> None: ... - def get_date_data(self, date_string: str, date_formats: list[str] | tuple[str] | set[str] | None = ...) -> DateData: ... - def get_date_tuple(self, date_string: str, date_formats: list[str] | tuple[str] | set[str] | None = ...): ... 
+ def get_date_data(self, date_string: str, date_formats: list[str] | tuple[str, ...] | set[str] | None = ...) -> DateData: ... + def get_date_tuple(self, date_string: str, date_formats: list[str] | tuple[str, ...] | set[str] | None = ...): ... def _get_applicable_locales(self, date_string: str) -> Iterator[Locale]: ... def _is_applicable_locale(self, locale: Locale, date_string: str) -> bool: ... @classmethod - def _get_locale_loader(cls: Type[DateDataParser]) -> LocaleDataLoader: ... + def _get_locale_loader(cls: type[DateDataParser]) -> LocaleDataLoader: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/languages/loader.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/languages/loader.pyi index 431979be4f02..8eaa38c77b08 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/languages/loader.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/languages/loader.pyi @@ -1,5 +1,5 @@ from collections import OrderedDict -from typing import Any, Iterator, List +from typing import Any, Iterator from .locale import Locale @@ -8,16 +8,16 @@ LOCALE_SPLIT_PATTERN: Any class LocaleDataLoader: def get_locale_map( self, - languages: List[str] | None = ..., - locales: List[str] | None = ..., + languages: list[str] | None = ..., + locales: list[str] | None = ..., region: str | None = ..., use_given_order: bool = ..., allow_conflicting_locales: bool = ..., - ) -> OrderedDict[str, List[Any] | str | int]: ... + ) -> OrderedDict[str, list[Any] | str | int]: ... 
def get_locales( self, - languages: List[str] | None = ..., - locales: List[str] | None = ..., + languages: list[str] | None = ..., + locales: list[str] | None = ..., region: str | None = ..., use_given_order: bool = ..., allow_conflicting_locales: bool = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/__init__.pyi index e75ed92a6c5f..b9b1d3e689fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/__init__.pyi @@ -1,23 +1,23 @@ -import sys +from collections.abc import Mapping, Set as AbstractSet from datetime import datetime -from typing import Any, List, Mapping, Set, Tuple, overload +from typing import Any, overload +from typing_extensions import Literal -if sys.version_info >= (3, 8): - from typing import Literal -else: - from typing_extensions import Literal +from ..date import _DetectLanguagesFunction @overload def search_dates( text: str, - languages: List[str] | Tuple[str] | Set[str] | None, + languages: list[str] | tuple[str, ...] | AbstractSet[str] | None, settings: Mapping[Any, Any] | None, add_detected_language: Literal[True], -) -> List[Tuple[str, datetime, str]]: ... + detect_languages_function: _DetectLanguagesFunction | None = ..., +) -> list[tuple[str, datetime, str]]: ... @overload def search_dates( text: str, - languages: List[str] | Tuple[str] | Set[str] | None = ..., + languages: list[str] | tuple[str, ...] | AbstractSet[str] | None = ..., settings: Mapping[Any, Any] | None = ..., add_detected_language: Literal[False] = ..., -) -> List[Tuple[str, datetime]]: ... + detect_languages_function: _DetectLanguagesFunction | None = ..., +) -> list[tuple[str, datetime]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/search.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/search.pyi index 67e6e9582492..d0efac4fd224 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/search.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/search/search.pyi @@ -1,5 +1,7 @@ from typing import Any +from ..date import _DetectLanguagesFunction + RELATIVE_REG: Any def date_is_relative(translation): ... @@ -25,5 +27,13 @@ class DateSearchWithDetection: search: Any def __init__(self) -> None: ... language_detector: Any - def detect_language(self, text, languages): ... - def search_dates(self, text, languages: Any | None = ..., settings: Any | None = ...): ... + def detect_language( + self, text, languages, settings: Any | None = ..., detect_languages_function: _DetectLanguagesFunction | None = ... + ): ... + def search_dates( + self, + text, + languages: Any | None = ..., + settings: Any | None = ..., + detect_languages_function: _DetectLanguagesFunction | None = ..., + ): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/utils/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/utils/__init__.pyi index 8b979d0bf9b2..1dd81898563c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/utils/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/dateparser/dateparser/utils/__init__.pyi @@ -1,11 +1,11 @@ from collections import OrderedDict -from typing import Any, List, Mapping +from typing import Any, Mapping def strip_braces(date_string: str) -> str: ... def normalize_unicode(string: str, form: str = ...) -> str: ... def combine_dicts( primary_dict: Mapping[Any, Any], supplementary_dict: Mapping[Any, Any] -) -> OrderedDict[str, str | List[Any]]: ... 
+) -> OrderedDict[str, str | list[Any]]: ... def find_date_separator(format) -> Any: ... def localize_timezone(date_time, tz_string): ... def apply_tzdatabase_timezone(date_time, pytz_string): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/decorator/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/decorator/METADATA.toml index 31f638bf33df..2c151642b36f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/decorator/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/decorator/METADATA.toml @@ -1,2 +1 @@ -version = "0.1" -python2 = true +version = "5.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi index 0d50a5412a1d..c2a4bed79474 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/decorator/decorator.pyi @@ -1,9 +1,11 @@ import sys -from typing import Any, Callable, Iterator, NamedTuple, Pattern, Text, Tuple, TypeVar +from typing import Any, Callable, Iterator, NamedTuple, Pattern, Text, TypeVar +from typing_extensions import ParamSpec _C = TypeVar("_C", bound=Callable[..., Any]) _Func = TypeVar("_Func", bound=Callable[..., Any]) _T = TypeVar("_T") +_P = ParamSpec("_P") def get_init(cls: type) -> None: ... @@ -14,7 +16,7 @@ else: args: list[str] varargs: str | None varkw: str | None - defaults: Tuple[Any, ...] + defaults: tuple[Any, ...] kwonlyargs: list[str] kwonlydefaults: dict[str, Any] annotations: dict[str, Any] @@ -30,11 +32,11 @@ DEF: Pattern[str] _dict = dict # conflicts with attribute name -class FunctionMaker(object): +class FunctionMaker: args: list[Text] varargs: Text | None varkw: Text | None - defaults: Tuple[Any, ...] + defaults: tuple[Any, ...] 
kwonlyargs: list[Text] kwonlydefaults: Text | None shortsignature: Text | None @@ -49,7 +51,7 @@ class FunctionMaker(object): func: Callable[..., Any] | None = ..., name: Text | None = ..., signature: Text | None = ..., - defaults: Tuple[Any, ...] | None = ..., + defaults: tuple[Any, ...] | None = ..., doc: Text | None = ..., module: Text | None = ..., funcdict: _dict[Text, Any] | None = ..., @@ -64,7 +66,7 @@ class FunctionMaker(object): obj: Any, body: Text, evaldict: _dict[Text, Any], - defaults: Tuple[Any, ...] | None = ..., + defaults: tuple[Any, ...] | None = ..., doc: Text | None = ..., module: Text | None = ..., addsource: bool = ..., @@ -79,5 +81,5 @@ def decorator( class ContextManager(_GeneratorContextManager[_T]): def __call__(self, func: _C) -> _C: ... -def contextmanager(func: Callable[..., Iterator[_T]]) -> Callable[..., ContextManager[_T]]: ... +def contextmanager(func: Callable[_P, Iterator[_T]]) -> Callable[_P, ContextManager[_T]]: ... def dispatch_on(*dispatch_args: Any) -> Callable[[Callable[..., Any]], Callable[..., Any]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docopt/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/docopt/METADATA.toml index c7567c225756..ce19e8f4b3b6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docopt/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/docopt/METADATA.toml @@ -1,3 +1,3 @@ # Prior to v0.6, docopt() had only 3 optional args -version = "0.6" +version = "0.6.*" python2 = true \ No newline at end of file diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml index b3849f7d0447..17d320a94b90 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/METADATA.toml @@ -1 +1 @@ -version = "0.17" +version = "0.17.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/__init__.pyi index d4477b9624b0..cfa9914a6a4e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/__init__.pyi @@ -1,4 +1,5 @@ -from typing import Any, ClassVar, NamedTuple, Tuple +from _typeshed import Self +from typing import Any, ClassVar, NamedTuple __docformat__: str __version__: str @@ -13,8 +14,14 @@ class _VersionInfo(NamedTuple): class VersionInfo(_VersionInfo): def __new__( - cls, major: int = ..., minor: int = ..., micro: int = ..., releaselevel: str = ..., serial: int = ..., release: bool = ... - ) -> VersionInfo: ... + cls: type[Self], + major: int = ..., + minor: int = ..., + micro: int = ..., + releaselevel: str = ..., + serial: int = ..., + release: bool = ..., + ) -> Self: ... 
__version_info__: VersionInfo __version_details__: str @@ -23,19 +30,19 @@ class ApplicationError(Exception): ... class DataError(ApplicationError): ... class SettingsSpec: - settings_spec: ClassVar[Tuple[Any, ...]] + settings_spec: ClassVar[tuple[Any, ...]] settings_defaults: ClassVar[dict[Any, Any] | None] settings_default_overrides: ClassVar[dict[Any, Any] | None] - relative_path_settings: ClassVar[Tuple[Any, ...]] + relative_path_settings: ClassVar[tuple[Any, ...]] config_section: ClassVar[str | None] - config_section_dependencies: ClassVar[Tuple[str, ...] | None] + config_section_dependencies: ClassVar[tuple[str, ...] | None] class TransformSpec: def get_transforms(self) -> list[Any]: ... - default_transforms: ClassVar[Tuple[Any, ...]] + default_transforms: ClassVar[tuple[Any, ...]] unknown_reference_resolvers: ClassVar[list[Any]] class Component(SettingsSpec, TransformSpec): component_type: ClassVar[str | None] - supported: ClassVar[Tuple[str, ...]] + supported: ClassVar[tuple[str, ...]] def supports(self, format: str) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/frontend.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/frontend.pyi index c32dc44ffb5e..e71907471704 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/frontend.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/frontend.pyi @@ -1,7 +1,7 @@ import optparse from collections.abc import Iterable, Mapping from configparser import RawConfigParser -from typing import Any, ClassVar, Tuple, Type +from typing import Any, ClassVar from docutils import SettingsSpec from docutils.parsers import Parser @@ -45,7 +45,7 @@ def validate_smartquotes_locales( ) -> list[tuple[str, str]]: ... def make_paths_absolute(pathdict, keys, base_path: Any | None = ...) -> None: ... def make_one_path_absolute(base_path, path) -> str: ... 
-def filter_settings_spec(settings_spec, *exclude, **replace) -> Tuple[Any, ...]: ... +def filter_settings_spec(settings_spec, *exclude, **replace) -> tuple[Any, ...]: ... class Values(optparse.Values): def update(self, other_dict, option_parser) -> None: ... @@ -64,7 +64,7 @@ class OptionParser(optparse.OptionParser, SettingsSpec): version_template: ClassVar[str] def __init__( self, - components: Iterable[Type[Parser]] = ..., + components: Iterable[type[Parser]] = ..., defaults: Mapping[str, Any] | None = ..., read_config_files: bool | None = ..., *args, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/languages/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/languages/__init__.pyi index 964e6fa3f426..d8747739d3ae 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/languages/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/languages/__init__.pyi @@ -1,3 +1,14 @@ -from typing import Any +from typing import Any, Protocol -def __getattr__(name: str) -> Any: ... # incomplete +from docutils.utils import Reporter + +class _LanguageModule(Protocol): + labels: dict[str, str] + author_separators: list[str] + bibliographic_fields: list[str] + +class LanguageImporter: + def __call__(self, language_code: str, reporter: Reporter | None = ...) -> _LanguageModule: ... + def __getattr__(self, __name: str) -> Any: ... 
# incomplete + +get_language: LanguageImporter diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/nodes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/nodes.pyi index 964e6fa3f426..7df55c0bd92c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/nodes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/nodes.pyi @@ -1,3 +1,44 @@ -from typing import Any +from _typeshed import Self +from collections.abc import Iterable +from typing import Any, overload + +from docutils.transforms import Transformer + +class Node: + parent: Node | None + source: str | None + line: int | None + document: document | None + def __getattr__(self, __name: str) -> Any: ... # incomplete + +class Element(Node): + children: list[Node] + def __init__(self, rawsource: str = ..., *children: Node, **attributes): ... + def __len__(self) -> int: ... + def __contains__(self, key: str | Node) -> bool: ... + @overload + def __getitem__(self, key: str) -> Any: ... + @overload + def __getitem__(self, key: int) -> Node: ... + @overload + def __getitem__(self, key: slice) -> list[Node]: ... + @overload + def __setitem__(self, key: str, item: Any) -> None: ... + @overload + def __setitem__(self, key: int, item: Node) -> None: ... + @overload + def __setitem__(self, key: slice, item: Iterable[Node]) -> None: ... + def __delitem__(self, key: str | int | slice) -> None: ... + def __add__(self, other: list[Node]) -> list[Node]: ... + def __radd__(self, other: list[Node]) -> list[Node]: ... + def __iadd__(self: Self, other: Node | Iterable[Node]) -> Self: ... + def __getattr__(self, __name: str) -> Any: ... # incomplete + +class Structural: ... +class Root: ... + +class document(Root, Structural, Element): + transformer: Transformer + def __getattr__(self, __name: str) -> Any: ... # incomplete def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/__init__.pyi index a77c183734e0..04683301d1e2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/__init__.pyi @@ -1,16 +1,17 @@ -from typing import Any, ClassVar, Type +from typing import Any, ClassVar from docutils import Component +from docutils.nodes import document as _document class Parser(Component): component_type: ClassVar[str] config_section: ClassVar[str] inputstring: Any # defined after call to setup_parse() document: Any # defined after call to setup_parse() - def parse(self, inputstring: str, document) -> None: ... - def setup_parse(self, inputstring: str, document) -> None: ... + def parse(self, inputstring: str, document: _document) -> None: ... + def setup_parse(self, inputstring: str, document: _document) -> None: ... def finish_parse(self) -> None: ... _parser_aliases: dict[str, str] -def get_parser_class(parser_name: str) -> Type[Parser]: ... +def get_parser_class(parser_name: str) -> type[Parser]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/null.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/null.pyi index 1d3629109243..edc977325a1d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/null.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/null.pyi @@ -1,6 +1,6 @@ -from typing import ClassVar, Tuple +from typing import ClassVar from docutils import parsers class Parser(parsers.Parser): - config_section_dependencies: ClassVar[Tuple[str, ...]] + config_section_dependencies: ClassVar[tuple[str, ...]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/__init__.pyi index f605176c4173..b9cc02940ce0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/__init__.pyi @@ -1,11 +1,11 @@ -from typing import Any, ClassVar, Tuple +from typing import Any, ClassVar from typing_extensions import Literal from docutils import parsers from docutils.parsers.rst import states class Parser(parsers.Parser): - config_section_dependencies: ClassVar[Tuple[str, ...]] + config_section_dependencies: ClassVar[tuple[str, ...]] initial_state: Literal["Body", "RFC2822Body"] state_classes: Any inliner: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/__init__.pyi new file mode 100644 index 000000000000..790329106fac --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/__init__.pyi @@ -0,0 +1,12 @@ +from typing import Any + +from docutils.languages import 
_LanguageModule +from docutils.nodes import document +from docutils.parsers.rst import Directive +from docutils.utils import SystemMessage + +def register_directive(name: str, directive: type[Directive]) -> None: ... +def directive( + directive_name: str, language_module: _LanguageModule, document: document +) -> tuple[type[Directive] | None, list[SystemMessage]]: ... +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/nodes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/admonitions.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/nodes.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/admonitions.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/body.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/html.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/html.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/html.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/images.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/images.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/images.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/misc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/misc.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/misc.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/parts.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/parts.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/parts.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/references.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/references.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/references.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/tables.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/tables.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/directives/tables.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/roles.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/roles.pyi index 2c3d65b68c94..b433542e16d9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/roles.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/parsers/rst/roles.pyi @@ -1,12 +1,17 @@ -from typing import Any, Callable, Dict, List, Tuple +from typing import Any, Callable import docutils.nodes import docutils.parsers.rst.states +from docutils.languages import _LanguageModule +from docutils.utils import Reporter, SystemMessage _RoleFn = Callable[ - [str, str, str, int, docutils.parsers.rst.states.Inliner, Dict[str, Any], List[str]], - Tuple[List[docutils.nodes.reference], List[docutils.nodes.reference]], + [str, str, str, int, docutils.parsers.rst.states.Inliner, dict[str, Any], list[str]], + tuple[list[docutils.nodes.reference], list[docutils.nodes.reference]], ] def register_local_role(name: str, role_fn: _RoleFn) -> None: ... +def role( + role_name: str, language_module: _LanguageModule, lineno: int, reporter: Reporter +) -> tuple[_RoleFn | None, list[SystemMessage]]: ... def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/transforms/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/transforms/__init__.pyi index 964e6fa3f426..92b6fcb2ddc2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/transforms/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/transforms/__init__.pyi @@ -1,3 +1,14 @@ from typing import Any +from docutils.nodes import Node, document + +class Transform: + def __init__(self, document: document, startnode: Node | None = ...): ... + def __getattr__(self, __name: str) -> Any: ... # incomplete + +class Transformer: + def __init__(self, document: document): ... + def add_transform(self, transform_class: type[Transform], priority: int | None = ..., **kwargs) -> None: ... + def __getattr__(self, __name: str) -> Any: ... # incomplete + def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/utils/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/utils/__init__.pyi index 189352a6770c..fc0c2f3460a9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/utils/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/utils/__init__.pyi @@ -1,7 +1,11 @@ +import optparse from collections.abc import Iterable from typing import Any +from typing_extensions import Literal +from docutils import ApplicationError from docutils.io import FileOutput +from docutils.nodes import document _list = list @@ -13,4 +17,24 @@ class DependencyList: def add(self, *filenames: str) -> None: ... def close(self) -> None: ... 
+_SystemMessageLevel = Literal[0, 1, 2, 3, 4] + +class Reporter: + DEBUG_LEVEL: Literal[0] + INFO_LEVEL: Literal[1] + WARNING_LEVEL: Literal[2] + ERROR_LEVEL: Literal[3] + SEVERE_LEVEL: Literal[4] + + source: str + report_level: _SystemMessageLevel + halt_level: _SystemMessageLevel + def __getattr__(self, __name: str) -> Any: ... # incomplete + +class SystemMessage(ApplicationError): + level: _SystemMessageLevel + def __init__(self, system_message: object, level: _SystemMessageLevel): ... + +def new_reporter(source_path: str, settings: optparse.Values) -> Reporter: ... +def new_document(source_path: str, settings: optparse.Values | None = ...) -> document: ... def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/docutils_xml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/docutils_xml.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/docutils_xml.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/html4css1.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/html4css1.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/html4css1.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/html5_polyglot.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/html5_polyglot.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/html5_polyglot.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/latex2e.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/latex2e.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/latex2e.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/manpage.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/manpage.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/manpage.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/null.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/null.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/null.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/odf_odt.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/odf_odt.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/odf_odt.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/pep_html.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/pep_html.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/pep_html.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/pseudoxml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/pseudoxml.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/pseudoxml.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/s5_html.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/s5_html.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/s5_html.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/xetex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/xetex.pyi new file mode 100644 index 000000000000..964e6fa3f426 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/docutils/docutils/writers/xetex.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def __getattr__(name: str) -> Any: ... # incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/editdistance/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/editdistance/METADATA.toml index c0ca2875e6a9..1883b870198a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/editdistance/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/editdistance/METADATA.toml @@ -1 +1 @@ -version = "0.5" +version = "0.5.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/emoji/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/emoji/METADATA.toml index 55112056081f..56eef46c616f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/emoji/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/emoji/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.2" +version = "1.2.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/emoji/emoji/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/emoji/emoji/core.pyi index 96b04d026223..de134feab0bc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/emoji/emoji/core.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/emoji/emoji/core.pyi @@ -1,4 +1,4 @@ -from typing import Pattern, Text, Tuple +from typing import Pattern, Text from typing_extensions import Literal _DEFAULT_DELIMITER: str @@ -6,11 +6,11 @@ _DEFAULT_DELIMITER: str def emojize( string: str, use_aliases: bool = ..., - delimiters: Tuple[str, str] = ..., + delimiters: tuple[str, str] = ..., variant: Literal["text_type", 
"emoji_type", None] = ..., language: str = ..., ) -> str: ... -def demojize(string: str, use_aliases: bool = ..., delimiters: Tuple[str, str] = ..., language: str = ...) -> str: ... +def demojize(string: str, use_aliases: bool = ..., delimiters: tuple[str, str] = ..., language: str = ...) -> str: ... def get_emoji_regexp(language: str = ...) -> Pattern[Text]: ... def emoji_lis(string: str, language: str = ...) -> list[dict[str, int | str]]: ... def distinct_emoji_lis(string: str) -> list[str]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/METADATA.toml index 378b109c3adc..7730a01469a9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.3" +version = "0.3.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/entrypoints.pyi b/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/entrypoints.pyi index 444d6cbc9587..11a716f30159 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/entrypoints.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/entrypoints/entrypoints.pyi @@ -1,6 +1,6 @@ import sys from _typeshed import Self -from typing import Any, Dict, Iterator, List, Sequence, Text, Tuple, Type +from typing import Any, Iterator, Sequence, Text if sys.version_info >= (3, 0): from configparser import ConfigParser @@ -42,7 +42,7 @@ class EntryPoint: ) -> None: ... def load(self) -> Any: ... @classmethod - def from_string(cls: Type[Self], epstr: Text, name: Text, distro: Distribution | None = ...) -> Self: ... + def from_string(cls: type[Self], epstr: Text, name: Text, distro: Distribution | None = ...) -> Self: ... 
class Distribution: name: Text @@ -51,7 +51,7 @@ class Distribution: def iter_files_distros( path: Sequence[Text] | None = ..., repeated_distro: Text = ... -) -> Iterator[Tuple[ConfigParser, Distribution | None]]: ... +) -> Iterator[tuple[ConfigParser, Distribution | None]]: ... def get_single(group: Text, name: Text, path: Sequence[Text] | None = ...) -> EntryPoint: ... -def get_group_named(group: Text, path: Sequence[Text] | None = ...) -> Dict[str, EntryPoint]: ... -def get_group_all(group: Text, path: Sequence[Text] | None = ...) -> List[EntryPoint]: ... +def get_group_named(group: Text, path: Sequence[Text] | None = ...) -> dict[str, EntryPoint]: ... +def get_group_all(group: Text, path: Sequence[Text] | None = ...) -> list[EntryPoint]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/filelock/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/filelock/METADATA.toml index 5f1541084942..38c94680a9a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/filelock/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/filelock/METADATA.toml @@ -1 +1 @@ -version = "0.1" +version = "3.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/filelock/filelock/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/filelock/filelock/__init__.pyi index 6b65fa78e039..1d66887f130a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/filelock/filelock/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/filelock/filelock/__init__.pyi @@ -1,19 +1,15 @@ import sys -from logging import Logger +from _typeshed import Self from types import TracebackType -from typing import Type - -def logger() -> Logger: ... class Timeout(TimeoutError): def __init__(self, lock_file: str) -> None: ... - def __str__(self) -> str: ... class _Acquire_ReturnProxy: def __init__(self, lock: str) -> None: ... def __enter__(self) -> str: ... 
def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, traceback: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, traceback: TracebackType | None ) -> None: ... class BaseFileLock: @@ -28,9 +24,9 @@ class BaseFileLock: def is_locked(self) -> bool: ... def acquire(self, timeout: float | None = ..., poll_intervall: float = ...) -> _Acquire_ReturnProxy: ... def release(self, force: bool = ...) -> None: ... - def __enter__(self) -> BaseFileLock: ... + def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, traceback: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, traceback: TracebackType | None ) -> None: ... def __del__(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/first/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/first/METADATA.toml index 0f01cc4b1d64..72294377102c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/first/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/first/METADATA.toml @@ -1,2 +1,2 @@ -version = "2.0" +version = "2.0.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-2020/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-2020/METADATA.toml new file mode 100644 index 000000000000..6cf9fae44d92 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-2020/METADATA.toml @@ -0,0 +1 @@ +version = "1.6.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-2020/flake8_2020.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-2020/flake8_2020.pyi new file mode 100644 index 000000000000..b46c3d5ed592 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-2020/flake8_2020.pyi @@ -0,0 +1,14 @@ +# flake8-2020 has type annotations, but 
PEP 561 states: +# This PEP does not support distributing typing information as part of module-only distributions or single-file modules within namespace packages. +# Therefore typeshed is the best place. + +import ast +from typing import Any, ClassVar, Generator + +class Plugin: + name: ClassVar[str] + version: ClassVar[str] + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Any: ... # incomplete (other attributes are normally not accessed) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml new file mode 100644 index 000000000000..4965db3af595 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/METADATA.toml @@ -0,0 +1 @@ +version = "21.11.29" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi new file mode 100644 index 000000000000..b435e5bcecda --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-bugbear/bugbear.pyi @@ -0,0 +1,27 @@ +import argparse +import ast +from typing import Any, Sequence + +class BugBearChecker: + name: str + version: str + tree: ast.AST | None + filename: str + lines: Sequence[str] | None + max_line_length: int + visitor: ast.NodeVisitor + options: argparse.Namespace | None + def run(self) -> None: ... + @staticmethod + def add_options(optmanager: Any) -> None: ... + def __init__( + self, + tree: ast.AST | None = ..., + filename: str = ..., + lines: Sequence[str] | None = ..., + max_line_length: int = ..., + options: argparse.Namespace | None = ..., + ) -> None: ... + def __getattr__(self, name: str) -> Any: ... # incomplete (other attributes are normally not accessed) + +def __getattr__(name: str) -> Any: ... 
# incomplete (other attributes are normally not accessed) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml new file mode 100644 index 000000000000..97ceca8aa9f5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/METADATA.toml @@ -0,0 +1 @@ +version = "1.5.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/flake8_builtins.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/flake8_builtins.pyi new file mode 100644 index 000000000000..904be75d389a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-builtins/flake8_builtins.pyi @@ -0,0 +1,10 @@ +import ast +from typing import Any, ClassVar, Generator + +class BuiltinsChecker: + name: ClassVar[str] + version: ClassVar[str] + def __init__(self, tree: ast.AST, filename: str) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Any: ... 
# incomplete (other attributes are normally not accessed) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-docstrings/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-docstrings/METADATA.toml new file mode 100644 index 000000000000..6cf9fae44d92 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-docstrings/METADATA.toml @@ -0,0 +1 @@ +version = "1.6.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-docstrings/flake8_docstrings.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-docstrings/flake8_docstrings.pyi new file mode 100644 index 000000000000..80c252903982 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-docstrings/flake8_docstrings.pyi @@ -0,0 +1,19 @@ +import argparse +import ast +from typing import Any, ClassVar, Generator, Iterable + +class pep257Checker: + name: ClassVar[str] + version: ClassVar[str] + tree: ast.AST + filename: str + checker: Any + source: str + def __init__(self, tree: ast.AST, filename: str, lines: Iterable[str]) -> None: ... + @classmethod + def add_options(cls, parser: Any) -> None: ... + @classmethod + def parse_options(cls, options: argparse.Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Any: ... 
# incomplete diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/METADATA.toml new file mode 100644 index 000000000000..3ea18392d7df --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/METADATA.toml @@ -0,0 +1 @@ +version = "1.3.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/__init__.pyi new file mode 100644 index 000000000000..0c3f4bced581 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/__init__.pyi @@ -0,0 +1,9 @@ +from .plugin import Error as Error, Plugin as Plugin, Visitor as Visitor +from .utils import ( + assert_error as assert_error, + assert_not_error as assert_not_error, + check_equivalent_nodes as check_equivalent_nodes, + is_false as is_false, + is_none as is_none, + is_true as is_true, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/plugin.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/plugin.pyi new file mode 100644 index 000000000000..25f910fc30b9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/plugin.pyi @@ -0,0 +1,36 @@ +import argparse +import ast +from typing import Any, Generic, Iterable, Iterator, TypeVar + +FLAKE8_ERROR = tuple[int, int, str, type[Any]] +TConfig = TypeVar("TConfig") # noqa: Y001 + +class Error: + code: str + message: str + lineno: int + col_offset: int + def __init__(self, lineno: int, col_offset: int, **kwargs: Any) -> None: ... + @classmethod + def formatted_message(cls, **kwargs: Any) -> str: ... 
+ +class Visitor(ast.NodeVisitor, Generic[TConfig]): + errors: list[Error] + def __init__(self, config: TConfig | None = ...) -> None: ... + @property + def config(self) -> TConfig: ... + def error_from_node(self, error: type[Error], node: ast.AST, **kwargs: Any) -> None: ... + +class Plugin(Generic[TConfig]): + name: str + version: str + visitors: list[type[Visitor[TConfig]]] + config: TConfig + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Iterable[FLAKE8_ERROR]: ... + @classmethod + def parse_options(cls, option_manager: Any, options: argparse.Namespace, args: list[str]) -> None: ... + @classmethod + def parse_options_to_config(cls, option_manager: Any, options: argparse.Namespace, args: list[str]) -> TConfig | None: ... + @classmethod + def test_config(cls, config: TConfig) -> Iterator[None]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/__init__.pyi new file mode 100644 index 000000000000..095dbe4c7283 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/__init__.pyi @@ -0,0 +1,3 @@ +from .assertions import assert_error as assert_error, assert_not_error as assert_not_error +from .constants import is_false as is_false, is_none as is_none, is_true as is_true +from .equiv_nodes import check_equivalent_nodes as check_equivalent_nodes diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/assertions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/assertions.pyi new file mode 100644 index 000000000000..d030a527f773 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/assertions.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from ..plugin 
import Error as Error, TConfig as TConfig, Visitor as Visitor + +def assert_error( + visitor_cls: type[Visitor[TConfig]], src: str, expected: type[Error], config: TConfig | None = ..., **kwargs: Any +) -> None: ... +def assert_not_error(visitor_cls: type[Visitor[TConfig]], src: str, config: TConfig | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/constants.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/constants.pyi new file mode 100644 index 000000000000..8503693c6f3e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/constants.pyi @@ -0,0 +1,5 @@ +import ast + +def is_none(node: ast.AST) -> bool: ... +def is_false(node: ast.AST) -> bool: ... +def is_true(node: ast.AST) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/equiv_nodes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/equiv_nodes.pyi new file mode 100644 index 000000000000..641945b106b9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-plugin-utils/flake8_plugin_utils/utils/equiv_nodes.pyi @@ -0,0 +1,3 @@ +import ast + +def check_equivalent_nodes(node1: ast.AST, node2: ast.AST) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-rst-docstrings/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-rst-docstrings/METADATA.toml new file mode 100644 index 000000000000..a42da251bed5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-rst-docstrings/METADATA.toml @@ -0,0 +1 @@ +version = "0.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-rst-docstrings/flake8_rst_docstrings.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-rst-docstrings/flake8_rst_docstrings.pyi new file mode 100644 index 000000000000..399f7cb53a0c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-rst-docstrings/flake8_rst_docstrings.pyi @@ -0,0 +1,29 @@ +import ast +from argparse import Namespace +from collections.abc import Container +from typing import Any, Generator + +rst_prefix: str +rst_fail_load: int +rst_fail_lint: int +code_mapping_info: dict[str, int] +code_mapping_warning: dict[str, int] +code_mapping_error: dict[str, int] +code_mapping_severe: dict[str, int] +code_mappings_by_level: dict[int, dict[str, int]] + +def code_mapping( + level: int, msg: str, extra_directives: Container[str], extra_roles: Container[str], default: int = ... +) -> int: ... + +class reStructuredTextChecker: + name: str + version: str + tree: ast.AST + filename: str + def __init__(self, tree: ast.AST, filename: str = ...) -> None: ... + @classmethod + def add_options(cls, parser: Any) -> None: ... + @classmethod + def parse_options(cls, options: Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[reStructuredTextChecker]], None, None]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml new file mode 100644 index 000000000000..9550463d6ca4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/METADATA.toml @@ -0,0 +1 @@ +version = "0.18.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi new file mode 100644 index 000000000000..7296c8a25c7f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-simplify/flake8_simplify/__init__.pyi @@ -0,0 +1,8 @@ +import ast +from typing import Any, ClassVar, Generator + +class Plugin: + name: ClassVar[str] + version: ClassVar[str] + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-typing-imports/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/flake8-typing-imports/METADATA.toml new file mode 100644 index 000000000000..88c1356c9e65 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-typing-imports/METADATA.toml @@ -0,0 +1 @@ +version = "1.11.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/flake8-typing-imports/flake8_typing_imports.pyi b/packages/pyright-internal/typeshed-fallback/stubs/flake8-typing-imports/flake8_typing_imports.pyi new file mode 100644 index 000000000000..0b3fe50a1fec --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/flake8-typing-imports/flake8_typing_imports.pyi @@ -0,0 +1,15 @@ +import argparse +import ast +from typing import Any, ClassVar, Generator + +class Plugin: + name: ClassVar[str] + version: ClassVar[str] + @staticmethod + def add_options(option_manager: Any) -> None: ... 
+ @classmethod + def parse_options(cls, options: argparse.Namespace) -> None: ... + def __init__(self, tree: ast.AST) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + +def __getattr__(name: str) -> Any: ... # incomplete (other attributes are normally not accessed) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/METADATA.toml new file mode 100644 index 000000000000..4b1457dea513 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/METADATA.toml @@ -0,0 +1,2 @@ +version = "2.4.*" +requires = ["types-Pillow"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/__init__.pyi new file mode 100644 index 000000000000..2ee9dd6704d3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/__init__.pyi @@ -0,0 +1,10 @@ +from pathlib import Path + +from .fpdf import FPDF as FPDF, TitleStyle as TitleStyle +from .html import HTML2FPDF as HTML2FPDF, HTMLMixin as HTMLMixin +from .template import Template as Template + +__license__: str +__version__: str +FPDF_VERSION: str +FPDF_FONT_DIR: Path diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/actions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/actions.pyi new file mode 100644 index 000000000000..69feb0a2bf21 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/actions.pyi @@ -0,0 +1,26 @@ +from abc import ABC +from typing import Any + +class Action(ABC): + def dict_as_string(self) -> None: ... + +class NamedAction(Action): + action_name: Any + def __init__(self, action_name) -> None: ... + def dict_as_string(self): ... + +class GoToAction(Action): + dest: Any + def __init__(self, dest) -> None: ... + def dict_as_string(self): ... 
+ +class GoToRemoteAction(Action): + file: Any + dest: Any + def __init__(self, file, dest) -> None: ... + def dict_as_string(self): ... + +class LaunchAction(Action): + file: Any + def __init__(self, file) -> None: ... + def dict_as_string(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/deprecation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/deprecation.pyi new file mode 100644 index 000000000000..044aa1cf795d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/deprecation.pyi @@ -0,0 +1,5 @@ +from types import ModuleType + +class WarnOnDeprecatedModuleAttributes(ModuleType): + def __getattr__(self, name): ... + def __setattr__(self, name, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/errors.pyi new file mode 100644 index 000000000000..637a55e25159 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/errors.pyi @@ -0,0 +1,9 @@ +from typing import Any + +class FPDFException(Exception): ... + +class FPDFPageFormatException(FPDFException): + argument: Any + unknown: Any + one: Any + def __init__(self, argument, unknown: bool = ..., one: bool = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/fonts.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/fonts.pyi new file mode 100644 index 000000000000..5a894fff6278 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/fonts.pyi @@ -0,0 +1,4 @@ +from typing import Any + +courier: Any +fpdf_charwidths: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/fpdf.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/fpdf.pyi new file mode 100644 index 000000000000..f17a00b03f05 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/fpdf.pyi @@ -0,0 +1,286 @@ +import datetime +from _typeshed import StrPath +from collections import defaultdict +from collections.abc import Callable +from contextlib import _GeneratorContextManager +from enum import IntEnum +from io import BytesIO +from pathlib import Path +from typing import Any, NamedTuple, overload +from typing_extensions import Literal + +from PIL import Image + +from .actions import Action +from .recorder import FPDFRecorder +from .syntax import DestinationXYZ +from .util import _Unit + +_Orientation = Literal["", "portrait", "p", "P", "landscape", "l", "L"] +_Format = Literal["", "a3", "A3", "a4", "A4", "a5", "A5", "letter", "Letter", "legal", "Legal"] +_FontStyle = Literal["", "B", "I"] +_FontStyles = Literal["", "B", "I", "U", "BU", "UB", "BI", "IB", "IU", "UI", "BIU", "BUI", "IBU", "IUB", "UBI", "UIB"] +PAGE_FORMATS: dict[_Format, tuple[float, float]] + +class DocumentState(IntEnum): + UNINITIALIZED: int + READY: int + GENERATING_PAGE: int + CLOSED: int + +class Annotation(NamedTuple): + type: str + x: int + y: int + width: int + height: int + contents: str | None = ... + link: str | int | None = ... + alt_text: str | None = ... + action: Action | None = ... + +class TitleStyle(NamedTuple): + font_family: str | None = ... + font_style: str | None = ... 
+ font_size_pt: int | None = ... + color: int | tuple[int, int, int] | None = ... + underline: bool = ... + t_margin: int | None = ... + l_margin: int | None = ... + b_margin: int | None = ... + +class ToCPlaceholder(NamedTuple): + render_function: Callable[[FPDF, Any], object] + start_page: int + y: int + pages: int = ... + +class SubsetMap: + def __init__(self, identities: list[int]) -> None: ... + def pick(self, unicode: int): ... + def dict(self): ... + +def get_page_format(format: _Format | tuple[float, float], k: float | None = ...) -> tuple[float, float]: ... +def load_cache(filename: Path): ... + +# TODO: TypedDicts +_Page = dict[str, Any] +_Font = dict[str, Any] +_FontFile = dict[str, Any] +_Image = dict[str, Any] + +class FPDF: + MARKDOWN_BOLD_MARKER: str + MARKDOWN_ITALICS_MARKER: str + MARKDOWN_UNDERLINE_MARKER: str + offsets: dict[int, int] + page: int + n: int + buffer: bytearray + pages: dict[int, _Page] + state: DocumentState + fonts: dict[str, _Font] + font_files: dict[str, _FontFile] + diffs: dict[int, int] + images: dict[str, _Image] + annots: defaultdict[int, list[Annotation]] + links: dict[int, DestinationXYZ] + in_footer: int + lasth: int + current_font: _Font + font_family: str + font_style: str + font_size_pt: int + font_stretching: int + str_alias_nb_pages: str + underline: int + draw_color: str + fill_color: str + text_color: str + ws: int + angle: int + font_cache_dir: Any + xmp_metadata: Any + image_filter: str + page_duration: int + page_transition: Any + struct_builder: Any + section_title_styles: Any + core_fonts: Any + core_fonts_encoding: str + font_aliases: Any + k: float + def_orientation: Any + font_size: Any + c_margin: Any + line_width: float + dw_pt: float + dh_pt: float + compress: bool + pdf_version: str + + x: float + y: float + t_margin: float + r_margin: float + l_margin: float + + # Set during call to _set_orientation(), called from __init__(). 
+ cur_orientation: Literal["P", "L"] + w_pt: float + h_pt: float + w: float + h: float + def __init__( + self, + orientation: _Orientation = ..., + unit: _Unit | float = ..., + format: _Format | tuple[float, float] = ..., + font_cache_dir: bool = ..., + ) -> None: ... + @property + def unifontsubset(self): ... + @property + def epw(self): ... + @property + def eph(self): ... + def set_margin(self, margin: float) -> None: ... + def set_margins(self, left: float, top: float, right: float = ...) -> None: ... + def set_left_margin(self, margin: float) -> None: ... + def set_top_margin(self, margin: float) -> None: ... + def set_right_margin(self, margin: float) -> None: ... + auto_page_break: Any + b_margin: Any + page_break_trigger: Any + def set_auto_page_break(self, auto: bool, margin: float = ...) -> None: ... + zoom_mode: Any + layout_mode: Any + def set_display_mode(self, zoom, layout: str = ...) -> None: ... + def set_compression(self, compress) -> None: ... + title: Any + def set_title(self, title: str) -> None: ... + lang: Any + def set_lang(self, lang: str) -> None: ... + subject: Any + def set_subject(self, subject: str) -> None: ... + author: Any + def set_author(self, author: str) -> None: ... + keywords: Any + def set_keywords(self, keywords: str) -> None: ... + creator: Any + def set_creator(self, creator: str) -> None: ... + producer: Any + def set_producer(self, producer: str) -> None: ... + creation_date: Any + def set_creation_date(self, date: datetime.datetime | None = ...) -> None: ... + def set_xmp_metadata(self, xmp_metadata) -> None: ... + def set_doc_option(self, opt, value) -> None: ... + def set_image_filter(self, image_filter) -> None: ... + def alias_nb_pages(self, alias: str = ...) -> None: ... + def open(self) -> None: ... + def close(self) -> None: ... 
+ def add_page( + self, + orientation: _Orientation = ..., + format: _Format | tuple[float, float] = ..., + same: bool = ..., + duration: int = ..., + transition: Any | None = ..., + ) -> None: ... + def header(self) -> None: ... + def footer(self) -> None: ... + def page_no(self) -> int: ... + def set_draw_color(self, r, g: int = ..., b: int = ...) -> None: ... + def set_fill_color(self, r, g: int = ..., b: int = ...) -> None: ... + def set_text_color(self, r, g: int = ..., b: int = ...) -> None: ... + def get_string_width(self, s, normalized: bool = ..., markdown: bool = ...): ... + def set_line_width(self, width: float) -> None: ... + def line(self, x1: float, y1: float, x2: float, y2: float) -> None: ... + def polyline(self, point_list, fill: bool = ..., polygon: bool = ...) -> None: ... + def polygon(self, point_list, fill: bool = ...) -> None: ... + def dashed_line(self, x1, y1, x2, y2, dash_length: int = ..., space_length: int = ...) -> None: ... + def rect(self, x, y, w, h, style: Any | None = ...) -> None: ... + def ellipse(self, x, y, w, h, style: Any | None = ...) -> None: ... + def circle(self, x, y, r, style: Any | None = ...) -> None: ... + def add_font(self, family: str, style: _FontStyle = ..., fname: str | None = ..., uni: bool = ...) -> None: ... + def set_font(self, family: str | None = ..., style: _FontStyles = ..., size: int = ...) -> None: ... + def set_font_size(self, size: int) -> None: ... + def set_stretching(self, stretching) -> None: ... + def add_link(self): ... + def set_link(self, link, y: int = ..., x: int = ..., page: int = ..., zoom: str = ...) -> None: ... + def link(self, x, y, w, h, link, alt_text: Any | None = ...) -> None: ... + def text_annotation(self, x, y, text) -> None: ... + def add_action(self, action, x, y, w, h) -> None: ... + def text(self, x, y, txt: str = ...) -> None: ... + def rotate(self, angle, x: Any | None = ..., y: Any | None = ...) -> None: ... 
+ def rotation(self, angle, x: Any | None = ..., y: Any | None = ...) -> _GeneratorContextManager[None]: ... + @property + def accept_page_break(self): ... + def cell( + self, + w: float | None = ..., + h: float | None = ..., + txt: str = ..., + border: bool | Literal[0, 1] | str = ..., + ln: int = ..., + align: str = ..., + fill: bool = ..., + link: str = ..., + center: bool = ..., + markdown: bool = ..., + ): ... + def will_page_break(self, height): ... + def multi_cell( + self, + w: float, + h: float | None = ..., + txt: str = ..., + border: bool | Literal[0, 1] | str = ..., + align: str = ..., + fill: bool = ..., + split_only: bool = ..., + link: str = ..., + ln: int = ..., + max_line_height: Any | None = ..., + markdown: bool = ..., + ): ... + def write(self, h: Any | None = ..., txt: str = ..., link: str = ...) -> None: ... + def image( + self, + name: str | Image.Image | BytesIO | StrPath, + x: float | None = ..., + y: float | None = ..., + w: float = ..., + h: float = ..., + type: str = ..., + link: str = ..., + title: str | None = ..., + alt_text: str | None = ..., + ) -> _Image: ... + def ln(self, h: Any | None = ...) -> None: ... + def get_x(self) -> float: ... + def set_x(self, x: float) -> None: ... + def get_y(self) -> float: ... + def set_y(self, y: float) -> None: ... + def set_xy(self, x: float, y: float) -> None: ... + @overload + def output(self, name: Literal[""] = ...) -> bytearray: ... # type: ignore[misc] + @overload + def output(self, name: str) -> None: ... + def normalize_text(self, txt): ... + def interleaved2of5(self, txt, x, y, w: int = ..., h: int = ...) -> None: ... + def code39(self, txt, x, y, w: float = ..., h: int = ...) -> None: ... + def rect_clip(self, x, y, w, h) -> _GeneratorContextManager[None]: ... + def unbreakable(self) -> _GeneratorContextManager[FPDFRecorder]: ... + def insert_toc_placeholder(self, render_toc_function, pages: int = ...) -> None: ... 
+ def set_section_title_styles( + self, + level0, + level1: Any | None = ..., + level2: Any | None = ..., + level3: Any | None = ..., + level4: Any | None = ..., + level5: Any | None = ..., + level6: Any | None = ..., + ) -> None: ... + def start_section(self, name, level: int = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/html.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/html.pyi new file mode 100644 index 000000000000..c2e9a3a59066 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/html.pyi @@ -0,0 +1,74 @@ +from html.parser import HTMLParser +from typing import Any + +LOGGER: Any +BULLET_WIN1252: str +DEFAULT_HEADING_SIZES: Any +COLOR_DICT: Any + +def px2mm(px): ... +def color_as_decimal(color: str = ...): ... + +class HTML2FPDF(HTMLParser): + pdf: Any + image_map: Any + li_tag_indent: Any + table_line_separators: Any + ul_bullet_char: Any + style: Any + href: str + align: str + page_links: Any + font_stack: Any + indent: int + bullet: Any + font_size: Any + font_color: Any + table: Any + table_col_width: Any + table_col_index: Any + td: Any + th: Any + tr: Any + thead: Any + tfoot: Any + tr_index: Any + theader: Any + tfooter: Any + theader_out: bool + table_row_height: int + heading_level: Any + heading_sizes: Any + def __init__( + self, + pdf, + image_map: Any | None = ..., + li_tag_indent: int = ..., + table_line_separators: bool = ..., + ul_bullet_char=..., + heading_sizes: Any | None = ..., + **_, + ): ... + def width2unit(self, length): ... + def handle_data(self, data) -> None: ... + def box_shadow(self, w, h, bgcolor) -> None: ... + def output_table_header(self) -> None: ... + tfooter_out: bool + def output_table_footer(self) -> None: ... + def output_table_sep(self) -> None: ... + font_face: Any + table_offset: Any + def handle_starttag(self, tag, attrs) -> None: ... + tbody: Any + def handle_endtag(self, tag) -> None: ... 
+ h: Any + def set_font(self, face: Any | None = ..., size: Any | None = ...) -> None: ... + def set_style(self, tag: Any | None = ..., enable: bool = ...) -> None: ... + def set_text_color(self, r: Any | None = ..., g: int = ..., b: int = ...) -> None: ... + def put_link(self, txt) -> None: ... + def render_toc(self, pdf, outline) -> None: ... + def error(self, message: str) -> None: ... + +class HTMLMixin: + HTML2FPDF_CLASS: Any + def write_html(self, text, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/image_parsing.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/image_parsing.pyi new file mode 100644 index 000000000000..14c4f4bcd261 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/image_parsing.pyi @@ -0,0 +1,11 @@ +from typing import Any +from typing_extensions import Literal + +_ImageFilter = Literal["AUTO", "FlateDecode", "DCTDecode", "JPXDecode"] + +SUPPORTED_IMAGE_FILTERS: tuple[_ImageFilter, ...] + +def load_image(filename): ... + +# Returned dict could be typed as a TypedDict. +def get_img_info(img, image_filter: _ImageFilter = ..., dims: Any | None = ...) -> dict[str, Any]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/outline.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/outline.pyi new file mode 100644 index 000000000000..0df21fb1a58b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/outline.pyi @@ -0,0 +1,32 @@ +from typing import Any, NamedTuple + +from .structure_tree import StructElem +from .syntax import Destination, PDFObject + +class OutlineSection(NamedTuple): + name: str + level: str + page_number: int + dest: Destination + struct_elem: StructElem | None = ... 
+ +class OutlineItemDictionary(PDFObject): + title: str + parent: Any | None + prev: Any | None + next: Any | None + first: Any | None + last: Any | None + count: int + dest: str | None + struct_elem: StructElem | None + def __init__(self, title: str, dest: str | None = ..., struct_elem: StructElem | None = ..., **kwargs) -> None: ... + +class OutlineDictionary(PDFObject): + type: str + first: Any | None + last: Any | None + count: int + def __init__(self, **kwargs) -> None: ... + +def serialize_outline(sections, first_object_id: int = ..., fpdf: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/recorder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/recorder.pyi new file mode 100644 index 000000000000..6401da5362ed --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/recorder.pyi @@ -0,0 +1,13 @@ +from typing import Any + +class FPDFRecorder: + pdf: Any + accept_page_break: bool + def __init__(self, pdf, accept_page_break: bool = ...) -> None: ... + def __getattr__(self, name): ... + def rewind(self) -> None: ... + def replay(self) -> None: ... + +class CallRecorder: + def __init__(self, func, calls) -> None: ... + def __call__(self, *args, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/structure_tree.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/structure_tree.pyi new file mode 100644 index 000000000000..b8bc849b3d19 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/structure_tree.pyi @@ -0,0 +1,52 @@ +from typing import Any, NamedTuple + +from .syntax import PDFObject + +class MarkedContent(NamedTuple): + page_object_id: int + struct_parents_id: int + struct_type: str + mcid: int | None = ... + title: str | None = ... + alt_text: str | None = ... + +class NumberTree(PDFObject): + nums: Any + def __init__(self, **kwargs) -> None: ... 
+ def serialize(self, fpdf: Any | None = ..., obj_dict: Any | None = ...): ... + +class StructTreeRoot(PDFObject): + type: str + parent_tree: Any + k: Any + def __init__(self, **kwargs) -> None: ... + +class StructElem(PDFObject): + type: str + s: Any + p: Any + k: Any + pg: Any + t: Any + alt: Any + def __init__( + self, + struct_type: str, + parent: PDFObject, + kids: list[int] | list[StructElem], + page: PDFObject | None = ..., + title: str | None = ..., + alt: str | None = ..., + **kwargs, + ) -> None: ... + +class StructureTreeBuilder: + struct_tree_root: Any + doc_struct_elem: Any + struct_elem_per_mc: Any + def __init__(self) -> None: ... + def add_marked_content(self, marked_content) -> None: ... + def next_mcid_for_page(self, page_object_id): ... + def empty(self): ... + def serialize(self, first_object_id: int = ..., fpdf: Any | None = ...): ... + def assign_ids(self, n): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/syntax.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/syntax.pyi new file mode 100644 index 000000000000..838880de0794 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/syntax.pyi @@ -0,0 +1,45 @@ +from abc import ABC +from typing import Any + +def clear_empty_fields(d): ... +def create_dictionary_string( + dict_, + open_dict: str = ..., + close_dict: str = ..., + field_join: str = ..., + key_value_join: str = ..., + has_empty_fields: bool = ..., +): ... +def create_list_string(list_): ... +def iobj_ref(n): ... +def create_stream(stream): ... + +class PDFObject: + def __init__(self, id: Any | None = ...) -> None: ... + @property + def id(self): ... + @id.setter + def id(self, n) -> None: ... + @property + def ref(self): ... + def serialize(self, fpdf: Any | None = ..., obj_dict: Any | None = ...): ... + +def camel_case(property_name): ... + +class PDFString(str): + def serialize(self): ... + +class PDFArray(list[Any]): + def serialize(self): ... 
+ +class Destination(ABC): + def as_str(self, pdf: Any | None = ...) -> None: ... + +class DestinationXYZ(Destination): + page: Any + x: Any + y: Any + zoom: Any + page_as_obj_id: Any + def __init__(self, page, x: int = ..., y: int = ..., zoom: str = ..., page_as_obj_id: bool = ...) -> None: ... + def as_str(self, pdf: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/template.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/template.pyi new file mode 100644 index 000000000000..8ab4d6cb970c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/template.pyi @@ -0,0 +1,35 @@ +from typing import Any + +class FlexTemplate: + pdf: Any + splitting_pdf: Any + handlers: Any + texts: Any + def __init__(self, pdf, elements: Any | None = ...) -> None: ... + elements: Any + keys: Any + def load_elements(self, elements) -> None: ... + def parse_csv(self, infile, delimiter: str = ..., decimal_sep: str = ..., encoding: Any | None = ...): ... + def __setitem__(self, name, value) -> None: ... + set: Any + def __contains__(self, name): ... + def __getitem__(self, name): ... + def split_multicell(self, text, element_name): ... + def render(self, offsetx: float = ..., offsety: float = ..., rotate: float = ..., scale: float = ...): ... + +class Template(FlexTemplate): + def __init__( + self, + infile: Any | None = ..., + elements: Any | None = ..., + format: str = ..., + orientation: str = ..., + unit: str = ..., + title: str = ..., + author: str = ..., + subject: str = ..., + creator: str = ..., + keywords: str = ..., + ) -> None: ... + def add_page(self) -> None: ... + def render(self, outfile: Any | None = ..., dest: Any | None = ...) -> None: ... 
# type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/transitions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/transitions.pyi new file mode 100644 index 000000000000..c1d692275e78 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/transitions.pyi @@ -0,0 +1,58 @@ +from abc import ABC +from typing import Any + +class Transition(ABC): + def dict_as_string(self) -> None: ... + +class SplitTransition(Transition): + dimension: Any + direction: Any + def __init__(self, dimension, direction) -> None: ... + def dict_as_string(self): ... + +class BlindsTransition(Transition): + dimension: Any + def __init__(self, dimension) -> None: ... + def dict_as_string(self): ... + +class BoxTransition(Transition): + direction: Any + def __init__(self, direction) -> None: ... + def dict_as_string(self): ... + +class WipeTransition(Transition): + direction: Any + def __init__(self, direction) -> None: ... + def dict_as_string(self): ... + +class DissolveTransition(Transition): + def dict_as_string(self): ... + +class GlitterTransition(Transition): + direction: Any + def __init__(self, direction) -> None: ... + def dict_as_string(self): ... + +class FlyTransition(Transition): + dimension: Any + direction: Any + def __init__(self, dimension, direction: Any | None = ...) -> None: ... + def dict_as_string(self): ... + +class PushTransition(Transition): + direction: Any + def __init__(self, direction) -> None: ... + def dict_as_string(self): ... + +class CoverTransition(Transition): + direction: Any + def __init__(self, direction) -> None: ... + def dict_as_string(self): ... + +class UncoverTransition(Transition): + direction: Any + def __init__(self, direction) -> None: ... + def dict_as_string(self): ... + +class FadeTransition(Transition): + def dict_as_string(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/ttfonts.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/ttfonts.pyi new file mode 100644 index 000000000000..84990f260693 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/ttfonts.pyi @@ -0,0 +1,72 @@ +from typing import Any + +GF_WORDS: Any +GF_SCALE: Any +GF_MORE: Any +GF_XYSCALE: Any +GF_TWOBYTWO: Any + +def sub32(x, y): ... +def calcChecksum(data): ... + +class TTFontFile: + maxStrLenRead: int + def __init__(self) -> None: ... + filename: Any + charWidths: Any + glyphPos: Any + charToGlyph: Any + tables: Any + otables: Any + ascent: int + descent: int + version: Any + def getMetrics(self, file) -> None: ... + numTables: Any + searchRange: Any + entrySelector: Any + rangeShift: Any + def readTableDirectory(self) -> None: ... + def get_table_pos(self, tag): ... + def seek(self, pos) -> None: ... + def skip(self, delta) -> None: ... + def seek_table(self, tag, offset_in_table: int = ...): ... + def read_tag(self): ... + def read_short(self): ... + def read_ushort(self): ... + def read_ulong(self): ... + def get_ushort(self, pos): ... + @staticmethod + def splice(stream, offset, value): ... + def get_chunk(self, pos, length): ... + def get_table(self, tag): ... + def add(self, tag, data) -> None: ... + sFamilyClass: int + sFamilySubClass: int + name: Any + familyName: Any + styleName: Any + fullName: Any + uniqueFontID: Any + unitsPerEm: Any + bbox: Any + capHeight: Any + stemV: Any + italicAngle: Any + underlinePosition: Any + underlineThickness: Any + flags: int + def extractInfo(self) -> None: ... + maxUni: int + codeToGlyph: Any + glyphdata: Any + def makeSubset(self, file, subset): ... + def getGlyphs(self, originalGlyphIdx, nonlocals) -> None: ... + defaultWidth: Any + def getHMTX(self, numberOfHMetrics, numGlyphs, glyphToChar, scale) -> None: ... + def getHMetric(self, numberOfHMetrics, gid): ... 
+ def getLOCA(self, indexToLocFormat, numGlyphs) -> None: ... + maxUniChar: int + def getCMAP4(self, unicode_cmap_offset, glyphToChar, charToGlyph) -> None: ... + def getCMAP12(self, unicode_cmap_offset, glyphToChar, charToGlyph) -> None: ... + def endTTFile(self, stm): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/util.pyi new file mode 100644 index 000000000000..21b00796ab70 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/fpdf2/fpdf/util.pyi @@ -0,0 +1,18 @@ +from collections.abc import Iterable +from typing import Any +from typing_extensions import Literal + +_Unit = Literal["pt", "mm", "cm", "in"] + +def substr(s, start, length: int = ...): ... +def enclose_in_parens(s): ... +def escape_parens(s): ... +def b(s): ... +def get_scale_factor(unit: _Unit | float) -> float: ... +def convert_unit( + # to_convert has a recursive type + to_convert: float | Iterable[float | Iterable[Any]], + old_unit: str | float, + new_unit: str | float, +) -> float | tuple[float, ...]: ... +def dochecks() -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/freezegun/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/freezegun/METADATA.toml index bad265e4fe3f..c9f594bd7bbd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/freezegun/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/freezegun/METADATA.toml @@ -1 +1 @@ -version = "1.1" +version = "1.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/freezegun/freezegun/api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/freezegun/freezegun/api.pyi index df10e569ae1d..183443ede1d6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/freezegun/freezegun/api.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/freezegun/freezegun/api.pyi @@ -1,22 +1,22 @@ from collections.abc import Awaitable, Callable, Iterator, Sequence from datetime import date, datetime, timedelta from numbers import Real -from typing import Any, Type, TypeVar, Union, overload +from typing import Any, TypeVar, Union, overload _T = TypeVar("_T") _Freezable = Union[str, datetime, date, timedelta] -class TickingDateTimeFactory(object): +class TickingDateTimeFactory: def __init__(self, time_to_freeze: datetime, start: datetime) -> None: ... def __call__(self) -> datetime: ... -class FrozenDateTimeFactory(object): +class FrozenDateTimeFactory: def __init__(self, time_to_freeze: datetime) -> None: ... def __call__(self) -> datetime: ... def tick(self, delta: float | Real | timedelta = ...) -> None: ... def move_to(self, target_datetime: _Freezable | None) -> None: ... -class StepTickTimeFactory(object): +class StepTickTimeFactory: def __init__(self, time_to_freeze: datetime, step_width: float) -> None: ... def __call__(self) -> datetime: ... def tick(self, delta: timedelta | None = ...) -> None: ... @@ -31,19 +31,20 @@ class _freeze_time: ignore: Sequence[str], tick: bool, as_arg: bool, + as_kwarg: str, auto_tick_seconds: float, ) -> None: ... 
@overload - def __call__(self, func: Type[_T]) -> Type[_T]: ... + def __call__(self, func: type[_T]) -> type[_T]: ... @overload def __call__(self, func: Callable[..., Awaitable[_T]]) -> Callable[..., Awaitable[_T]]: ... @overload def __call__(self, func: Callable[..., _T]) -> Callable[..., _T]: ... - def __enter__(self) -> Any: ... + def __enter__(self) -> FrozenDateTimeFactory | StepTickTimeFactory: ... def __exit__(self, *args: Any) -> None: ... def start(self) -> Any: ... def stop(self) -> None: ... - def decorate_class(self, klass: Type[_T]) -> _T: ... + def decorate_class(self, klass: type[_T]) -> _T: ... def decorate_coroutine(self, coroutine: _T) -> _T: ... def decorate_callable(self, func: Callable[..., _T]) -> Callable[..., _T]: ... @@ -53,5 +54,6 @@ def freeze_time( ignore: Sequence[str] | None = ..., tick: bool | None = ..., as_arg: bool | None = ..., + as_kwarg: str | None = ..., auto_tick_seconds: float | None = ..., ) -> _freeze_time: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/frozendict/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/frozendict/METADATA.toml index 5f1541084942..58bc38349459 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/frozendict/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/frozendict/METADATA.toml @@ -1 +1 @@ -version = "0.1" +version = "2.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/frozendict/frozendict.pyi b/packages/pyright-internal/typeshed-fallback/stubs/frozendict/frozendict.pyi index 99ce6b8da46c..6d72eb5cddc9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/frozendict/frozendict.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/frozendict/frozendict.pyi @@ -1,27 +1,26 @@ import collections -from typing import Any, Generic, Iterable, Iterator, Mapping, Tuple, Type, TypeVar, overload +from _typeshed import Self +from typing import Any, Generic, Iterable, Iterator, Mapping, TypeVar, overload -_S = 
TypeVar("_S") _KT = TypeVar("_KT") _VT = TypeVar("_VT") class frozendict(Mapping[_KT, _VT], Generic[_KT, _VT]): - dict_cls: Type[dict[Any, Any]] = ... + dict_cls: type[dict[Any, Any]] = ... @overload def __init__(self, **kwargs: _VT) -> None: ... @overload def __init__(self, mapping: Mapping[_KT, _VT]) -> None: ... @overload - def __init__(self, iterable: Iterable[Tuple[_KT, _VT]]) -> None: ... + def __init__(self, iterable: Iterable[tuple[_KT, _VT]]) -> None: ... def __getitem__(self, key: _KT) -> _VT: ... - def __contains__(self, key: object) -> bool: ... - def copy(self: _S, **add_or_replace: _VT) -> _S: ... + def __contains__(self, __key: object) -> bool: ... + def copy(self: Self, **add_or_replace: _VT) -> Self: ... def __iter__(self) -> Iterator[_KT]: ... def __len__(self) -> int: ... - def __repr__(self) -> str: ... def __hash__(self) -> int: ... class FrozenOrderedDict(frozendict[_KT, _VT]): - dict_cls: Type[collections.OrderedDict[Any, Any]] = ... + dict_cls: type[collections.OrderedDict[Any, Any]] = ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml new file mode 100644 index 000000000000..971cc4a0f9b2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.9.*" +requires = ["types-six"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/__init__.pyi new file mode 100644 index 000000000000..3fb429e6e962 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/__init__.pyi @@ -0,0 +1,109 @@ +from google.cloud.ndb._datastore_api import EVENTUAL as EVENTUAL, EVENTUAL_CONSISTENCY as EVENTUAL_CONSISTENCY, STRONG as STRONG +from google.cloud.ndb._datastore_query import Cursor as Cursor, QueryIterator as QueryIterator +from google.cloud.ndb._transaction import ( + in_transaction as in_transaction, + non_transactional as non_transactional, + transaction as transaction, + transaction_async as transaction_async, + transactional as transactional, + transactional_async as transactional_async, + transactional_tasklet as transactional_tasklet, +) +from google.cloud.ndb.client import Client as Client +from google.cloud.ndb.context import ( + AutoBatcher as AutoBatcher, + Context as Context, + ContextOptions as ContextOptions, + TransactionOptions as TransactionOptions, + get_context as get_context, + get_toplevel_context as get_toplevel_context, +) +from google.cloud.ndb.global_cache import GlobalCache as GlobalCache, MemcacheCache as MemcacheCache, RedisCache as RedisCache +from google.cloud.ndb.key import Key as Key +from google.cloud.ndb.model import ( + BadProjectionError as BadProjectionError, + BlobKey as BlobKey, + BlobKeyProperty as BlobKeyProperty, + BlobProperty as BlobProperty, 
+ BooleanProperty as BooleanProperty, + ComputedProperty as ComputedProperty, + ComputedPropertyError as ComputedPropertyError, + DateProperty as DateProperty, + DateTimeProperty as DateTimeProperty, + Expando as Expando, + FloatProperty as FloatProperty, + GenericProperty as GenericProperty, + GeoPt as GeoPt, + GeoPtProperty as GeoPtProperty, + Index as Index, + IndexProperty as IndexProperty, + IndexState as IndexState, + IntegerProperty as IntegerProperty, + InvalidPropertyError as InvalidPropertyError, + JsonProperty as JsonProperty, + KeyProperty as KeyProperty, + KindError as KindError, + LocalStructuredProperty as LocalStructuredProperty, + MetaModel as MetaModel, + Model as Model, + ModelAdapter as ModelAdapter, + ModelAttribute as ModelAttribute, + ModelKey as ModelKey, + PickleProperty as PickleProperty, + Property as Property, + ReadonlyPropertyError as ReadonlyPropertyError, + Rollback as Rollback, + StringProperty as StringProperty, + StructuredProperty as StructuredProperty, + TextProperty as TextProperty, + TimeProperty as TimeProperty, + UnprojectedPropertyError as UnprojectedPropertyError, + User as User, + UserNotFoundError as UserNotFoundError, + UserProperty as UserProperty, + delete_multi as delete_multi, + delete_multi_async as delete_multi_async, + get_indexes as get_indexes, + get_indexes_async as get_indexes_async, + get_multi as get_multi, + get_multi_async as get_multi_async, + make_connection as make_connection, + put_multi as put_multi, + put_multi_async as put_multi_async, +) +from google.cloud.ndb.polymodel import PolyModel as PolyModel +from google.cloud.ndb.query import ( + AND as AND, + OR as OR, + ConjunctionNode as ConjunctionNode, + DisjunctionNode as DisjunctionNode, + FalseNode as FalseNode, + FilterNode as FilterNode, + Node as Node, + Parameter as Parameter, + ParameterizedFunction as ParameterizedFunction, + ParameterizedThing as ParameterizedThing, + ParameterNode as ParameterNode, + PostFilterNode as PostFilterNode, + 
Query as Query, + QueryOptions as QueryOptions, + RepeatedStructuredPropertyPredicate as RepeatedStructuredPropertyPredicate, + gql as gql, +) +from google.cloud.ndb.tasklets import ( + Future as Future, + QueueFuture as QueueFuture, + ReducingFuture as ReducingFuture, + Return as Return, + SerialQueueFuture as SerialQueueFuture, + add_flow_exception as add_flow_exception, + make_context as make_context, + make_default_context as make_default_context, + set_context as set_context, + sleep as sleep, + synctasklet as synctasklet, + tasklet as tasklet, + toplevel as toplevel, + wait_all as wait_all, + wait_any as wait_any, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_batch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_batch.pyi new file mode 100644 index 000000000000..0f50383e9b04 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_batch.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def get_batch(batch_cls, options: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi new file mode 100644 index 000000000000..dd86c584e048 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_cache.pyi @@ -0,0 +1,72 @@ +from typing import Any + +from google.cloud.ndb import tasklets as tasklets + +class ContextCache: + def get_and_validate(self, key): ... + +class _GlobalCacheBatch: + def full(self): ... + def idle_callback(self) -> None: ... + def done_callback(self, cache_call) -> None: ... + def make_call(self) -> None: ... + def future_info(self, key) -> None: ... + +global_get: Any + +class _GlobalCacheGetBatch(_GlobalCacheBatch): + todo: Any + keys: Any + def __init__(self, ignore_options) -> None: ... 
+ def add(self, key): ... + def done_callback(self, cache_call) -> None: ... + def make_call(self): ... + def future_info(self, key): ... + +def global_set(key, value, expires: Any | None = ..., read: bool = ...): ... + +class _GlobalCacheSetBatch(_GlobalCacheBatch): + expires: Any + todo: object + futures: object + def __init__(self, options) -> None: ... + def done_callback(self, cache_call) -> None: ... + def add(self, key, value): ... + def make_call(self): ... + def future_info(self, key, value): ... + +class _GlobalCacheSetIfNotExistsBatch(_GlobalCacheSetBatch): + def add(self, key, value): ... + def make_call(self): ... + def future_info(self, key, value): ... + +global_delete: Any + +class _GlobalCacheDeleteBatch(_GlobalCacheBatch): + keys: Any + futures: Any + def __init__(self, ignore_options) -> None: ... + def add(self, key): ... + def make_call(self): ... + def future_info(self, key): ... + +global_watch: Any + +class _GlobalCacheWatchBatch(_GlobalCacheDeleteBatch): + def make_call(self): ... + def future_info(self, key): ... + +def global_unwatch(key): ... + +class _GlobalCacheUnwatchBatch(_GlobalCacheDeleteBatch): + def make_call(self): ... + def future_info(self, key): ... + +global_compare_and_swap: Any + +class _GlobalCacheCompareAndSwapBatch(_GlobalCacheSetBatch): + def make_call(self): ... + def future_info(self, key, value): ... + +def is_locked_value(value): ... +def global_cache_key(key): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_api.pyi new file mode 100644 index 000000000000..67b9641d6080 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_api.pyi @@ -0,0 +1,5 @@ +from typing_extensions import Literal + +EVENTUAL: Literal[2] +EVENTUAL_CONSISTENCY: Literal[2] +STRONG: Literal[1] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi new file mode 100644 index 000000000000..c199db49696d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_datastore_query.pyi @@ -0,0 +1,22 @@ +from typing import Any + +class QueryIterator: + def __iter__(self): ... + def has_next(self) -> None: ... + def has_next_async(self) -> None: ... + def probably_has_next(self) -> None: ... + def next(self) -> None: ... + def cursor_before(self) -> None: ... + def cursor_after(self) -> None: ... + def index_list(self) -> None: ... + +class Cursor: + @classmethod + def from_websafe_string(cls, urlsafe): ... + cursor: Any + def __init__(self, cursor: Any | None = ..., urlsafe: Any | None = ...) -> None: ... + def to_websafe_string(self): ... + def urlsafe(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi new file mode 100644 index 000000000000..2c80cd3751c1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_eventloop.pyi @@ -0,0 +1,26 @@ +from typing import Any, NamedTuple + +class _Event(NamedTuple): + when: Any + callback: Any + args: Any + kwargs: Any + +class EventLoop: + current: Any + idlers: Any + inactive: int + queue: Any + rpcs: Any + rpc_results: Any + def __init__(self) -> None: ... + def clear(self) -> None: ... + def insort_event_right(self, event) -> None: ... + def call_soon(self, callback, *args, **kwargs) -> None: ... + def queue_call(self, delay, callback, *args, **kwargs) -> None: ... + def queue_rpc(self, rpc, callback) -> None: ... + def add_idle(self, callback, *args, **kwargs) -> None: ... + def run_idle(self): ... + def run0(self): ... + def run1(self): ... + def run(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_options.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_options.pyi new file mode 100644 index 000000000000..153e041a5889 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_options.pyi @@ -0,0 +1,15 @@ +from typing import Any + +class Options: + @classmethod + def options(cls, wrapped): ... + @classmethod + def slots(cls): ... + def __init__(self, config: Any | None = ..., **kwargs) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def copy(self, **kwargs): ... + def items(self) -> None: ... + +class ReadOptions(Options): + def __init__(self, config: Any | None = ..., **kwargs) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_transaction.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_transaction.pyi new file mode 100644 index 000000000000..c3dc5a4738d2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/_transaction.pyi @@ -0,0 +1,18 @@ +from typing import Any + +def in_transaction(): ... +def transaction( + callback, retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Any | None = ... +): ... +def transaction_async( + callback, retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Any | None = ... +): ... +def transaction_async_( + callback, retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Any | None = ... +): ... +def transactional(retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Any | None = ...): ... +def transactional_async(retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Any | None = ...): ... +def transactional_tasklet( + retries=..., read_only: bool = ..., join: bool = ..., xg: bool = ..., propagation: Any | None = ... +): ... +def non_transactional(allow_existing: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi new file mode 100644 index 000000000000..acb002aadcec --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/blobstore.pyi @@ -0,0 +1,65 @@ +from typing import Any + +from google.cloud.ndb import model + +BlobKey: Any +BLOB_INFO_KIND: str +BLOB_MIGRATION_KIND: str +BLOB_KEY_HEADER: str +BLOB_RANGE_HEADER: str +MAX_BLOB_FETCH_SIZE: int +UPLOAD_INFO_CREATION_HEADER: str +BlobKeyProperty = model.BlobKeyProperty + +class BlobFetchSizeTooLargeError: + def __init__(self, *args, **kwargs) -> None: ... + +class BlobInfo: + def __init__(self, *args, **kwargs) -> None: ... + @classmethod + def get(cls, *args, **kwargs) -> None: ... + @classmethod + def get_async(cls, *args, **kwargs) -> None: ... + @classmethod + def get_multi(cls, *args, **kwargs) -> None: ... + @classmethod + def get_multi_async(cls, *args, **kwargs) -> None: ... + +class BlobInfoParseError: + def __init__(self, *args, **kwargs) -> None: ... + +class BlobNotFoundError: + def __init__(self, *args, **kwargs) -> None: ... + +class BlobReader: + def __init__(self, *args, **kwargs) -> None: ... + +def create_upload_url(*args, **kwargs) -> None: ... +def create_upload_url_async(*args, **kwargs) -> None: ... + +class DataIndexOutOfRangeError: + def __init__(self, *args, **kwargs) -> None: ... + +def delete(*args, **kwargs) -> None: ... +def delete_async(*args, **kwargs) -> None: ... +def delete_multi(*args, **kwargs) -> None: ... +def delete_multi_async(*args, **kwargs) -> None: ... + +class Error: + def __init__(self, *args, **kwargs) -> None: ... + +def fetch_data(*args, **kwargs) -> None: ... +def fetch_data_async(*args, **kwargs) -> None: ... 
+ +get: Any +get_async: Any +get_multi: Any +get_multi_async: Any + +class InternalError: + def __init__(self, *args, **kwargs) -> None: ... + +def parse_blob_info(*args, **kwargs) -> None: ... + +class PermissionDeniedError: + def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/client.pyi new file mode 100644 index 000000000000..207673ea13a2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/client.pyi @@ -0,0 +1,21 @@ +from typing import Any + +DATASTORE_API_HOST: Any + +class Client: + SCOPE: Any + namespace: Any + host: Any + client_info: Any + secure: Any + stub: Any + def __init__(self, project: Any | None = ..., namespace: Any | None = ..., credentials: Any | None = ...) -> None: ... + def context( + self, + namespace=..., + cache_policy: Any | None = ..., + global_cache: Any | None = ..., + global_cache_policy: Any | None = ..., + global_cache_timeout_policy: Any | None = ..., + legacy_data: bool = ..., + ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi new file mode 100644 index 000000000000..bb3538fab21c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/context.pyi @@ -0,0 +1,105 @@ +from typing import Any, Callable, NamedTuple + +from google.cloud.ndb import Key, exceptions as exceptions + +class _LocalState: + def __init__(self) -> None: ... + @property + def context(self): ... + @context.setter + def context(self, value) -> None: ... + @property + def toplevel_context(self): ... + @toplevel_context.setter + def toplevel_context(self, value) -> None: ... 
+ +def get_context(raise_context_error: bool = ...): ... +def get_toplevel_context(raise_context_error: bool = ...): ... + +class _ContextTuple(NamedTuple): + client: Any + namespace: Any + eventloop: Any + batches: Any + commit_batches: Any + transaction: Any + cache: Any + global_cache: Any + on_commit_callbacks: Any + legacy_data: Any + +class _Context(_ContextTuple): + def __new__( + cls, + client, + namespace=..., + eventloop: Any | None = ..., + batches: Any | None = ..., + commit_batches: Any | None = ..., + transaction: Any | None = ..., + cache: Any | None = ..., + cache_policy: Any | None = ..., + global_cache: Any | None = ..., + global_cache_flush_keys: object | None = ..., + global_cache_policy: Callable[[Key], bool] | None = ..., + global_cache_timeout_policy: Any | None = ..., + datastore_policy: Any | None = ..., + on_commit_callbacks: Any | None = ..., + legacy_data: bool = ..., + retry: Any | None = ..., + rpc_time: Any | None = ..., + wait_time: Any | None = ..., + ): ... + def new(self, **kwargs): ... + rpc_time: int + wait_time: int + def use(self) -> None: ... + +class Context(_Context): + def clear_cache(self) -> None: ... + def flush(self) -> None: ... + def get_namespace(self): ... + def get_cache_policy(self): ... + def get_datastore_policy(self) -> None: ... + def get_global_cache_policy(self): ... + get_memcache_policy: Any + def get_global_cache_timeout_policy(self): ... + get_memcache_timeout_policy: Any + cache_policy: Any + def set_cache_policy(self, policy): ... + datastore_policy: Any + def set_datastore_policy(self, policy): ... + global_cache_policy: Any + def set_global_cache_policy(self, policy): ... + set_memcache_policy: Any + global_cache_timeout_policy: Any + def set_global_cache_timeout_policy(self, policy): ... + set_memcache_timeout_policy: Any + def get_retry_state(self): ... + def set_retry_state(self, state) -> None: ... + def clear_retry_state(self) -> None: ... + def call_on_commit(self, callback) -> None: ... 
+ def in_transaction(self): ... + def in_retry(self): ... + def memcache_add(self, *args, **kwargs) -> None: ... + def memcache_cas(self, *args, **kwargs) -> None: ... + def memcache_decr(self, *args, **kwargs) -> None: ... + def memcache_delete(self, *args, **kwargs) -> None: ... + def memcache_get(self, *args, **kwargs) -> None: ... + def memcache_gets(self, *args, **kwargs) -> None: ... + def memcache_incr(self, *args, **kwargs) -> None: ... + def memcache_replace(self, *args, **kwargs) -> None: ... + def memcache_set(self, *args, **kwargs) -> None: ... + def urlfetch(self, *args, **kwargs) -> None: ... + +class ContextOptions: + def __init__(self, *args, **kwargs) -> None: ... + +class TransactionOptions: + NESTED: int + MANDATORY: int + ALLOWED: int + INDEPENDENT: int + +class AutoBatcher: + def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/django_middleware.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/django_middleware.pyi new file mode 100644 index 000000000000..8d4c846e526d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/django_middleware.pyi @@ -0,0 +1,2 @@ +class NdbDjangoMiddleware: + def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi new file mode 100644 index 000000000000..ab5a3a0ae0b2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/exceptions.pyi @@ -0,0 +1,22 @@ +from typing import Any + +class Error(Exception): ... + +class ContextError(Error): + def __init__(self) -> None: ... + +class BadValueError(Error): ... +class BadArgumentError(Error): ... +class BadRequestError(Error): ... 
+class Rollback(Error): ... +class BadQueryError(Error): ... + +class BadFilterError(Error): + filter: Any + def __init__(self, filter) -> None: ... + +class NoLongerImplementedError(NotImplementedError): + def __init__(self) -> None: ... + +class Cancelled(Error): ... +class NestedRetryException(Error): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi new file mode 100644 index 000000000000..48aeaeb14e74 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/global_cache.pyi @@ -0,0 +1,78 @@ +import abc +from _typeshed import Self +from typing import Any + +ConnectionError: Any + +class GlobalCache(metaclass=abc.ABCMeta): + __metaclass__: Any + transient_errors: Any + strict_read: bool + strict_write: bool + @abc.abstractmethod + def get(self, keys): ... + @abc.abstractmethod + def set(self, items, expires: Any | None = ...): ... + @abc.abstractmethod + def delete(self, keys): ... + @abc.abstractmethod + def watch(self, keys): ... + @abc.abstractmethod + def unwatch(self, keys): ... + @abc.abstractmethod + def compare_and_swap(self, items, expires: Any | None = ...): ... + @abc.abstractmethod + def clear(self): ... + +class _InProcessGlobalCache(GlobalCache): + cache: Any + def __init__(self) -> None: ... + def get(self, keys): ... + def set(self, items, expires: Any | None = ...) -> None: ... + def delete(self, keys) -> None: ... + def watch(self, keys) -> None: ... + def unwatch(self, keys) -> None: ... + def compare_and_swap(self, items, expires: Any | None = ...): ... + def clear(self) -> None: ... + +class RedisCache(GlobalCache): + transient_errors: Any + @classmethod + def from_environment(cls: type[Self], strict_read: bool = ..., strict_write: bool = ...) -> Self: ... 
+ redis: Any + strict_read: Any + strict_write: Any + def __init__(self, redis, strict_read: bool = ..., strict_write: bool = ...) -> None: ... + @property + def pipes(self): ... + def get(self, keys): ... + def set(self, items, expires: Any | None = ...) -> None: ... + def delete(self, keys) -> None: ... + def watch(self, keys) -> None: ... + def unwatch(self, keys) -> None: ... + def compare_and_swap(self, items, expires: Any | None = ...): ... + def clear(self) -> None: ... + +class MemcacheCache(GlobalCache): + class KeyNotSet(Exception): + key: Any + def __init__(self, key) -> None: ... + def __eq__(self, other): ... + transient_errors: Any + @classmethod + def from_environment( + cls: type[Self], max_pool_size: int = ..., strict_read: bool = ..., strict_write: bool = ... + ) -> Self: ... + client: Any + strict_read: Any + strict_write: Any + def __init__(self, client, strict_read: bool = ..., strict_write: bool = ...) -> None: ... + @property + def caskeys(self): ... + def get(self, keys): ... + def set(self, items, expires: Any | None = ...): ... + def delete(self, keys) -> None: ... + def watch(self, keys) -> None: ... + def unwatch(self, keys) -> None: ... + def compare_and_swap(self, items, expires: Any | None = ...): ... + def clear(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi new file mode 100644 index 000000000000..6a9896e3d032 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/key.pyi @@ -0,0 +1,98 @@ +from typing import Any + +UNDEFINED: Any + +class Key: + def __new__(cls, *path_args, **kwargs): ... + def __hash__(self): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... 
+ def __getnewargs__(self): ... + def parent(self): ... + def root(self): ... + def namespace(self): ... + def project(self): ... + app: Any + def id(self): ... + def string_id(self): ... + def integer_id(self): ... + def pairs(self): ... + def flat(self): ... + def kind(self): ... + def reference(self): ... + def serialized(self): ... + def urlsafe(self): ... + def to_legacy_urlsafe(self, location_prefix): ... + def get( + self, + read_consistency: Any | None = ..., + read_policy: Any | None = ..., + transaction: Any | None = ..., + retries: Any | None = ..., + timeout: Any | None = ..., + deadline: Any | None = ..., + use_cache: Any | None = ..., + use_global_cache: Any | None = ..., + use_datastore: Any | None = ..., + global_cache_timeout: Any | None = ..., + use_memcache: Any | None = ..., + memcache_timeout: Any | None = ..., + max_memcache_items: Any | None = ..., + force_writes: Any | None = ..., + _options: Any | None = ..., + ): ... + def get_async( + self, + read_consistency: Any | None = ..., + read_policy: Any | None = ..., + transaction: Any | None = ..., + retries: Any | None = ..., + timeout: Any | None = ..., + deadline: Any | None = ..., + use_cache: Any | None = ..., + use_global_cache: Any | None = ..., + use_datastore: Any | None = ..., + global_cache_timeout: Any | None = ..., + use_memcache: Any | None = ..., + memcache_timeout: Any | None = ..., + max_memcache_items: Any | None = ..., + force_writes: Any | None = ..., + _options: Any | None = ..., + ): ... + def delete( + self, + retries: Any | None = ..., + timeout: Any | None = ..., + deadline: Any | None = ..., + use_cache: Any | None = ..., + use_global_cache: Any | None = ..., + use_datastore: Any | None = ..., + global_cache_timeout: Any | None = ..., + use_memcache: Any | None = ..., + memcache_timeout: Any | None = ..., + max_memcache_items: Any | None = ..., + force_writes: Any | None = ..., + _options: Any | None = ..., + ): ... 
+ def delete_async( + self, + retries: Any | None = ..., + timeout: Any | None = ..., + deadline: Any | None = ..., + use_cache: Any | None = ..., + use_global_cache: Any | None = ..., + use_datastore: Any | None = ..., + global_cache_timeout: Any | None = ..., + use_memcache: Any | None = ..., + memcache_timeout: Any | None = ..., + max_memcache_items: Any | None = ..., + force_writes: Any | None = ..., + _options: Any | None = ..., + ): ... + @classmethod + def from_old_key(cls, old_key) -> None: ... + def to_old_key(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi new file mode 100644 index 000000000000..6c8d3392476f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/metadata.pyi @@ -0,0 +1,51 @@ +from typing import Any + +from google.cloud.ndb import model + +class _BaseMetadata(model.Model): + KIND_NAME: str + def __new__(cls, *args, **kwargs): ... + +class Namespace(_BaseMetadata): + KIND_NAME: str + EMPTY_NAMESPACE_ID: int + @property + def namespace_name(self): ... + @classmethod + def key_for_namespace(cls, namespace): ... + @classmethod + def key_to_namespace(cls, key): ... + +class Kind(_BaseMetadata): + KIND_NAME: str + @property + def kind_name(self): ... + @classmethod + def key_for_kind(cls, kind): ... + @classmethod + def key_to_kind(cls, key): ... + +class Property(_BaseMetadata): + KIND_NAME: str + @property + def property_name(self): ... + @property + def kind_name(self): ... + property_representation: Any + @classmethod + def key_for_kind(cls, kind): ... + @classmethod + def key_for_property(cls, kind, property): ... + @classmethod + def key_to_kind(cls, key): ... + @classmethod + def key_to_property(cls, key): ... + +class EntityGroup: + def __new__(cls, *args, **kwargs): ... 
+ +def get_entity_group_version(*args, **kwargs) -> None: ... +def get_kinds(start: Any | None = ..., end: Any | None = ...): ... +def get_namespaces(start: Any | None = ..., end: Any | None = ...): ... +def get_properties_of_kind(kind, start: Any | None = ..., end: Any | None = ...): ... +def get_representations_of_kind(kind, start: Any | None = ..., end: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/model.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/model.pyi new file mode 100644 index 000000000000..37edd5825c08 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/model.pyi @@ -0,0 +1,508 @@ +import datetime +from _typeshed import Self +from collections.abc import Iterable, Sequence +from typing import Any, Callable, NoReturn +from typing_extensions import Literal + +from google.cloud.ndb import exceptions, key as key_module, query as query_module, tasklets as tasklets_module + +Key = key_module.Key +Rollback = exceptions.Rollback +BlobKey: object +GeoPt: object + +class KindError(exceptions.BadValueError): ... +class InvalidPropertyError(exceptions.Error): ... + +BadProjectionError = InvalidPropertyError + +class UnprojectedPropertyError(exceptions.Error): ... +class ReadonlyPropertyError(exceptions.Error): ... +class ComputedPropertyError(ReadonlyPropertyError): ... +class UserNotFoundError(exceptions.Error): ... + +class _NotEqualMixin: + def __ne__(self, other: object) -> bool: ... + +DirectionT = Literal["asc", "desc"] + +class IndexProperty(_NotEqualMixin): + def __new__(cls: type[Self], name: str, direction: DirectionT) -> Self: ... + @property + def name(self) -> str: ... + @property + def direction(self) -> DirectionT: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... 
+ +class Index(_NotEqualMixin): + def __new__(cls: type[Self], kind: str, properties: list[IndexProperty], ancestor: bool) -> Self: ... + @property + def kind(self) -> str: ... + @property + def properties(self) -> list[IndexProperty]: ... + @property + def ancestor(self) -> bool: ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +class IndexState(_NotEqualMixin): + def __new__(cls, definition, state, id): ... + @property + def definition(self): ... + @property + def state(self): ... + @property + def id(self): ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +class ModelAdapter: + # This actually returns NoReturn, but mypy can't handle that + def __new__(cls: type[Self], *args, **kwargs) -> Self: ... + +def make_connection(*args, **kwargs) -> NoReturn: ... + +class ModelAttribute: ... + +class _BaseValue(_NotEqualMixin): + b_val: object = ... + def __init__(self, b_val) -> None: ... + def __eq__(self, other) -> bool: ... + def __hash__(self) -> int: ... + +class Property(ModelAttribute): + def __init__( + self, + name: str | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: object | None = ..., + choices: Iterable[object] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + def __eq__(self, value: object) -> query_module.FilterNode: ... # type: ignore[override] + def __ne__(self, value: object) -> query_module.FilterNode: ... # type: ignore[override] + def __lt__(self, value: object) -> query_module.FilterNode: ... + def __le__(self, value: object) -> query_module.FilterNode: ... + def __gt__(self, value: object) -> query_module.FilterNode: ... + def __ge__(self, value: object) -> query_module.FilterNode: ... 
+ def IN(self, value: Iterable[object]) -> query_module.DisjunctionNode | query_module.FilterNode | query_module.FalseNode: ... + def __neg__(self) -> query_module.PropertyOrder: ... + def __pos__(self) -> query_module.PropertyOrder: ... + def __set__(self, entity: Model, value: object) -> None: ... + def __delete__(self, entity: Model) -> None: ... + +class ModelKey(Property): + def __init__(self) -> None: ... + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> key_module.Key | list[key_module.Key] | None: ... + +class BooleanProperty(Property): + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> bool | list[bool] | None: ... + +class IntegerProperty(Property): + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> int | list[int] | None: ... + +class FloatProperty(Property): + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> float | list[float] | None: ... + +class _CompressedValue(bytes): + z_val: bytes = ... + def __init__(self, z_val: bytes) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> NoReturn: ... + +class BlobProperty(Property): + def __init__( + self, + name: str | None = ..., + compressed: bool | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: bytes | None = ..., + choices: Iterable[bytes] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) -> bytes | list[bytes] | None: ... + +class CompressedTextProperty(BlobProperty): + def __init__(self, *args, **kwargs) -> None: ... + +class TextProperty(Property): + def __new__(cls, *args, **kwargs): ... + def __init__(self, *args, **kwargs) -> None: ... + def __get__(self, entity: Model, unused_cls: type[Model] | None = ...) 
-> str | list[str] | None: ... + +class StringProperty(TextProperty): + def __init__(self, *args, **kwargs) -> None: ... + +class GeoPtProperty(Property): ... +class PickleProperty(BlobProperty): ... + +class JsonProperty(BlobProperty): + def __init__( + self, + name: str | None = ..., + compressed: bool | None = ..., + json_type: type | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: object | None = ..., + choices: Iterable[object] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + +class User: + def __init__(self, email: str | None = ..., _auth_domain: str | None = ..., _user_id: str | None = ...) -> None: ... + def nickname(self) -> str: ... + def email(self): ... + def user_id(self) -> str | None: ... + def auth_domain(self) -> str: ... + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def __lt__(self, other: object) -> bool: ... + +class UserProperty(Property): + def __init__( + self, + name: str | None = ..., + auto_current_user: bool | None = ..., + auto_current_user_add: bool | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: bytes | None = ..., + choices: Iterable[bytes] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... 
+ +class KeyProperty(Property): + def __init__( + self, + name: str | None = ..., + kind: type[Model] | str | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: key_module.Key | None = ..., + choices: Iterable[key_module.Key] | None = ..., + validator: Callable[[Property, key_module.Key], key_module.Key] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + +class BlobKeyProperty(Property): ... + +class DateTimeProperty(Property): + def __init__( + self, + name: str | None = ..., + auto_now: bool | None = ..., + auto_now_add: bool | None = ..., + tzinfo: datetime.tzinfo | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + required: bool | None = ..., + default: datetime.datetime | None = ..., + choices: Iterable[datetime.datetime] | None = ..., + validator: Callable[[Property, Any], object] | None = ..., + verbose_name: str | None = ..., + write_empty_list: bool | None = ..., + ) -> None: ... + +class DateProperty(DateTimeProperty): ... +class TimeProperty(DateTimeProperty): ... + +class StructuredProperty(Property): + def __init__(self, model_class: type, name: str | None = ..., **kwargs) -> None: ... + def __getattr__(self, attrname): ... + def IN(self, value: Iterable[object]) -> query_module.DisjunctionNode | query_module.FalseNode: ... + +class LocalStructuredProperty(BlobProperty): + def __init__(self, model_class: type[Model], **kwargs) -> None: ... + +class GenericProperty(Property): + def __init__(self, name: str | None = ..., compressed: bool = ..., **kwargs) -> None: ... + +class ComputedProperty(GenericProperty): + def __init__( + self, + func: Callable[[Model], object], + name: str | None = ..., + indexed: bool | None = ..., + repeated: bool | None = ..., + verbose_name: str | None = ..., + ) -> None: ... + +class MetaModel(type): + def __init__(cls, name: str, bases, classdict) -> None: ... 
+ +class Model(_NotEqualMixin, metaclass=MetaModel): + key: ModelKey = ... + def __init__(_self, **kwargs) -> None: ... + def __hash__(self) -> NoReturn: ... + def __eq__(self, other: object) -> bool: ... + @classmethod + def gql(cls: type[Model], query_string: str, *args, **kwargs) -> query_module.Query: ... + def put(self, **kwargs): ... + def put_async(self, **kwargs) -> tasklets_module.Future: ... + @classmethod + def query(cls: type[Model], *args, **kwargs) -> query_module.Query: ... + @classmethod + def allocate_ids( + cls: type[Model], + size: int | None = ..., + max: int | None = ..., + parent: key_module.Key | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> tuple[key_module.Key, key_module.Key]: ... + @classmethod + def allocate_ids_async( + cls: type[Model], + size: int | None = ..., + max: int | None = ..., + parent: key_module.Key | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> tasklets_module.Future: ... 
+ @classmethod + def get_by_id( + cls: type[Model], + id: int | str | None, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> Model | None: ... + @classmethod + def get_by_id_async( + cls: type[Model], + id: int | str, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + ) -> tasklets_module.Future: ... 
+ @classmethod + def get_or_insert( + cls: type[Model], + name: str, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + **kw_model_args, + ) -> Model: ... + @classmethod + def get_or_insert_async( + cls: type[Model], + name: str, + parent: key_module.Key | None = ..., + namespace: str | None = ..., + project: str | None = ..., + app: str | None = ..., + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options=..., + **kw_model_args, + ) -> tasklets_module.Future: ... + def populate(self, **kwargs) -> None: ... + def has_complete_key(self) -> bool: ... + def to_dict( + self, + include: list[object] | tuple[object, object] | set[object] | None = ..., + exclude: list[object] | tuple[object, object] | set[object] | None = ..., + ): ... + +class Expando(Model): + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... 
+ def __delattr__(self, name: str) -> None: ... + +def get_multi_async( + keys: Sequence[type[key_module.Key]], + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object | None = ..., +) -> list[type[tasklets_module.Future]]: ... +def get_multi( + keys: Sequence[type[key_module.Key]], + read_consistency: Literal["EVENTUAL"] | None = ..., + read_policy: Literal["EVENTUAL"] | None = ..., + transaction: bytes | None = ..., + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object | None = ..., +) -> list[type[Model] | None]: ... +def put_multi_async( + entities: list[type[Model]], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object | None = ..., +) -> list[tasklets_module.Future]: ... 
+def put_multi( + entities: list[Model], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object | None = ..., +) -> list[key_module.Key]: ... +def delete_multi_async( + keys: list[key_module.Key], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object | None = ..., +) -> list[tasklets_module.Future]: ... +def delete_multi( + keys: Sequence[key_module.Key], + retries: int | None = ..., + timeout: float | None = ..., + deadline: float | None = ..., + use_cache: bool | None = ..., + use_global_cache: bool | None = ..., + global_cache_timeout: int | None = ..., + use_datastore: bool | None = ..., + use_memcache: bool | None = ..., + memcache_timeout: int | None = ..., + max_memcache_items: int | None = ..., + force_writes: bool | None = ..., + _options: object | None = ..., +) -> list[None]: ... +def get_indexes_async(**options: object) -> NoReturn: ... +def get_indexes(**options: object) -> NoReturn: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/msgprop.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/msgprop.pyi new file mode 100644 index 000000000000..2bb3e67d8435 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/msgprop.pyi @@ -0,0 +1,5 @@ +class EnumProperty: + def __init__(self, *args, **kwargs) -> None: ... + +class MessageProperty: + def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi new file mode 100644 index 000000000000..43cabd288871 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/polymodel.pyi @@ -0,0 +1,9 @@ +from typing import Any + +from google.cloud.ndb import model + +class _ClassKeyProperty(model.StringProperty): + def __init__(self, name=..., indexed: bool = ...) -> None: ... + +class PolyModel(model.Model): + class_: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi new file mode 100644 index 000000000000..6ca3d6f33976 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/query.pyi @@ -0,0 +1,146 @@ +from typing import Any + +from google.cloud.ndb import _options + +class PropertyOrder: + name: Any + reverse: Any + def __init__(self, name, reverse: bool = ...) -> None: ... + def __neg__(self): ... + +class RepeatedStructuredPropertyPredicate: + name: Any + match_keys: Any + match_values: Any + def __init__(self, name, match_keys, entity_pb) -> None: ... + def __call__(self, entity_pb): ... 
+ +class ParameterizedThing: + def __eq__(self, other): ... + def __ne__(self, other): ... + +class Parameter(ParameterizedThing): + def __init__(self, key) -> None: ... + def __eq__(self, other): ... + @property + def key(self): ... + def resolve(self, bindings, used): ... + +class ParameterizedFunction(ParameterizedThing): + func: Any + values: Any + def __init__(self, func, values) -> None: ... + def __eq__(self, other): ... + def is_parameterized(self): ... + def resolve(self, bindings, used): ... + +class Node: + def __new__(cls): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __le__(self, unused_other): ... + def __lt__(self, unused_other): ... + def __ge__(self, unused_other): ... + def __gt__(self, unused_other): ... + def resolve(self, bindings, used): ... + +class FalseNode(Node): + def __eq__(self, other): ... + +class ParameterNode(Node): + def __new__(cls, prop, op, param): ... + def __getnewargs__(self): ... + def __eq__(self, other): ... + def resolve(self, bindings, used): ... + +class FilterNode(Node): + def __new__(cls, name, opsymbol, value): ... + def __getnewargs__(self): ... + def __eq__(self, other): ... + +class PostFilterNode(Node): + def __new__(cls, predicate): ... + def __getnewargs__(self): ... + def __eq__(self, other): ... + +class _BooleanClauses: + name: Any + combine_or: Any + or_parts: Any + def __init__(self, name, combine_or) -> None: ... + def add_node(self, node) -> None: ... + +class ConjunctionNode(Node): + def __new__(cls, *nodes): ... + def __getnewargs__(self): ... + def __iter__(self): ... + def __eq__(self, other): ... + def resolve(self, bindings, used): ... + +class DisjunctionNode(Node): + def __new__(cls, *nodes): ... + def __getnewargs__(self): ... + def __iter__(self): ... + def __eq__(self, other): ... + def resolve(self, bindings, used): ... 
+ +AND = ConjunctionNode +OR = DisjunctionNode + +class QueryOptions(_options.ReadOptions): + project: Any + namespace: Any + def __init__(self, config: Any | None = ..., context: Any | None = ..., **kwargs) -> None: ... + +class Query: + default_options: Any + kind: Any + ancestor: Any + filters: Any + order_by: Any + project: Any + namespace: Any + limit: Any + offset: Any + keys_only: Any + projection: Any + distinct_on: Any + def __init__( + self, + kind: Any | None = ..., + filters: Any | None = ..., + ancestor: Any | None = ..., + order_by: Any | None = ..., + orders: Any | None = ..., + project: Any | None = ..., + app: Any | None = ..., + namespace: Any | None = ..., + projection: Any | None = ..., + distinct_on: Any | None = ..., + group_by: Any | None = ..., + limit: Any | None = ..., + offset: Any | None = ..., + keys_only: Any | None = ..., + default_options: Any | None = ..., + ) -> None: ... + @property + def is_distinct(self): ... + def filter(self, *filters): ... + def order(self, *props): ... + def analyze(self): ... + def bind(self, *positional, **keyword): ... + def fetch(self, limit: Any | None = ..., **kwargs): ... + def fetch_async(self, limit: Any | None = ..., **kwargs): ... + def run_to_queue(self, queue, conn, options: Any | None = ..., dsquery: Any | None = ...) -> None: ... + def iter(self, **kwargs): ... + __iter__: Any + def map(self, callback, **kwargs): ... + def map_async(self, callback, **kwargs) -> None: ... + def get(self, **kwargs): ... + def get_async(self, **kwargs) -> None: ... + def count(self, limit: Any | None = ..., **kwargs): ... + def count_async(self, limit: Any | None = ..., **kwargs): ... + def fetch_page(self, page_size, **kwargs): ... + def fetch_page_async(self, page_size, **kwargs) -> None: ... + +def gql(query_string: str, *args: Any, **kwds: Any) -> Query: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi new file mode 100644 index 000000000000..1ffa02ff04de --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/stats.pyi @@ -0,0 +1,102 @@ +from typing import Any + +from google.cloud.ndb import model + +class BaseStatistic(model.Model): + STORED_KIND_NAME: str + bytes: Any + count: Any + timestamp: Any + +class BaseKindStatistic(BaseStatistic): + STORED_KIND_NAME: str + kind_name: Any + entity_bytes: Any + +class GlobalStat(BaseStatistic): + STORED_KIND_NAME: str + entity_bytes: Any + builtin_index_bytes: Any + builtin_index_count: Any + composite_index_bytes: Any + composite_index_count: Any + +class NamespaceStat(BaseStatistic): + STORED_KIND_NAME: str + subject_namespace: Any + entity_bytes: Any + builtin_index_bytes: Any + builtin_index_count: Any + composite_index_bytes: Any + composite_index_count: Any + +class KindStat(BaseKindStatistic): + STORED_KIND_NAME: str + builtin_index_bytes: Any + builtin_index_count: Any + composite_index_bytes: Any + composite_index_count: Any + +class KindRootEntityStat(BaseKindStatistic): + STORED_KIND_NAME: str + +class KindNonRootEntityStat(BaseKindStatistic): + STORED_KIND_NAME: str + +class PropertyTypeStat(BaseStatistic): + STORED_KIND_NAME: str + property_type: Any + entity_bytes: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindPropertyTypeStat(BaseKindStatistic): + STORED_KIND_NAME: str + property_type: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindPropertyNameStat(BaseKindStatistic): + STORED_KIND_NAME: str + property_name: Any + builtin_index_bytes: Any + builtin_index_count: Any + +class KindPropertyNamePropertyTypeStat(BaseKindStatistic): + STORED_KIND_NAME: str + property_type: Any + property_name: Any + 
builtin_index_bytes: Any + builtin_index_count: Any + +class KindCompositeIndexStat(BaseStatistic): + STORED_KIND_NAME: str + index_id: Any + kind_name: Any + +class NamespaceGlobalStat(GlobalStat): + STORED_KIND_NAME: str + +class NamespaceKindStat(KindStat): + STORED_KIND_NAME: str + +class NamespaceKindRootEntityStat(KindRootEntityStat): + STORED_KIND_NAME: str + +class NamespaceKindNonRootEntityStat(KindNonRootEntityStat): + STORED_KIND_NAME: str + +class NamespacePropertyTypeStat(PropertyTypeStat): + STORED_KIND_NAME: str + +class NamespaceKindPropertyTypeStat(KindPropertyTypeStat): + STORED_KIND_NAME: str + +class NamespaceKindPropertyNameStat(KindPropertyNameStat): + STORED_KIND_NAME: str + +class NamespaceKindPropertyNamePropertyTypeStat(KindPropertyNamePropertyTypeStat): + STORED_KIND_NAME: str + +class NamespaceKindCompositeIndexStat(KindCompositeIndexStat): + STORED_KIND_NAME: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi new file mode 100644 index 000000000000..4d9f34d69784 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/tasklets.pyi @@ -0,0 +1,58 @@ +from typing import Any + +class Future: + info: Any + def __init__(self, info: str = ...) -> None: ... + def done(self): ... + def running(self): ... + def wait(self) -> None: ... + def check_success(self) -> None: ... + def set_result(self, result) -> None: ... + def set_exception(self, exception) -> None: ... + def result(self): ... + get_result: Any + def exception(self): ... + get_exception: Any + def get_traceback(self): ... + def add_done_callback(self, callback) -> None: ... + def cancel(self) -> None: ... + def cancelled(self): ... + @staticmethod + def wait_any(futures): ... + @staticmethod + def wait_all(futures): ... 
+ +class _TaskletFuture(Future): + generator: Any + context: Any + waiting_on: Any + def __init__(self, generator, context, info: str = ...) -> None: ... + def cancel(self) -> None: ... + +class _MultiFuture(Future): + def __init__(self, dependencies) -> None: ... + def cancel(self) -> None: ... + +def tasklet(wrapped): ... +def wait_any(futures): ... +def wait_all(futures) -> None: ... + +class Return(Exception): ... + +def sleep(seconds): ... +def add_flow_exception(*args, **kwargs) -> None: ... +def make_context(*args, **kwargs) -> None: ... +def make_default_context(*args, **kwargs) -> None: ... + +class QueueFuture: + def __init__(self, *args, **kwargs) -> None: ... + +class ReducingFuture: + def __init__(self, *args, **kwargs) -> None: ... + +class SerialQueueFuture: + def __init__(self, *args, **kwargs) -> None: ... + +def set_context(*args, **kwargs) -> None: ... +def synctasklet(wrapped): ... +def toplevel(wrapped): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi new file mode 100644 index 000000000000..3ddb96071d22 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/google-cloud-ndb/google/cloud/ndb/utils.pyi @@ -0,0 +1,28 @@ +import threading +from typing import Any + +TRUTHY_STRINGS: Any + +def asbool(value): ... + +DEBUG: Any + +def code_info(*args, **kwargs) -> None: ... +def decorator(*args, **kwargs) -> None: ... +def frame_info(*args, **kwargs) -> None: ... +def func_info(*args, **kwargs) -> None: ... +def gen_info(*args, **kwargs) -> None: ... +def get_stack(*args, **kwargs) -> None: ... +def logging_debug(log, message, *args, **kwargs) -> None: ... + +class keyword_only: + defaults: Any + def __init__(self, **kwargs) -> None: ... + def __call__(self, wrapped): ... + +def positional(max_pos_args): ... 
+ +threading_local = threading.local + +def tweak_logging(*args, **kwargs) -> None: ... +def wrapping(*args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/METADATA.toml new file mode 100644 index 000000000000..4e51482b6fc4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/METADATA.toml @@ -0,0 +1 @@ +version = "2.10.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/__init__.pyi new file mode 100644 index 000000000000..bda5b5a7f4cc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/__init__.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/dbapi.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/dbapi.pyi new file mode 100644 index 000000000000..33406fb93e44 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/dbapi.pyi @@ -0,0 +1,115 @@ +import decimal +from _typeshed import ReadableBuffer +from datetime import date, datetime, time +from typing import Any, Sequence, overload +from typing_extensions import Literal + +from .resultrow import ResultRow + +apilevel: str +threadsafety: int +paramstyle: tuple[str, ...] +connect = Connection + +class Connection: + def __init__( + self, + address: str, + port: int, + username: str, + password: str, + autocommit: bool = ..., + packetsize: int | None = ..., + userkey: str | None = ..., + *, + sessionvariables: dict[str, str] | None = ..., + forcebulkfetch: bool | None = ..., + ) -> None: ... + def cancel(self) -> bool: ... + def close(self) -> None: ... + def commit(self) -> None: ... + def cursor(self) -> Cursor: ... + def getaddress(self) -> str: ... + def getautocommit(self) -> bool: ... + def getclientinfo(self, key: str = ...) 
-> str | dict[str, str]: ... + def isconnected(self) -> bool: ... + def rollback(self) -> None: ... + def setautocommit(self, auto: bool = ...) -> None: ... + def setclientinfo(self, key: str, value: str | None = ...) -> None: ... + +class LOB: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def close(self) -> bool: ... + def find(self, object: str, length: int, position: int = ...) -> int: ... + def read(self, size: int = ..., position: int = ...) -> str | bytes: ... + def write(self, object: str | bytes) -> int: ... + +_Parameters = Sequence[tuple[Any, ...]] + +class Cursor: + description: tuple[tuple[Any, ...], ...] + rowcount: int + statementhash: str | None + connection: Connection + arraysize: int + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def callproc(self, procname: str, parameters: tuple[Any, ...] = ..., overview: bool = ...) -> tuple[Any, ...]: ... + def close(self) -> None: ... + def description_ext(self) -> Sequence[tuple[Any, ...]]: ... + def execute(self, operation: str, parameters: tuple[Any, ...]) -> bool: ... + def executemany(self, operation: str, parameters: _Parameters) -> Any: ... + def executemanyprepared(self, parameters: _Parameters) -> Any: ... + def executeprepared(self, parameters: _Parameters = ...) -> Any: ... + def fetchone(self, uselob: bool = ...) -> ResultRow | None: ... + def fetchall(self) -> list[ResultRow]: ... + def fetchmany(self, size: int | None = ...) -> list[ResultRow]: ... + def get_resultset_holdability(self) -> int: ... + def getwarning(self) -> Warning | None: ... + def haswarning(self) -> bool: ... + def nextset(self) -> None: ... + def parameter_description(self) -> tuple[str, ...]: ... + @overload + def prepare(self, operation: str, newcursor: Literal[True]) -> Cursor: ... + @overload + def prepare(self, operation: str, newcursor: Literal[False]) -> Any: ... + def scroll(self, value: int, mode: Literal["absolute", "relative"] = ...) -> None: ... 
+ def server_cpu_time(self) -> int: ... + def server_memory_usage(self) -> int: ... + def server_processing_time(self) -> int: ... + def setinputsizes(self, *args: Any, **kwargs: Any) -> None: ... + def setfetchsize(self, value: int) -> None: ... + def set_resultset_holdability(self, holdability: int) -> None: ... + def setoutputsize(self, *args: Any, **kwargs: Any) -> None: ... + +class Warning(Exception): + errorcode: int + errortext: str + +class Error(Exception): + errorcode: int + errortext: str + +class DatabaseError(Error): ... +class OperationalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InterfaceError(Error): ... +class InternalError(DatabaseError): ... +class DataError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... + +def Date(year: int, month: int, day: int) -> date: ... +def Time(hour: int, minute: int, second: int, millisecond: int = ...) -> time: ... +def Timestamp(year: int, month: int, day: int, hour: int, minute: int, second: int, millisecond: int = ...) -> datetime: ... +def DateFromTicks(ticks: float) -> date: ... +def TimeFromTicks(ticks: float) -> time: ... +def TimestampFromTicks(ticks: float) -> datetime: ... +def Binary(data: ReadableBuffer) -> memoryview: ... + +Decimal = decimal.Decimal + +NUMBER: type[int] | type[float] | type[complex] +DATETIME: type[date] | type[time] | type[datetime] +STRING = str +BINARY = memoryview +ROWID = int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/resultrow.pyi b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/resultrow.pyi new file mode 100644 index 000000000000..cf0ee12ecd88 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/hdbcli/hdbcli/resultrow.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class ResultRow: + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + column_names: tuple[str, ...] + column_values: tuple[Any, ...] 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml index bad265e4fe3f..c9f594bd7bbd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/METADATA.toml @@ -1 +1 @@ -version = "1.1" +version = "1.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi index cf62e2ca65d9..fd9f6dac7caf 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_tokenizer.pyi @@ -1,10 +1,10 @@ import sys from collections import OrderedDict -from typing import Any, Dict +from typing import Any entitiesTrie: Any if sys.version_info >= (3, 7): - attributeMap = Dict[Any, Any] + attributeMap = dict[Any, Any] else: attributeMap = OrderedDict[Any, Any] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi index c6f85f5a622f..1ea974392438 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/_utils.pyi @@ -1,9 +1,9 @@ from collections.abc import Mapping -from typing import Any, Dict +from typing import Any supports_lone_surrogates: bool -class MethodDispatcher(Dict[Any, Any]): +class MethodDispatcher(dict[Any, Any]): default: Any def __init__(self, items=...) -> None: ... def __getitem__(self, key): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treebuilders/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treebuilders/base.pyi index 12e89bb296d4..8c73d5257666 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treebuilders/base.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/html5lib/html5lib/treebuilders/base.pyi @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any Marker: Any listElementsMap: Any @@ -18,7 +18,7 @@ class Node: def cloneNode(self) -> None: ... def hasContent(self) -> None: ... -class ActiveFormattingElements(List[Any]): +class ActiveFormattingElements(list[Any]): def append(self, node) -> None: ... def nodesEqual(self, node1, node2): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/httplib2/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/httplib2/METADATA.toml index 7918a438b96b..ae1fb69ffa18 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/httplib2/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/httplib2/METADATA.toml @@ -1 +1 @@ -version = "0.19" +version = "0.20.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/httplib2/httplib2/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/httplib2/httplib2/__init__.pyi index 7813440b4578..f1da1887fee3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/httplib2/httplib2/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/httplib2/httplib2/__init__.pyi @@ -1,6 +1,6 @@ import http.client from collections.abc import Generator -from typing import Any, Dict, TypeVar +from typing import Any, TypeVar from .error import * @@ -79,8 +79,8 @@ class Credentials: def iter(self, domain) -> Generator[tuple[str, str], None, None]: ... class KeyCerts(Credentials): - def add(self, key, cert, domain, password) -> None: ... 
# type: ignore - def iter(self, domain) -> Generator[tuple[str, str, str], None, None]: ... # type: ignore + def add(self, key, cert, domain, password) -> None: ... # type: ignore[override] + def iter(self, domain) -> Generator[tuple[str, str, str], None, None]: ... # type: ignore[override] class AllHosts: ... @@ -175,7 +175,7 @@ class Http: connection_type: Any | None = ..., ): ... -class Response(Dict[str, Any]): +class Response(dict[str, Any]): fromcache: bool version: int status: int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/METADATA.toml index 27a0042d11dd..a6930073a3ac 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/METADATA.toml @@ -1 +1 @@ -version = "9.2" +version = "9.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/__init__.pyi index 1e68777186ad..6630b1a4ad84 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/__init__.pyi @@ -1,32 +1,33 @@ -from typing import Any, NamedTuple +import datetime +from typing import Any, NamedTuple, Pattern class SizeUnit(NamedTuple): - divider: Any - symbol: Any - name: Any + divider: int + symbol: str + name: str class CombinedUnit(NamedTuple): - decimal: Any - binary: Any + decimal: SizeUnit + binary: SizeUnit disk_size_units: Any length_size_units: Any time_units: Any -def coerce_boolean(value): ... -def coerce_pattern(value, flags: int = ...): ... -def coerce_seconds(value): ... -def format_size(num_bytes, keep_width: bool = ..., binary: bool = ...): ... -def parse_size(size, binary: bool = ...): ... 
-def format_length(num_metres, keep_width: bool = ...): ... -def parse_length(length): ... -def format_number(number, num_decimals: int = ...): ... -def round_number(count, keep_width: bool = ...): ... -def format_timespan(num_seconds, detailed: bool = ..., max_units: int = ...): ... -def parse_timespan(timespan): ... -def parse_date(datestring): ... -def format_path(pathname): ... -def parse_path(pathname): ... +def coerce_boolean(value: object) -> bool: ... +def coerce_pattern(value: str | Pattern[str], flags: int = ...) -> Pattern[str]: ... +def coerce_seconds(value: float | datetime.timedelta) -> float: ... +def format_size(num_bytes: float, keep_width: bool = ..., binary: bool = ...) -> str: ... +def parse_size(size: str, binary: bool = ...) -> int: ... +def format_length(num_metres: float, keep_width: bool = ...) -> str: ... +def parse_length(length: str) -> float: ... +def format_number(number: float, num_decimals: int = ...) -> str: ... +def round_number(count: float, keep_width: bool = ...) -> str: ... +def format_timespan(num_seconds: float | datetime.timedelta, detailed: bool = ..., max_units: int = ...) -> str: ... +def parse_timespan(timespan: str) -> float: ... +def parse_date(datestring: str) -> tuple[int, int, int, int, int, int]: ... +def format_path(pathname: str) -> str: ... +def parse_path(pathname: str) -> str: ... class Timer: monotonic: bool @@ -36,7 +37,7 @@ class Timer: def __init__(self, start_time: Any | None = ..., resumable: bool = ...) -> None: ... def __enter__(self): ... def __exit__(self, exc_type: Any | None = ..., exc_value: Any | None = ..., traceback: Any | None = ...) -> None: ... - def sleep(self, seconds) -> None: ... + def sleep(self, seconds: float) -> None: ... @property def elapsed_time(self): ... 
@property diff --git a/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/case.pyi b/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/case.pyi index b3e276725064..2dc151869845 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/case.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/humanfriendly/humanfriendly/case.pyi @@ -11,7 +11,7 @@ class CaseInsensitiveDict(collections.OrderedDict): def get(self, key, default: Any | None = ...): ... def pop(self, key, default: Any | None = ...): ... def setdefault(self, key, default: Any | None = ...): ... - def update(self, other: Any | None = ..., **kw) -> None: ... # type: ignore + def update(self, other: Any | None = ..., **kw) -> None: ... # type: ignore[override] def __contains__(self, key): ... def __delitem__(self, key): ... def __getitem__(self, key): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/invoke/METADATA.toml new file mode 100644 index 000000000000..6cf9fae44d92 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/METADATA.toml @@ -0,0 +1 @@ +version = "1.6.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/__init__.pyi new file mode 100644 index 000000000000..6bf0e8a102fc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/__init__.pyi @@ -0,0 +1,37 @@ +from typing import Any + +from .collection import Collection as Collection +from .config import Config as Config +from .context import Context as Context, MockContext as MockContext +from .exceptions import ( + AmbiguousEnvVar as AmbiguousEnvVar, + AuthFailure as AuthFailure, + CollectionNotFound as CollectionNotFound, + CommandTimedOut as CommandTimedOut, + Exit as Exit, + Failure as Failure, + 
ParseError as ParseError, + PlatformError as PlatformError, + ResponseNotAccepted as ResponseNotAccepted, + SubprocessPipeError as SubprocessPipeError, + ThreadException as ThreadException, + UncastableEnvVar as UncastableEnvVar, + UnexpectedExit as UnexpectedExit, + UnknownFileType as UnknownFileType, + UnpicklableConfigMember as UnpicklableConfigMember, + WatcherError as WatcherError, +) +from .executor import Executor as Executor +from .loader import FilesystemLoader as FilesystemLoader +from .parser import Argument as Argument, Parser as Parser, ParserContext as ParserContext, ParseResult as ParseResult +from .program import Program as Program +from .runners import Local as Local, Promise as Promise, Result as Result, Runner as Runner +from .tasks import Call as Call, Task as Task, call as call, task as task +from .terminals import pty_size as pty_size +from .watchers import FailingResponder as FailingResponder, Responder as Responder, StreamWatcher as StreamWatcher + +__version_info__: tuple[int, int, int] +__version__: str + +def run(command: str, **kwargs: Any) -> Result: ... +def sudo(command: str, **kwargs: Any) -> Result: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/collection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/collection.pyi new file mode 100644 index 000000000000..c1f0744fad95 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/collection.pyi @@ -0,0 +1,24 @@ +from typing import Any + +class Collection: + tasks: Any + collections: Any + default: str | None + name: str | None + loaded_from: Any + auto_dash_names: bool + def __init__(self, *args, **kwargs) -> None: ... + @classmethod + def from_module(cls, module, name=..., config=..., loaded_from=..., auto_dash_names=...): ... + def add_task(self, task, name=..., aliases=..., default=...) -> None: ... + def add_collection(self, coll, name=..., default=...) -> None: ... 
+ def subcollection_from_path(self, path): ... + def task_with_config(self, name): ... + def to_contexts(self): ... + def subtask_name(self, collection_name, task_name): ... + def transform(self, name): ... + @property + def task_names(self): ... + def configuration(self, taskpath=...): ... + def configure(self, options) -> None: ... + def serialized(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/completion/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/completion/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/completion/complete.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/completion/complete.pyi new file mode 100644 index 000000000000..a59c2c44e42f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/completion/complete.pyi @@ -0,0 +1,8 @@ +from typing import Iterable, NoReturn, Sequence + +from ..collection import Collection +from ..parser import ParserContext, ParseResult + +def complete(names: Iterable[str], core: ParseResult, initial_context: ParserContext, collection: Collection) -> NoReturn: ... +def print_task_names(collection: Collection) -> None: ... +def print_completion_script(shell: str, names: Sequence[str]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/config.pyi new file mode 100644 index 000000000000..c64180eb28f4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/config.pyi @@ -0,0 +1,60 @@ +from typing import Any + +def load_source(name: str, path: str) -> dict[str, Any]: ... + +class DataProxy: + @classmethod + def from_data(cls, data, root=..., keypath=...): ... + def __getattr__(self, key): ... + def __setattr__(self, key, value) -> None: ... + def __iter__(self): ... 
+ def __eq__(self, other): ... + __hash__: Any + def __len__(self): ... + def __setitem__(self, key, value) -> None: ... + def __getitem__(self, key): ... + def __contains__(self, key): ... + def __delitem__(self, key) -> None: ... + def __delattr__(self, name) -> None: ... + def clear(self) -> None: ... + def pop(self, *args): ... + def popitem(self): ... + def setdefault(self, *args): ... + def update(self, *args, **kwargs) -> None: ... + +class Config(DataProxy): + prefix: str + file_prefix: Any + env_prefix: Any + @staticmethod + def global_defaults(): ... + def __init__( + self, + overrides=..., + defaults=..., + system_prefix=..., + user_prefix=..., + project_location=..., + runtime_path=..., + lazy: bool = ..., + ) -> None: ... + def load_base_conf_files(self) -> None: ... + def load_defaults(self, data, merge: bool = ...) -> None: ... + def load_overrides(self, data, merge: bool = ...) -> None: ... + def load_system(self, merge: bool = ...) -> None: ... + def load_user(self, merge: bool = ...) -> None: ... + def load_project(self, merge: bool = ...) -> None: ... + def set_runtime_path(self, path) -> None: ... + def load_runtime(self, merge: bool = ...) -> None: ... + def load_shell_env(self) -> None: ... + def load_collection(self, data, merge: bool = ...) -> None: ... + def set_project_location(self, path) -> None: ... + def merge(self) -> None: ... + def clone(self, into=...): ... + +class AmbiguousMergeError(ValueError): ... + +def merge_dicts(base, updates): ... +def copy_dict(source): ... +def excise(dict_, keypath) -> None: ... +def obliterate(base, deletions) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/context.pyi new file mode 100644 index 000000000000..b8013b1094b7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/context.pyi @@ -0,0 +1,22 @@ +from contextlib import AbstractContextManager + +from .config import Config, DataProxy + +class Context(DataProxy): + def __init__(self, config: Config | None = ...) -> None: ... + @property + def config(self) -> Config: ... + @config.setter + def config(self, value: Config) -> None: ... + def run(self, command: str, **kwargs): ... + def sudo(self, command: str, *, password: str = ..., user: str = ..., **kwargs): ... + def prefix(self, command: str) -> AbstractContextManager[None]: ... + @property + def cwd(self) -> str: ... + def cd(self, path: str) -> AbstractContextManager[None]: ... + +class MockContext(Context): + def __init__(self, config: Config | None = ..., **kwargs) -> None: ... + def run(self, command: str, *args, **kwargs): ... + def sudo(self, command: str, *args, **kwargs): ... + def set_result_for(self, attname, command, result) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/env.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/env.pyi new file mode 100644 index 000000000000..38ffc67215d2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/env.pyi @@ -0,0 +1,6 @@ +from typing import Any + +class Environment: + data: Any + def __init__(self, config, prefix) -> None: ... + def load(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/exceptions.pyi new file mode 100644 index 000000000000..437cbc935bfb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/exceptions.pyi @@ -0,0 +1,47 @@ +from typing import Any + +class CollectionNotFound(Exception): + name: Any + start: Any + def __init__(self, name, start) -> None: ... + +class Failure(Exception): + result: Any + reason: Any + def __init__(self, result, reason=...) -> None: ... + def streams_for_display(self): ... + +class UnexpectedExit(Failure): ... + +class CommandTimedOut(Failure): + timeout: Any + def __init__(self, result, timeout) -> None: ... + +class AuthFailure(Failure): + result: Any + prompt: Any + def __init__(self, result, prompt) -> None: ... + +class ParseError(Exception): + context: Any + def __init__(self, msg, context=...) -> None: ... + +class Exit(Exception): + message: Any + def __init__(self, message=..., code=...) -> None: ... + @property + def code(self): ... + +class PlatformError(Exception): ... +class AmbiguousEnvVar(Exception): ... +class UncastableEnvVar(Exception): ... +class UnknownFileType(Exception): ... +class UnpicklableConfigMember(Exception): ... + +class ThreadException(Exception): + exceptions: Any + def __init__(self, exceptions) -> None: ... + +class WatcherError(Exception): ... +class ResponseNotAccepted(WatcherError): ... +class SubprocessPipeError(Exception): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/executor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/executor.pyi new file mode 100644 index 000000000000..7d8852cf29a1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/executor.pyi @@ -0,0 +1,16 @@ +from typing import Any, Iterable + +from .collection import Collection +from .config import Config +from .parser import ParserContext, ParseResult +from .tasks import Call, Task + +class Executor: + collection: Collection + config: Config + core: ParseResult | None + def __init__(self, collection: Collection, config: Config | None = ..., core: ParseResult | None = ...) -> None: ... + def execute(self, *tasks: str | tuple[str, dict[str, Any]] | ParserContext) -> dict[Task, Any]: ... + def normalize(self, tasks: Iterable[str | tuple[str, dict[str, Any]] | ParserContext]): ... + def dedupe(self, calls: Iterable[Call]) -> list[Call]: ... + def expand_calls(self, calls: Iterable[Call | Task]) -> list[Call]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/loader.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/loader.pyi new file mode 100644 index 000000000000..b339d5de3540 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/loader.pyi @@ -0,0 +1,15 @@ +from types import ModuleType +from typing import IO, Any + +from . import Config + +class Loader: + config: Config + def __init__(self, config: Config | None = ...) -> None: ... + def find(self, name: str) -> tuple[str, IO[Any], str, tuple[str, str, int]]: ... + def load(self, name: str | None = ...) -> tuple[ModuleType, str]: ... + +class FilesystemLoader(Loader): + def __init__(self, start: str | None = ..., **kwargs: Any) -> None: ... + @property + def start(self) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/main.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/main.pyi new file mode 100644 index 000000000000..d05ea6428b31 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/main.pyi @@ -0,0 +1,3 @@ +from . import Program + +program: Program diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/__init__.pyi new file mode 100644 index 000000000000..86a533969f35 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/__init__.pyi @@ -0,0 +1,3 @@ +from .argument import Argument as Argument +from .context import ParserContext as ParserContext, to_flag as to_flag, translate_underscores as translate_underscores +from .parser import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/argument.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/argument.pyi new file mode 100644 index 000000000000..1fd21823f07b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/argument.pyi @@ -0,0 +1,37 @@ +from typing import Any + +class Argument: + names: Any + kind: Any + raw_value: Any + default: Any + help: Any + positional: Any + optional: Any + incrementable: Any + attr_name: Any + def __init__( + self, + name=..., + names=..., + kind=..., + default=..., + help=..., + positional: bool = ..., + optional: bool = ..., + incrementable: bool = ..., + attr_name=..., + ) -> None: ... + @property + def name(self): ... + @property + def nicknames(self): ... + @property + def takes_value(self): ... + @property + def value(self): ... + @value.setter + def value(self, arg) -> None: ... + def set_value(self, value, cast: bool = ...): ... + @property + def got_value(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/context.pyi new file mode 100644 index 000000000000..80cc56a57a58 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/context.pyi @@ -0,0 +1,24 @@ +from typing import Any + +def translate_underscores(name: str) -> str: ... +def to_flag(name: str) -> str: ... +def sort_candidate(arg): ... +def flag_key(x): ... + +class ParserContext: + args: Any + positional_args: Any + flags: Any + inverse_flags: Any + name: Any + aliases: Any + def __init__(self, name=..., aliases=..., args=...) -> None: ... + def add_arg(self, *args, **kwargs) -> None: ... + @property + def missing_positional_args(self): ... + @property + def as_kwargs(self): ... + def names_for(self, flag): ... + def help_for(self, flag): ... + def help_tuples(self): ... + def flag_names(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/parser.pyi new file mode 100644 index 000000000000..2f836db1a8de --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/parser/parser.pyi @@ -0,0 +1,42 @@ +from typing import Any + +from .context import ParserContext + +def is_flag(value: str) -> bool: ... +def is_long_flag(value: str) -> bool: ... + +class Parser: + initial: Any + contexts: Any + ignore_unknown: Any + def __init__(self, contexts=..., initial=..., ignore_unknown: bool = ...) -> None: ... + def parse_argv(self, argv): ... + +class ParseMachine: + initial_state: str + def changing_state(self, from_, to) -> None: ... + ignore_unknown: Any + initial: Any + flag: Any + flag_got_value: bool + result: Any + contexts: Any + def __init__(self, initial, contexts, ignore_unknown) -> None: ... + @property + def waiting_for_flag_value(self): ... 
+ def handle(self, token) -> None: ... + def store_only(self, token) -> None: ... + def complete_context(self) -> None: ... + context: Any + def switch_to_context(self, name) -> None: ... + def complete_flag(self) -> None: ... + def check_ambiguity(self, value): ... + def switch_to_flag(self, flag, inverse: bool = ...) -> None: ... + def see_value(self, value) -> None: ... + def see_positional_arg(self, value) -> None: ... + def error(self, msg) -> None: ... + +class ParseResult(list[ParserContext]): + remainder: str + unparsed: Any + def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/program.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/program.pyi new file mode 100644 index 000000000000..280458237e13 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/program.pyi @@ -0,0 +1,71 @@ +from typing import Any + +class Program: + def core_args(self): ... + def task_args(self): ... + leading_indent_width: int + leading_indent: str + indent_width: int + indent: str + col_padding: int + version: Any + namespace: Any + argv: Any + loader_class: Any + executor_class: Any + config_class: Any + def __init__( + self, + version=..., + namespace=..., + name=..., + binary=..., + loader_class=..., + executor_class=..., + config_class=..., + binary_names=..., + ) -> None: ... + config: Any + def create_config(self) -> None: ... + def update_config(self, merge: bool = ...) -> None: ... + def run(self, argv=..., exit: bool = ...) -> None: ... + def parse_core(self, argv) -> None: ... + collection: Any + list_root: Any + list_depth: Any + list_format: str + scoped_collection: Any + def parse_collection(self) -> None: ... + def parse_cleanup(self) -> None: ... + def no_tasks_given(self) -> None: ... + def execute(self) -> None: ... + def normalize_argv(self, argv) -> None: ... + @property + def name(self): ... + @property + def called_as(self): ... 
+ @property + def binary(self): ... + @property + def binary_names(self): ... + @property + def args(self): ... + @property + def initial_context(self): ... + def print_version(self) -> None: ... + def print_help(self) -> None: ... + core: Any + def parse_core_args(self) -> None: ... + def load_collection(self) -> None: ... + parser: Any + core_via_tasks: Any + tasks: Any + def parse_tasks(self) -> None: ... + def print_task_help(self, name) -> None: ... + def list_tasks(self) -> None: ... + def list_flat(self) -> None: ... + def list_nested(self) -> None: ... + def list_json(self) -> None: ... + def task_list_opener(self, extra: str = ...): ... + def display_with_columns(self, pairs, extra: str = ...) -> None: ... + def print_columns(self, tuples) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/runners.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/runners.pyi new file mode 100644 index 000000000000..fd198395e317 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/runners.pyi @@ -0,0 +1,94 @@ +from typing import Any +from typing_extensions import Literal + +class Runner: + read_chunk_size: int + input_sleep: float + context: Any + program_finished: Any + warned_about_pty_fallback: bool + watchers: Any + def __init__(self, context) -> None: ... + def run(self, command, **kwargs): ... + def echo(self, command) -> None: ... + def make_promise(self): ... + def create_io_threads(self): ... + def generate_result(self, **kwargs): ... + def read_proc_output(self, reader) -> None: ... + def write_our_output(self, stream, string) -> None: ... + def handle_stdout(self, buffer_, hide, output) -> None: ... + def handle_stderr(self, buffer_, hide, output) -> None: ... + def read_our_stdin(self, input_): ... + def handle_stdin(self, input_, output, echo) -> None: ... + def should_echo_stdin(self, input_, output): ... + def respond(self, buffer_) -> None: ... 
+ def generate_env(self, env, replace_env): ... + def should_use_pty(self, pty, fallback): ... + @property + def has_dead_threads(self): ... + def wait(self) -> None: ... + def write_proc_stdin(self, data) -> None: ... + def decode(self, data): ... + @property + def process_is_finished(self) -> None: ... + def start(self, command, shell, env) -> None: ... + def start_timer(self, timeout) -> None: ... + def read_proc_stdout(self, num_bytes) -> None: ... + def read_proc_stderr(self, num_bytes) -> None: ... + def close_proc_stdin(self) -> None: ... + def default_encoding(self): ... + def send_interrupt(self, interrupt) -> None: ... + def returncode(self) -> None: ... + def stop(self) -> None: ... + def stop_timer(self) -> None: ... + def kill(self) -> None: ... + @property + def timed_out(self): ... + +class Local(Runner): + status: Any + def __init__(self, context) -> None: ... + def should_use_pty(self, pty: bool = ..., fallback: bool = ...): ... + process: Any + +class Result: + stdout: str + stderr: str + encoding: str + command: str + shell: Any + env: dict[str, Any] + exited: int + pty: bool + hide: tuple[Literal["stdout", "stderr"], ...] + def __init__( + self, + stdout: str = ..., + stderr: str = ..., + encoding: str | None = ..., + command: str = ..., + shell: str = ..., + env=..., + exited: int = ..., + pty: bool = ..., + hide: tuple[Literal["stdout", "stderr"], ...] = ..., + ) -> None: ... + @property + def return_code(self) -> int: ... + def __nonzero__(self) -> bool: ... + def __bool__(self) -> bool: ... + @property + def ok(self) -> bool: ... + @property + def failed(self) -> bool: ... + def tail(self, stream: Literal["stderr", "stdout"], count: int = ...) -> str: ... + +class Promise(Result): + runner: Any + def __init__(self, runner) -> None: ... + def join(self): ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + +def normalize_hide(val, out_stream=..., err_stream=...): ... 
+def default_encoding() -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/tasks.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/tasks.pyi new file mode 100644 index 000000000000..cc20ae2ff469 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/tasks.pyi @@ -0,0 +1,72 @@ +from _typeshed import Self +from typing import Any + +from .config import Config +from .context import Context + +NO_DEFAULT: object + +class Task: + body: Any + __doc__: str + __name__: str + __module__: Any + aliases: Any + is_default: bool + positional: Any + optional: Any + iterable: Any + incrementable: Any + auto_shortflags: Any + help: Any + pre: Any + post: Any + times_called: int + autoprint: Any + def __init__( + self, + body, + name=..., + aliases=..., + positional=..., + optional=..., + default: bool = ..., + auto_shortflags: bool = ..., + help=..., + pre=..., + post=..., + autoprint: bool = ..., + iterable=..., + incrementable=..., + ) -> None: ... + @property + def name(self): ... + def __eq__(self, other): ... + def __hash__(self): ... + def __call__(self, *args, **kwargs): ... + @property + def called(self): ... + def argspec(self, body): ... + def fill_implicit_positionals(self, positional): ... + def arg_opts(self, name, default, taken_names): ... + def get_arguments(self): ... + +def task(*args, **kwargs) -> Task: ... + +class Call: + task: Task + called_as: str | None + args: tuple[Any, ...] + kwargs: dict[str, Any] + def __init__( + self, task: Task, called_as: str | None = ..., args: tuple[Any, ...] | None = ..., kwargs: dict[str, Any] | None = ... + ) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __deepcopy__(self: Self, memo: Any) -> Self: ... + def __eq__(self, other: Call) -> bool: ... # type: ignore[override] + def make_context(self, config: Config) -> Context: ... + def clone_data(self): ... 
+ # TODO use overload + def clone(self, into: type[Call] | None = ..., with_: dict[str, Any] | None = ...) -> Call: ... + +def call(task: Task, *args: Any, **kwargs: Any) -> Call: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/terminals.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/terminals.pyi new file mode 100644 index 000000000000..611954043a91 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/terminals.pyi @@ -0,0 +1,12 @@ +from contextlib import AbstractContextManager +from io import TextIOWrapper +from typing import Any + +WINDOWS: bool + +def pty_size() -> tuple[int, int]: ... +def stdin_is_foregrounded_tty(stream: Any) -> bool: ... +def cbreak_already_set(stream: TextIOWrapper) -> bool: ... +def character_buffered(stream: TextIOWrapper) -> AbstractContextManager[None]: ... +def ready_for_reading(input_: TextIOWrapper) -> bool: ... +def bytes_to_read(input_: TextIOWrapper) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/util.pyi new file mode 100644 index 000000000000..f99b779304cd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/util.pyi @@ -0,0 +1,39 @@ +import threading +from contextlib import AbstractContextManager +from logging import Logger +from types import TracebackType +from typing import Any, Callable, Iterable, Mapping, NamedTuple + +LOG_FORMAT: str + +def enable_logging() -> None: ... + +log: Logger + +def task_name_sort_key(name: str) -> tuple[list[str], str]: ... +def cd(where: str) -> AbstractContextManager[None]: ... +def has_fileno(stream) -> bool: ... +def isatty(stream) -> bool: ... +def encode_output(string: str, encoding: str) -> str: ... +def helpline(obj: Callable[..., Any]) -> str | None: ... 
+ +class ExceptionHandlingThread(threading.Thread): + def __init__( + self, + *, + group: None = ..., + target: Callable[..., Any] | None = ..., + name: str | None = ..., + args: Iterable[Any] = ..., + kwargs: Mapping[str, Any] | None = ..., + daemon: bool | None = ..., + ) -> None: ... + def exception(self) -> ExceptionWrapper | None: ... + @property + def is_dead(self) -> bool: ... + +class ExceptionWrapper(NamedTuple): + kwargs: Any + type: type[BaseException] + value: BaseException + traceback: TracebackType diff --git a/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/watchers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/watchers.pyi new file mode 100644 index 000000000000..d1da58a97725 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/invoke/invoke/watchers.pyi @@ -0,0 +1,20 @@ +import threading +from typing import Iterable + +class StreamWatcher(threading.local): + def submit(self, stream) -> Iterable[str]: ... + +class Responder(StreamWatcher): + pattern: str + response: str + index: int + def __init__(self, pattern: str, response: str) -> None: ... + def pattern_matches(self, stream: str, pattern: str, index_attr: str) -> Iterable[str]: ... + def submit(self, stream: str) -> Iterable[str]: ... + +class FailingResponder(Responder): + sentinel: str + failure_index: int + tried: bool + def __init__(self, pattern: str, response: str, sentinel: str) -> None: ... + def submit(self, stream: str) -> Iterable[str]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/itsdangerous/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/itsdangerous/METADATA.toml deleted file mode 100644 index 2266533dffdd..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/itsdangerous/METADATA.toml +++ /dev/null @@ -1,3 +0,0 @@ -version = "1.1" -python2 = true -obsolete_since = "2.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/itsdangerous/itsdangerous.pyi b/packages/pyright-internal/typeshed-fallback/stubs/itsdangerous/itsdangerous.pyi deleted file mode 100644 index 17e1ba1ead3c..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/itsdangerous/itsdangerous.pyi +++ /dev/null @@ -1,174 +0,0 @@ -from datetime import datetime -from typing import IO, Any, Callable, Generator, Mapping, MutableMapping, Text, Tuple - -_serializer = Any # must be an object that has "dumps" and "loads" attributes (e.g. the json module) - -def want_bytes(s: Text | bytes, encoding: Text = ..., errors: Text = ...) -> bytes: ... - -class BadData(Exception): - message: str - def __init__(self, message: str) -> None: ... - -class BadPayload(BadData): - original_error: Exception | None - def __init__(self, message: str, original_error: Exception | None = ...) -> None: ... - -class BadSignature(BadData): - payload: Any | None - def __init__(self, message: str, payload: Any | None = ...) -> None: ... - -class BadTimeSignature(BadSignature): - date_signed: int | None - def __init__(self, message: str, payload: Any | None = ..., date_signed: int | None = ...) -> None: ... - -class BadHeader(BadSignature): - header: Any - original_error: Any - def __init__( - self, message: str, payload: Any | None = ..., header: Any | None = ..., original_error: Any | None = ... - ) -> None: ... - -class SignatureExpired(BadTimeSignature): ... - -def base64_encode(string: Text | bytes) -> bytes: ... -def base64_decode(string: Text | bytes) -> bytes: ... 
- -class SigningAlgorithm(object): - def get_signature(self, key: bytes, value: bytes) -> bytes: ... - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: ... - -class NoneAlgorithm(SigningAlgorithm): - def get_signature(self, key: bytes, value: bytes) -> bytes: ... - -class HMACAlgorithm(SigningAlgorithm): - default_digest_method: Callable[..., Any] - digest_method: Callable[..., Any] - def __init__(self, digest_method: Callable[..., Any] | None = ...) -> None: ... - def get_signature(self, key: bytes, value: bytes) -> bytes: ... - -class Signer(object): - default_digest_method: Callable[..., Any] = ... - default_key_derivation: str = ... - - secret_key: bytes - sep: bytes - salt: Text | bytes - key_derivation: str - digest_method: Callable[..., Any] - algorithm: SigningAlgorithm - def __init__( - self, - secret_key: Text | bytes, - salt: Text | bytes | None = ..., - sep: Text | bytes | None = ..., - key_derivation: str | None = ..., - digest_method: Callable[..., Any] | None = ..., - algorithm: SigningAlgorithm | None = ..., - ) -> None: ... - def derive_key(self) -> bytes: ... - def get_signature(self, value: Text | bytes) -> bytes: ... - def sign(self, value: Text | bytes) -> bytes: ... - def verify_signature(self, value: bytes, sig: Text | bytes) -> bool: ... - def unsign(self, signed_value: Text | bytes) -> bytes: ... - def validate(self, signed_value: Text | bytes) -> bool: ... - -class TimestampSigner(Signer): - def get_timestamp(self) -> int: ... - def timestamp_to_datetime(self, ts: float) -> datetime: ... - def sign(self, value: Text | bytes) -> bytes: ... - def unsign( - self, value: Text | bytes, max_age: int | None = ..., return_timestamp: bool = ... - ) -> Any: ... # morally -> bytes | Tuple[bytes, datetime] - def validate(self, signed_value: Text | bytes, max_age: int | None = ...) -> bool: ... - -class Serializer(object): - default_serializer: _serializer = ... - default_signer: Callable[..., Signer] = ... 
- - secret_key: bytes - salt: bytes - serializer: _serializer - is_text_serializer: bool - signer: Callable[..., Signer] - signer_kwargs: MutableMapping[str, Any] - def __init__( - self, - secret_key: Text | bytes, - salt: Text | bytes | None = ..., - serializer: _serializer | None = ..., - signer: Callable[..., Signer] | None = ..., - signer_kwargs: MutableMapping[str, Any] | None = ..., - ) -> None: ... - def load_payload(self, payload: bytes, serializer: _serializer | None = ...) -> Any: ... - def dump_payload(self, obj: Any) -> bytes: ... - def make_signer(self, salt: Text | bytes | None = ...) -> Signer: ... - def iter_unsigners(self, salt: Text | bytes | None = ...) -> Generator[Any, None, None]: ... - def dumps(self, obj: Any, salt: Text | bytes | None = ...) -> Any: ... # morally -> str | bytes - def dump(self, obj: Any, f: IO[Any], salt: Text | bytes | None = ...) -> None: ... - def loads(self, s: Text | bytes, salt: Text | bytes | None = ...) -> Any: ... - def load(self, f: IO[Any], salt: Text | bytes | None = ...) -> Any: ... - def loads_unsafe(self, s: Text | bytes, salt: Text | bytes | None = ...) -> Tuple[bool, Any | None]: ... - def load_unsafe(self, f: IO[Any], salt: Text | bytes | None = ...) -> Tuple[bool, Any | None]: ... - -class TimedSerializer(Serializer): - def loads( - self, s: Text | bytes, salt: Text | bytes | None = ..., max_age: int | None = ..., return_timestamp: bool = ... - ) -> Any: ... # morally -> Any | Tuple[Any, datetime] - def loads_unsafe(self, s: Text | bytes, salt: Text | bytes | None = ..., max_age: int | None = ...) -> Tuple[bool, Any]: ... - -class JSONWebSignatureSerializer(Serializer): - jws_algorithms: MutableMapping[Text, SigningAlgorithm] = ... - default_algorithm: Text = ... - default_serializer: Any = ... 
- - algorithm_name: Text - algorithm: SigningAlgorithm - def __init__( - self, - secret_key: Text | bytes, - salt: Text | bytes | None = ..., - serializer: _serializer | None = ..., - signer: Callable[..., Signer] | None = ..., - signer_kwargs: MutableMapping[str, Any] | None = ..., - algorithm_name: Text | None = ..., - ) -> None: ... - def load_payload( - self, payload: Text | bytes, serializer: _serializer | None = ..., return_header: bool = ... - ) -> Any: ... # morally -> Any | Tuple[Any, MutableMapping[str, Any]] - def dump_payload(self, header: Mapping[str, Any], obj: Any) -> bytes: ... # type: ignore - def make_algorithm(self, algorithm_name: Text) -> SigningAlgorithm: ... - def make_signer(self, salt: Text | bytes | None = ..., algorithm: SigningAlgorithm = ...) -> Signer: ... - def make_header(self, header_fields: Mapping[str, Any] | None) -> MutableMapping[str, Any]: ... - def dumps(self, obj: Any, salt: Text | bytes | None = ..., header_fields: Mapping[str, Any] | None = ...) -> bytes: ... - def loads( - self, s: Text | bytes, salt: Text | bytes | None = ..., return_header: bool = ... - ) -> Any: ... # morally -> Any | Tuple[Any, MutableMapping[str, Any]] - def loads_unsafe(self, s: Text | bytes, salt: Text | bytes | None = ..., return_header: bool = ...) -> Tuple[bool, Any]: ... - -class TimedJSONWebSignatureSerializer(JSONWebSignatureSerializer): - DEFAULT_EXPIRES_IN: int = ... - expires_in: int - def __init__( - self, - secret_key: Text | bytes, - expires_in: int | None = ..., - salt: Text | bytes | None = ..., - serializer: _serializer | None = ..., - signer: Callable[..., Signer] | None = ..., - signer_kwargs: MutableMapping[str, Any] | None = ..., - algorithm_name: Text | None = ..., - ) -> None: ... - def make_header(self, header_fields: Mapping[str, Any] | None) -> MutableMapping[str, Any]: ... - def loads( - self, s: Text | bytes, salt: Text | bytes | None = ..., return_header: bool = ... - ) -> Any: ... 
# morally -> Any | Tuple[Any, MutableMapping[str, Any]] - def get_issue_date(self, header: Mapping[str, Any]) -> datetime | None: ... - def now(self) -> int: ... - -class _URLSafeSerializerMixin(object): - default_serializer: _serializer = ... - def load_payload(self, payload: bytes, serializer: _serializer | None = ...) -> Any: ... - def dump_payload(self, obj: Any) -> bytes: ... - -class URLSafeSerializer(_URLSafeSerializerMixin, Serializer): ... -class URLSafeTimedSerializer(_URLSafeSerializerMixin, TimedSerializer): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jmespath/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/jmespath/METADATA.toml index 3d8655e7d6a3..5c7ed21e8ad5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jmespath/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/jmespath/METADATA.toml @@ -1 +1 @@ -version = "0.10" +version = "0.10.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/METADATA.toml index ffc5a1c5e98b..2e800d80f5da 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/METADATA.toml @@ -1 +1 @@ -version = "3.2" +version = "4.4.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/__init__.pyi index 87b8e96f56b7..dcb925bc6032 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/__init__.pyi @@ -4,6 +4,8 @@ from jsonschema._format import ( draft4_format_checker as draft4_format_checker, draft6_format_checker as draft6_format_checker, draft7_format_checker as draft7_format_checker, + draft201909_format_checker as 
draft201909_format_checker, + draft202012_format_checker as draft202012_format_checker, ) from jsonschema._types import TypeChecker as TypeChecker from jsonschema.exceptions import ( @@ -13,11 +15,14 @@ from jsonschema.exceptions import ( SchemaError as SchemaError, ValidationError as ValidationError, ) +from jsonschema.protocols import Validator as Validator from jsonschema.validators import ( Draft3Validator as Draft3Validator, Draft4Validator as Draft4Validator, Draft6Validator as Draft6Validator, Draft7Validator as Draft7Validator, + Draft201909Validator as Draft201909Validator, + Draft202012Validator as Draft202012Validator, RefResolver as RefResolver, validate as validate, ) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_format.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_format.pyi index fdab126bb1fa..492800ab5190 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_format.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_format.pyi @@ -1,35 +1,42 @@ -from typing import Any +from typing import Any, Iterable class FormatChecker: checkers: Any - def __init__(self, formats: Any | None = ...) -> None: ... + def __init__(self, formats: Iterable[str] | None = ...) -> None: ... def checks(self, format, raises=...): ... cls_checks: Any def check(self, instance, format) -> None: ... - def conforms(self, instance, format): ... + def conforms(self, instance, format) -> bool: ... -draft3_format_checker: Any -draft4_format_checker: Any -draft6_format_checker: Any -draft7_format_checker: Any +draft3_format_checker: FormatChecker +draft4_format_checker: FormatChecker +draft6_format_checker: FormatChecker +draft7_format_checker: FormatChecker +draft201909_format_checker: FormatChecker +draft202012_format_checker: FormatChecker -def is_email(instance): ... -def is_ipv4(instance): ... -def is_ipv6(instance): ... 
-def is_host_name(instance): ... -def is_idn_host_name(instance): ... -def is_uri(instance): ... -def is_uri_reference(instance): ... -def is_iri(instance): ... -def is_iri_reference(instance): ... -def is_datetime(instance): ... -def is_time(instance): ... -def is_regex(instance): ... -def is_date(instance): ... -def is_draft3_time(instance): ... -def is_css_color_code(instance): ... -def is_css21_color(instance): ... -def is_css3_color(instance): ... -def is_json_pointer(instance): ... -def is_relative_json_pointer(instance): ... -def is_uri_template(instance, template_validator=...): ... +def is_email(instance) -> bool: ... +def is_ipv4(instance) -> bool: ... +def is_ipv6(instance) -> bool: ... + +# is_host_name is only defined if fqdn is installed. +def is_host_name(instance) -> bool: ... +def is_idn_host_name(instance) -> bool: ... +def is_uri(instance) -> bool: ... +def is_uri_reference(instance) -> bool: ... +def is_iri(instance) -> bool: ... +def is_iri_reference(instance) -> bool: ... +def is_datetime(instance) -> bool: ... +def is_time(instance) -> bool: ... +def is_regex(instance) -> bool: ... +def is_date(instance) -> bool: ... +def is_draft3_time(instance) -> bool: ... +def is_css_color_code(instance) -> bool: ... +def is_css21_color(instance) -> bool: ... +def is_json_pointer(instance) -> bool: ... +def is_relative_json_pointer(instance) -> bool: ... +def is_uri_template(instance) -> bool: ... + +# is_duration is only defined if isoduration is installed. +def is_duration(instance) -> bool: ... +def is_uuid(instance) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_legacy_validators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_legacy_validators.pyi index 40e61896d73a..08a7dd5394ea 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_legacy_validators.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_legacy_validators.pyi @@ -1,8 +1,15 @@ +from typing import Any, ItemsView + +def ignore_ref_siblings(schema) -> list[tuple[str, Any]] | ItemsView[str, Any]: ... def dependencies_draft3(validator, dependencies, instance, schema) -> None: ... +def dependencies_draft4_draft6_draft7(validator, dependencies, instance, schema) -> None: ... def disallow_draft3(validator, disallow, instance, schema) -> None: ... def extends_draft3(validator, extends, instance, schema) -> None: ... def items_draft3_draft4(validator, items, instance, schema) -> None: ... +def items_draft6_draft7_draft201909(validator, items, instance, schema) -> None: ... def minimum_draft3_draft4(validator, minimum, instance, schema) -> None: ... def maximum_draft3_draft4(validator, maximum, instance, schema) -> None: ... def properties_draft3(validator, properties, instance, schema) -> None: ... def type_draft3(validator, types, instance, schema) -> None: ... +def contains_draft6_draft7(validator, contains, instance, schema) -> None: ... +def recursiveRef(validator, recursiveRef, instance, schema) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_types.pyi index efbe6ba4e56d..5c97055445e0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_types.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_types.pyi @@ -1,26 +1,24 @@ -from typing import Any +from typing import Callable, Iterable, Mapping -def is_array(checker, instance): ... -def is_bool(checker, instance): ... -def is_integer(checker, instance): ... -def is_null(checker, instance): ... -def is_number(checker, instance): ... -def is_object(checker, instance): ... -def is_string(checker, instance): ... -def is_any(checker, instance): ... +def is_array(checker, instance) -> bool: ... +def is_bool(checker, instance) -> bool: ... +def is_integer(checker, instance) -> bool: ... +def is_null(checker, instance) -> bool: ... +def is_number(checker, instance) -> bool: ... +def is_object(checker, instance) -> bool: ... +def is_string(checker, instance) -> bool: ... +def is_any(checker, instance) -> bool: ... class TypeChecker: - def is_type(self, instance, type): ... - def redefine(self, type, fn): ... - def redefine_many(self, definitions=...): ... - def remove(self, *types): ... - def __init__(self, type_checkers=...) -> None: ... - def __lt__(self, other): ... - def __le__(self, other): ... - def __gt__(self, other): ... - def __ge__(self, other): ... + def __init__(self, type_checkers: Mapping[str, Callable[[object], bool]] = ...) -> None: ... + def is_type(self, instance, type: str) -> bool: ... + def redefine(self, type: str, fn: Callable[..., bool]) -> TypeChecker: ... + def redefine_many(self, definitions=...) -> TypeChecker: ... + def remove(self, *types: Iterable[str]) -> TypeChecker: ... 
-draft3_type_checker: Any -draft4_type_checker: Any -draft6_type_checker: Any -draft7_type_checker: Any +draft3_type_checker: TypeChecker +draft4_type_checker: TypeChecker +draft6_type_checker: TypeChecker +draft7_type_checker: TypeChecker +draft201909_type_checker: TypeChecker +draft202012_type_checker: TypeChecker diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_utils.pyi index 596ec6472f06..2b20d72a5c39 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_utils.pyi @@ -1,25 +1,25 @@ -from typing import Any, MutableMapping +from _typeshed import SupportsKeysAndGetItem +from typing import Any, Generator, Iterable, Iterator, Mapping, MutableMapping, Sized -class URIDict(MutableMapping[Any, Any]): - def normalize(self, uri): ... - store: Any - def __init__(self, *args, **kwargs) -> None: ... - def __getitem__(self, uri): ... - def __setitem__(self, uri, value) -> None: ... - def __delitem__(self, uri) -> None: ... - def __iter__(self): ... - def __len__(self): ... +class URIDict(MutableMapping[str, str]): + def normalize(self, uri: str) -> str: ... + store: dict[str, str] + def __init__(self, __m: SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]], **kwargs: str) -> None: ... + def __getitem__(self, uri: str) -> str: ... + def __setitem__(self, uri: str, value: str) -> None: ... + def __delitem__(self, uri: str) -> None: ... + def __iter__(self) -> Iterator[str]: ... + def __len__(self) -> int: ... class Unset: ... def load_schema(name): ... -def indent(string, times: int = ...): ... -def format_as_index(indices): ... -def find_additional_properties(instance, schema) -> None: ... -def extras_msg(extras): ... -def types_msg(instance, types): ... -def flatten(suitable_for_isinstance): ... -def ensure_list(thing): ... 
-def equal(one, two): ... +def format_as_index(container: str, indices) -> str: ... +def find_additional_properties(instance: Iterable[Any], schema: Mapping[Any, Any]) -> Generator[Any, None, None]: ... +def extras_msg(extras: Iterable[Any] | Sized) -> str: ... +def ensure_list(thing) -> list[Any]: ... +def equal(one, two) -> bool: ... def unbool(element, true=..., false=...): ... -def uniq(container): ... +def uniq(container) -> bool: ... +def find_evaluated_item_indexes_by_schema(validator, instance, schema) -> list[Any]: ... +def find_evaluated_property_keys_by_schema(validator, instance, schema) -> list[Any]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_validators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_validators.pyi index 8afcc3c07b4c..f6daf12685ea 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_validators.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/_validators.pyi @@ -17,9 +17,11 @@ def pattern(validator, patrn, instance, schema) -> None: ... def format(validator, format, instance, schema) -> None: ... def minLength(validator, mL, instance, schema) -> None: ... def maxLength(validator, mL, instance, schema) -> None: ... -def dependencies(validator, dependencies, instance, schema) -> None: ... +def dependentRequired(validator, dependentRequired, instance, schema) -> None: ... +def dependentSchemas(validator, dependentSchemas, instance, schema) -> None: ... def enum(validator, enums, instance, schema) -> None: ... def ref(validator, ref, instance, schema) -> None: ... +def dynamicRef(validator, dynamicRef, instance, schema) -> None: ... def type(validator, types, instance, schema) -> None: ... def properties(validator, properties, instance, schema) -> None: ... def required(validator, required, instance, schema) -> None: ... @@ -30,3 +32,6 @@ def anyOf(validator, anyOf, instance, schema) -> None: ... 
def oneOf(validator, oneOf, instance, schema) -> None: ... def not_(validator, not_schema, instance, schema) -> None: ... def if_(validator, if_schema, instance, schema) -> None: ... +def unevaluatedItems(validator, unevaluatedItems, instance, schema) -> None: ... +def unevaluatedProperties(validator, unevaluatedProperties, instance, schema) -> None: ... +def prefixItems(validator, prefixItems, instance, schema) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/cli.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/cli.pyi index 1ccff3f60407..85c3bd71d0a2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/cli.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/cli.pyi @@ -1,10 +1,32 @@ from typing import Any -from jsonschema._reflect import namedAny as namedAny -from jsonschema.validators import validator_for as validator_for +class _CannotLoadFile(Exception): ... + +class _Outputter: + def __init__(self, formatter, stdout, stderr): ... + @classmethod + def from_arguments(cls, arguments, stdout, stderr): ... + def load(self, path): ... + def filenotfound_error(self, **kwargs) -> None: ... + def parsing_error(self, **kwargs) -> None: ... + def validation_error(self, **kwargs) -> None: ... + def validation_success(self, **kwargs) -> None: ... + +class _PrettyFormatter: + def filenotfound_error(self, path, exc_info): ... + def parsing_error(self, path, exc_info): ... + def validation_error(self, instance_path, error): ... + def validation_success(self, instance_path): ... + +class _PlainFormatter: + def __init__(self, error_format): ... + def filenotfound_error(self, path, exc_info): ... + def parsing_error(self, path, exc_info): ... + def validation_error(self, instance_path, error): ... + def validation_success(self, instance_path): ... parser: Any def parse_args(args): ... def main(args=...) -> None: ... 
-def run(arguments, stdout=..., stderr=...): ... +def run(arguments, stdout=..., stderr=..., stdin=...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/exceptions.pyi index 078d44b643c2..466264dbbf0c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/exceptions.pyi @@ -27,41 +27,35 @@ class _Error(Exception): schema_path=..., parent: Any | None = ..., ) -> None: ... - def __unicode__(self): ... @classmethod def create_from(cls, other): ... @property def absolute_path(self): ... @property def absolute_schema_path(self): ... + @property + def json_path(self): ... class ValidationError(_Error): ... class SchemaError(_Error): ... class RefResolutionError(Exception): - def __init__(self, cause) -> None: ... - def __lt__(self, other): ... - def __le__(self, other): ... - def __gt__(self, other): ... - def __ge__(self, other): ... + def __init__(self, cause: str) -> None: ... class UndefinedTypeCheck(Exception): type: Any def __init__(self, type) -> None: ... - def __unicode__(self): ... class UnknownType(Exception): type: Any instance: Any schema: Any def __init__(self, type, instance, schema) -> None: ... - def __unicode__(self): ... class FormatError(Exception): message: Any cause: Any def __init__(self, message, cause: Any | None = ...) -> None: ... - def __unicode__(self): ... 
class ErrorTree: errors: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/protocols.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/protocols.pyi new file mode 100644 index 000000000000..16610ac2eaa7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/protocols.pyi @@ -0,0 +1,22 @@ +from typing import Any, ClassVar, Iterator, Protocol + +from jsonschema._format import FormatChecker +from jsonschema._types import TypeChecker +from jsonschema.exceptions import ValidationError +from jsonschema.validators import RefResolver + +class Validator(Protocol): + META_SCHEMA: ClassVar[dict[Any, Any]] + VALIDATORS: ClassVar[dict[Any, Any]] + TYPE_CHECKER: ClassVar[TypeChecker] + schema: dict[Any, Any] | bool + def __init__( + self, schema: dict[Any, Any] | bool, resolver: RefResolver | None = ..., format_checker: FormatChecker | None = ... + ) -> None: ... + @classmethod + def check_schema(cls, schema: dict[Any, Any]) -> None: ... + def is_type(self, instance: Any, type: str) -> bool: ... + def is_valid(self, instance: dict[Any, Any]) -> bool: ... + def iter_errors(self, instance: dict[Any, Any]) -> Iterator[ValidationError]: ... + def validate(self, instance: dict[Any, Any]) -> None: ... + def evolve(self, **kwargs) -> Validator: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/validators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/validators.pyi index b9a078e601f9..656aa20bc093 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/validators.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/jsonschema/jsonschema/validators.pyi @@ -1,45 +1,60 @@ -from typing import Any +from _typeshed import SupportsKeysAndGetItem +from collections.abc import Callable, Generator, Iterable +from typing import Any, ClassVar -from jsonschema import exceptions as exceptions -from jsonschema.exceptions import ErrorTree as ErrorTree +from ._utils import URIDict -class _DontDoThat(Exception): ... +_Schema = Any -validators: Any -meta_schemas: Any - -def validates(version): ... - -class _DefaultTypesDeprecatingMetaClass(type): - DEFAULT_TYPES: Any +# This class does not exist at runtime. Compatible classes are created at +# runtime by create(). +class _Validator: + VALIDATORS: ClassVar[dict[Any, Any]] + META_SCHEMA: ClassVar[dict[Any, Any]] + TYPE_CHECKER: Any + @staticmethod + def ID_OF(schema: _Schema) -> str: ... + schema: Any + resolver: Any + format_checker: Any + evolve: Any + def __init__(self, schema: _Schema, resolver: Any | None = ..., format_checker: Any | None = ...) -> None: ... + @classmethod + def check_schema(cls, schema) -> None: ... + def iter_errors(self, instance, _schema: Any | None = ...) -> Generator[Any, None, None]: ... + def descend(self, instance, schema, path: Any | None = ..., schema_path: Any | None = ...) -> Generator[Any, None, None]: ... + def validate(self, *args, **kwargs) -> None: ... + def is_type(self, instance, type): ... + def is_valid(self, instance, _schema: Any | None = ...) -> bool: ... +def validates(version: str) -> Callable[..., Any]: ... 
def create( - meta_schema, - validators=..., - version: Any | None = ..., - default_types: Any | None = ..., - type_checker: Any | None = ..., - id_of=..., -): ... + meta_schema, validators=..., version: Any | None = ..., type_checker=..., id_of=..., applicable_validators=... +) -> type[_Validator]: ... def extend(validator, validators=..., version: Any | None = ..., type_checker: Any | None = ...): ... -Draft3Validator: Any -Draft4Validator: Any -Draft6Validator: Any -Draft7Validator: Any +# At runtime these are fields that are assigned the return values of create() calls. +class Draft3Validator(_Validator): ... +class Draft4Validator(_Validator): ... +class Draft6Validator(_Validator): ... +class Draft7Validator(_Validator): ... +class Draft201909Validator(_Validator): ... +class Draft202012Validator(_Validator): ... + +_Handler = Callable[[str], Any] class RefResolver: - referrer: Any + referrer: str cache_remote: Any - handlers: Any - store: Any + handlers: dict[str, _Handler] + store: URIDict def __init__( self, - base_uri, - referrer, - store=..., + base_uri: str, + referrer: str, + store: SupportsKeysAndGetItem[str, str] | Iterable[tuple[str, str]] = ..., cache_remote: bool = ..., - handlers=..., + handlers: SupportsKeysAndGetItem[str, _Handler] | Iterable[tuple[str, _Handler]] = ..., urljoin_cache: Any | None = ..., remote_cache: Any | None = ..., ) -> None: ... @@ -58,5 +73,5 @@ class RefResolver: def resolve_fragment(self, document, fragment): ... def resolve_remote(self, uri): ... -def validate(instance, schema, cls: Any | None = ..., *args, **kwargs) -> None: ... +def validate(instance: object, schema: object, cls: type[_Validator] | None = ..., *args: Any, **kwargs: Any) -> None: ... def validator_for(schema, default=...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/METADATA.toml new file mode 100644 index 000000000000..77ff5706c1d9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/METADATA.toml @@ -0,0 +1,2 @@ +version = "2.9.*" +requires = [] # requires types-pyasn1 (not available yet) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi new file mode 100644 index 000000000000..4e9d2efd2d8f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/__init__.pyi @@ -0,0 +1,104 @@ +from typing import Any +from typing_extensions import Literal + +from .abstract.attrDef import AttrDef as AttrDef +from .abstract.attribute import ( + Attribute as Attribute, + OperationalAttribute as OperationalAttribute, + WritableAttribute as WritableAttribute, +) +from .abstract.cursor import Reader as Reader, Writer as Writer +from .abstract.entry import Entry as Entry, WritableEntry as WritableEntry +from .abstract.objectDef import ObjectDef as ObjectDef +from .core.connection import Connection as Connection +from .core.pooling import ServerPool as ServerPool +from .core.rdns import ReverseDnsSetting as ReverseDnsSetting +from .core.server import Server as Server +from .core.tls import Tls as Tls +from .protocol.rfc4512 import DsaInfo as DsaInfo, SchemaInfo as SchemaInfo +from .utils.config import get_config_parameter as get_config_parameter, set_config_parameter as set_config_parameter +from .version import __description__ as __description__, __status__ as __status__, __url__ as __url__ + +ANONYMOUS: Literal["ANONYMOUS"] +SIMPLE: Literal["SIMPLE"] +SASL: Literal["SASL"] +NTLM: Literal["NTLM"] + +EXTERNAL: Literal["EXTERNAL"] +DIGEST_MD5: Literal["DIGEST-MD5"] +KERBEROS: Literal["GSSAPI"] +GSSAPI: Literal["GSSAPI"] +PLAIN: Literal["PLAIN"] + 
+AUTO_BIND_DEFAULT: Literal["DEFAULT"] +AUTO_BIND_NONE: Literal["NONE"] +AUTO_BIND_NO_TLS: Literal["NO_TLS"] +AUTO_BIND_TLS_BEFORE_BIND: Literal["TLS_BEFORE_BIND"] +AUTO_BIND_TLS_AFTER_BIND: Literal["TLS_AFTER_BIND"] + +IP_SYSTEM_DEFAULT: Literal["IP_SYSTEM_DEFAULT"] +IP_V4_ONLY: Literal["IP_V4_ONLY"] +IP_V6_ONLY: Literal["IP_V6_ONLY"] +IP_V4_PREFERRED: Literal["IP_V4_PREFERRED"] +IP_V6_PREFERRED: Literal["IP_V6_PREFERRED"] + +BASE: Literal["BASE"] +LEVEL: Literal["LEVEL"] +SUBTREE: Literal["SUBTREE"] + +DEREF_NEVER: Literal["NEVER"] +DEREF_SEARCH: Literal["SEARCH"] +DEREF_BASE: Literal["FINDING_BASE"] +DEREF_ALWAYS: Literal["ALWAYS"] + +ALL_ATTRIBUTES: Literal["*"] +NO_ATTRIBUTES: Literal["1.1"] +ALL_OPERATIONAL_ATTRIBUTES: Literal["+"] + +MODIFY_ADD: Literal["MODIFY_ADD"] +MODIFY_DELETE: Literal["MODIFY_DELETE"] +MODIFY_REPLACE: Literal["MODIFY_REPLACE"] +MODIFY_INCREMENT: Literal["MODIFY_INCREMENT"] + +SYNC: Literal["SYNC"] +SAFE_SYNC: Literal["SAFE_SYNC"] +SAFE_RESTARTABLE: Literal["SAFE_RESTARTABLE"] +ASYNC: Literal["ASYNC"] +LDIF: Literal["LDIF"] +RESTARTABLE: Literal["RESTARTABLE"] +REUSABLE: Literal["REUSABLE"] +MOCK_SYNC: Literal["MOCK_SYNC"] +MOCK_ASYNC: Literal["MOCK_ASYNC"] +ASYNC_STREAM: Literal["ASYNC_STREAM"] + +NONE: Literal["NO_INFO"] +DSA: Literal["DSA"] +SCHEMA: Literal["SCHEMA"] +ALL: Literal["ALL"] + +OFFLINE_EDIR_8_8_8: Literal["EDIR_8_8_8"] +OFFLINE_EDIR_9_1_4: Literal["EDIR_9_1_4"] +OFFLINE_AD_2012_R2: Literal["AD_2012_R2"] +OFFLINE_SLAPD_2_4: Literal["SLAPD_2_4"] +OFFLINE_DS389_1_3_3: Literal["DS389_1_3_3"] + +FIRST: Literal["FIRST"] +ROUND_ROBIN: Literal["ROUND_ROBIN"] +RANDOM: Literal["RANDOM"] + +HASHED_NONE: Literal["PLAIN"] +HASHED_SHA: Literal["SHA"] +HASHED_SHA256: Literal["SHA256"] +HASHED_SHA384: Literal["SHA384"] +HASHED_SHA512: Literal["SHA512"] +HASHED_MD5: Literal["MD5"] +HASHED_SALTED_SHA: Literal["SALTED_SHA"] +HASHED_SALTED_SHA256: Literal["SALTED_SHA256"] +HASHED_SALTED_SHA384: Literal["SALTED_SHA384"] 
+HASHED_SALTED_SHA512: Literal["SALTED_SHA512"] +HASHED_SALTED_MD5: Literal["SALTED_MD5"] + +NUMERIC_TYPES: tuple[type[Any], ...] +INTEGER_TYPES: tuple[type[Any], ...] +STRING_TYPES: tuple[type[Any], ...] +SEQUENCE_TYPES: tuple[type[Any], ...] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi new file mode 100644 index 000000000000..5c2b1bd77518 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/__init__.pyi @@ -0,0 +1,15 @@ +from typing import Any + +STATUS_INIT: str +STATUS_VIRTUAL: str +STATUS_MANDATORY_MISSING: str +STATUS_READ: str +STATUS_WRITABLE: str +STATUS_PENDING_CHANGES: str +STATUS_COMMITTED: str +STATUS_READY_FOR_DELETION: str +STATUS_READY_FOR_MOVING: str +STATUS_READY_FOR_RENAMING: str +STATUS_DELETED: str +STATUSES: Any +INITIAL_STATUSES: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi new file mode 100644 index 000000000000..980262393e08 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attrDef.pyi @@ -0,0 +1,33 @@ +from typing import Any + +class AttrDef: + name: Any + key: Any + validate: Any + pre_query: Any + post_query: Any + default: Any + dereference_dn: Any + description: Any + mandatory: Any + single_value: Any + oid_info: Any + other_names: Any + def __init__( + self, + name, + key: Any | None = ..., + validate: Any | None = ..., + pre_query: Any | None = ..., + post_query: Any | None = ..., + default=..., + dereference_dn: Any | None = ..., + description: Any | None = ..., + mandatory: bool = ..., + single_value: Any | None = ..., + alias: Any | None = ..., + ) -> None: ... + def __eq__(self, other): ... + def __lt__(self, other): ... + def __hash__(self): ... 
+ def __setattr__(self, key, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi new file mode 100644 index 000000000000..d5ed793dbf77 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/attribute.pyi @@ -0,0 +1,34 @@ +from typing import Any + +class Attribute: + key: Any + definition: Any + values: Any + raw_values: Any + response: Any + entry: Any + cursor: Any + other_names: Any + def __init__(self, attr_def, entry, cursor) -> None: ... + def __len__(self): ... + def __iter__(self): ... + def __getitem__(self, item): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + @property + def value(self): ... + +class OperationalAttribute(Attribute): ... + +class WritableAttribute(Attribute): + def __iadd__(self, other): ... + def __isub__(self, other): ... + def add(self, values) -> None: ... + def set(self, values) -> None: ... + def delete(self, values) -> None: ... + def remove(self) -> None: ... + def discard(self) -> None: ... + @property + def virtual(self): ... + @property + def changes(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi new file mode 100644 index 000000000000..ee27126afca7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/cursor.pyi @@ -0,0 +1,102 @@ +from typing import Any, NamedTuple + +class Operation(NamedTuple): + request: Any + result: Any + response: Any + +class Cursor: + connection: Any + get_operational_attributes: Any + definition: Any + attributes: Any + controls: Any + execution_time: Any + entries: Any + schema: Any + def __init__( + self, + connection, + object_def, + get_operational_attributes: bool = ..., + attributes: Any | None = ..., + controls: Any | None = ..., + auxiliary_class: Any | None = ..., + ) -> None: ... + def __iter__(self): ... + def __getitem__(self, item): ... + def __len__(self): ... + def __bool__(self): ... + def match_dn(self, dn): ... + def match(self, attributes, value): ... + def remove(self, entry) -> None: ... + @property + def operations(self): ... + @property + def errors(self): ... + @property + def failed(self): ... + +class Reader(Cursor): + entry_class: Any + attribute_class: Any + entry_initial_status: Any + sub_tree: Any + base: Any + dereference_aliases: Any + validated_query: Any + query_filter: Any + def __init__( + self, + connection, + object_def, + base, + query: str = ..., + components_in_and: bool = ..., + sub_tree: bool = ..., + get_operational_attributes: bool = ..., + attributes: Any | None = ..., + controls: Any | None = ..., + auxiliary_class: Any | None = ..., + ) -> None: ... + @property + def query(self): ... + @query.setter + def query(self, value) -> None: ... + @property + def components_in_and(self): ... + @components_in_and.setter + def components_in_and(self, value) -> None: ... + def clear(self) -> None: ... + execution_time: Any + entries: Any + def reset(self) -> None: ... 
+ def search(self, attributes: Any | None = ...): ... + def search_object(self, entry_dn: Any | None = ..., attributes: Any | None = ...): ... + def search_level(self, attributes: Any | None = ...): ... + def search_subtree(self, attributes: Any | None = ...): ... + def search_paged(self, paged_size, paged_criticality: bool = ..., generator: bool = ..., attributes: Any | None = ...): ... + +class Writer(Cursor): + entry_class: Any + attribute_class: Any + entry_initial_status: Any + @staticmethod + def from_cursor(cursor, connection: Any | None = ..., object_def: Any | None = ..., custom_validator: Any | None = ...): ... + @staticmethod + def from_response(connection, object_def, response: Any | None = ...): ... + dereference_aliases: Any + def __init__( + self, + connection, + object_def, + get_operational_attributes: bool = ..., + attributes: Any | None = ..., + controls: Any | None = ..., + auxiliary_class: Any | None = ..., + ) -> None: ... + execution_time: Any + def commit(self, refresh: bool = ...): ... + def discard(self) -> None: ... + def new(self, dn): ... + def refresh_entry(self, entry, tries: int = ..., seconds: int = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi new file mode 100644 index 000000000000..b7392e22cd6d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/entry.pyi @@ -0,0 +1,83 @@ +from typing import Any + +class EntryState: + dn: Any + status: Any + attributes: Any + raw_attributes: Any + response: Any + cursor: Any + origin: Any + read_time: Any + changes: Any + definition: Any + def __init__(self, dn, cursor) -> None: ... + def set_status(self, status) -> None: ... + @property + def entry_raw_attributes(self): ... + +class EntryBase: + def __init__(self, dn, cursor) -> None: ... + def __iter__(self): ... + def __contains__(self, item): ... 
+ def __getattr__(self, item): ... + def __setattr__(self, item, value) -> None: ... + def __getitem__(self, item): ... + def __eq__(self, other): ... + def __lt__(self, other): ... + @property + def entry_dn(self): ... + @property + def entry_cursor(self): ... + @property + def entry_status(self): ... + @property + def entry_definition(self): ... + @property + def entry_raw_attributes(self): ... + def entry_raw_attribute(self, name): ... + @property + def entry_mandatory_attributes(self): ... + @property + def entry_attributes(self): ... + @property + def entry_attributes_as_dict(self): ... + @property + def entry_read_time(self): ... + def entry_to_json( + self, + raw: bool = ..., + indent: int = ..., + sort: bool = ..., + stream: Any | None = ..., + checked_attributes: bool = ..., + include_empty: bool = ..., + ): ... + def entry_to_ldif( + self, all_base64: bool = ..., line_separator: Any | None = ..., sort_order: Any | None = ..., stream: Any | None = ... + ): ... + +class Entry(EntryBase): + def entry_writable( + self, + object_def: Any | None = ..., + writer_cursor: Any | None = ..., + attributes: Any | None = ..., + custom_validator: Any | None = ..., + auxiliary_class: Any | None = ..., + ): ... + +class WritableEntry(EntryBase): + def __setitem__(self, key, value) -> None: ... + def __setattr__(self, item, value) -> None: ... + def __getattr__(self, item): ... + @property + def entry_virtual_attributes(self): ... + def entry_commit_changes(self, refresh: bool = ..., controls: Any | None = ..., clear_history: bool = ...): ... + def entry_discard_changes(self) -> None: ... + def entry_delete(self) -> None: ... + def entry_refresh(self, tries: int = ..., seconds: int = ...): ... + def entry_move(self, destination_dn) -> None: ... + def entry_rename(self, new_name) -> None: ... + @property + def entry_changes(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/objectDef.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/objectDef.pyi new file mode 100644 index 000000000000..31931796f27c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/abstract/objectDef.pyi @@ -0,0 +1,23 @@ +from typing import Any + +class ObjectDef: + def __init__( + self, + object_class: Any | None = ..., + schema: Any | None = ..., + custom_validator: Any | None = ..., + auxiliary_class: Any | None = ..., + ) -> None: ... + def __getitem__(self, item): ... + def __getattr__(self, item): ... + def __setattr__(self, key, value) -> None: ... + def __iadd__(self, other): ... + def __isub__(self, other): ... + def __iter__(self): ... + def __len__(self): ... + def __bool__(self): ... + def __contains__(self, item): ... + def add_from_schema(self, attribute_name, mandatory: bool = ...) -> None: ... + def add_attribute(self, definition: Any | None = ...) -> None: ... + def remove_attribute(self, item) -> None: ... + def clear_attributes(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/connection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/connection.pyi new file mode 100644 index 000000000000..bcea8aae5cdb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/connection.pyi @@ -0,0 +1,174 @@ +from _collections_abc import Generator, dict_keys +from _typeshed import Self +from types import TracebackType +from typing import Any +from typing_extensions import Literal + +from .pooling import ServerPool +from .server import Server + +SASL_AVAILABLE_MECHANISMS: Any +CLIENT_STRATEGIES: Any + +_ServerSequence = set[Server] | list[Server] | tuple[Server, ...] | Generator[Server, None, None] | dict_keys[Server, Any] + +class Connection: + connection_lock: Any + last_error: str + strategy_type: Any + user: Any + password: Any + authentication: Any + version: Any + auto_referrals: Any + request: Any + response: Any | None + result: Any + bound: bool + listening: bool + closed: bool + auto_bind: Any + sasl_mechanism: Any + sasl_credentials: Any + socket: Any + tls_started: bool + sasl_in_progress: bool + read_only: Any + lazy: Any + pool_name: Any + pool_size: int | None + cred_store: Any + pool_lifetime: Any + pool_keepalive: Any + starting_tls: bool + check_names: Any + raise_exceptions: Any + auto_range: Any + extend: Any + fast_decoder: Any + receive_timeout: Any + empty_attributes: Any + use_referral_cache: Any + auto_escape: Any + auto_encode: Any + source_address: Any + source_port_list: Any + server_pool: Any | None + server: Any + strategy: Any + send: Any + open: Any + get_response: Any + post_send_single_response: Any + post_send_search: Any + def __init__( + self, + server: Server | 
str | _ServerSequence | ServerPool, + user: str | None = ..., + password: str | None = ..., + auto_bind: Literal["DEFAULT", "NONE", "NO_TLS", "TLS_BEFORE_BIND", "TLS_AFTER_BIND"] = ..., + version: int = ..., + authentication: Literal["ANONYMOUS", "SIMPLE", "SASL", "NTLM"] | None = ..., + client_strategy: Literal[ + "SYNC", "SAFE_SYNC", "ASYNC", "LDIF", "RESTARTABLE", "REUSABLE", "MOCK_SYNC", "MOCK_ASYNC", "ASYNC_STREAM" + ] = ..., + auto_referrals: bool = ..., + auto_range: bool = ..., + sasl_mechanism: str | None = ..., + sasl_credentials: Any | None = ..., + check_names: bool = ..., + collect_usage: bool = ..., + read_only: bool = ..., + lazy: bool = ..., + raise_exceptions: bool = ..., + pool_name: str | None = ..., + pool_size: int | None = ..., + pool_lifetime: int | None = ..., + cred_store: Any | None = ..., + fast_decoder: bool = ..., + receive_timeout: Any | None = ..., + return_empty_attributes: bool = ..., + use_referral_cache: bool = ..., + auto_escape: bool = ..., + auto_encode: bool = ..., + pool_keepalive: Any | None = ..., + source_address: str | None = ..., + source_port: int | None = ..., + source_port_list: Any | None = ..., + ) -> None: ... + def repr_with_sensitive_data_stripped(self): ... + @property + def stream(self): ... + @stream.setter + def stream(self, value) -> None: ... + @property + def usage(self): ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> Literal[False] | None: ... + def bind(self, read_server_info: bool = ..., controls: Any | None = ...): ... + def rebind( + self, + user: Any | None = ..., + password: Any | None = ..., + authentication: Any | None = ..., + sasl_mechanism: Any | None = ..., + sasl_credentials: Any | None = ..., + read_server_info: bool = ..., + controls: Any | None = ..., + ): ... + def unbind(self, controls: Any | None = ...): ... 
+ def search( + self, + search_base: str, + search_filter: str, + search_scope: Literal["BASE", "LEVEL", "SUBTREE"] = ..., + dereference_aliases: Literal["NEVER", "SEARCH", "FINDING_BASE", "ALWAYS"] = ..., + attributes: Any | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Any | None = ..., + paged_size: int | None = ..., + paged_criticality: bool = ..., + paged_cookie: str | bytes | None = ..., + auto_escape: bool | None = ..., + ): ... + def compare(self, dn, attribute, value, controls: Any | None = ...): ... + def add(self, dn, object_class: Any | None = ..., attributes: Any | None = ..., controls: Any | None = ...): ... + def delete(self, dn, controls: Any | None = ...): ... + def modify(self, dn, changes, controls: Any | None = ...): ... + def modify_dn( + self, dn, relative_dn, delete_old_dn: bool = ..., new_superior: Any | None = ..., controls: Any | None = ... + ): ... + def abandon(self, message_id, controls: Any | None = ...): ... + def extended( + self, request_name, request_value: Any | None = ..., controls: Any | None = ..., no_encode: Any | None = ... + ): ... + def start_tls(self, read_server_info: bool = ...): ... + def do_sasl_bind(self, controls): ... + def do_ntlm_bind(self, controls): ... + def refresh_server_info(self) -> None: ... + def response_to_ldif( + self, + search_result: Any | None = ..., + all_base64: bool = ..., + line_separator: Any | None = ..., + sort_order: Any | None = ..., + stream: Any | None = ..., + ): ... + def response_to_json( + self, + raw: bool = ..., + search_result: Any | None = ..., + indent: int = ..., + sort: bool = ..., + stream: Any | None = ..., + checked_attributes: bool = ..., + include_empty: bool = ..., + ): ... + def response_to_file(self, target, raw: bool = ..., indent: int = ..., sort: bool = ...) -> None: ... + @property + def entries(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi new file mode 100644 index 000000000000..3958f7cb3804 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/exceptions.pyi @@ -0,0 +1,145 @@ +import socket +from _typeshed import Self +from typing import Any + +class LDAPException(Exception): ... + +class LDAPOperationResult(LDAPException): + def __new__( + cls: type[Self], + result: Any | None = ..., + description: Any | None = ..., + dn: Any | None = ..., + message: Any | None = ..., + response_type: Any | None = ..., + response: Any | None = ..., + ) -> Self: ... + result: Any + description: Any + dn: Any + message: Any + type: Any + response: Any + def __init__( + self, + result: Any | None = ..., + description: Any | None = ..., + dn: Any | None = ..., + message: Any | None = ..., + response_type: Any | None = ..., + response: Any | None = ..., + ) -> None: ... + +class LDAPOperationsErrorResult(LDAPOperationResult): ... +class LDAPProtocolErrorResult(LDAPOperationResult): ... +class LDAPTimeLimitExceededResult(LDAPOperationResult): ... +class LDAPSizeLimitExceededResult(LDAPOperationResult): ... +class LDAPAuthMethodNotSupportedResult(LDAPOperationResult): ... +class LDAPStrongerAuthRequiredResult(LDAPOperationResult): ... +class LDAPReferralResult(LDAPOperationResult): ... +class LDAPAdminLimitExceededResult(LDAPOperationResult): ... +class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult): ... +class LDAPConfidentialityRequiredResult(LDAPOperationResult): ... +class LDAPSASLBindInProgressResult(LDAPOperationResult): ... +class LDAPNoSuchAttributeResult(LDAPOperationResult): ... +class LDAPUndefinedAttributeTypeResult(LDAPOperationResult): ... +class LDAPInappropriateMatchingResult(LDAPOperationResult): ... +class LDAPConstraintViolationResult(LDAPOperationResult): ... 
+class LDAPAttributeOrValueExistsResult(LDAPOperationResult): ... +class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult): ... +class LDAPNoSuchObjectResult(LDAPOperationResult): ... +class LDAPAliasProblemResult(LDAPOperationResult): ... +class LDAPInvalidDNSyntaxResult(LDAPOperationResult): ... +class LDAPAliasDereferencingProblemResult(LDAPOperationResult): ... +class LDAPInappropriateAuthenticationResult(LDAPOperationResult): ... +class LDAPInvalidCredentialsResult(LDAPOperationResult): ... +class LDAPInsufficientAccessRightsResult(LDAPOperationResult): ... +class LDAPBusyResult(LDAPOperationResult): ... +class LDAPUnavailableResult(LDAPOperationResult): ... +class LDAPUnwillingToPerformResult(LDAPOperationResult): ... +class LDAPLoopDetectedResult(LDAPOperationResult): ... +class LDAPNamingViolationResult(LDAPOperationResult): ... +class LDAPObjectClassViolationResult(LDAPOperationResult): ... +class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult): ... +class LDAPNotAllowedOnRDNResult(LDAPOperationResult): ... +class LDAPEntryAlreadyExistsResult(LDAPOperationResult): ... +class LDAPObjectClassModsProhibitedResult(LDAPOperationResult): ... +class LDAPAffectMultipleDSASResult(LDAPOperationResult): ... +class LDAPOtherResult(LDAPOperationResult): ... +class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult): ... +class LDAPLCUPSecurityViolationResult(LDAPOperationResult): ... +class LDAPLCUPInvalidDataResult(LDAPOperationResult): ... +class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult): ... +class LDAPLCUPReloadRequiredResult(LDAPOperationResult): ... +class LDAPCanceledResult(LDAPOperationResult): ... +class LDAPNoSuchOperationResult(LDAPOperationResult): ... +class LDAPTooLateResult(LDAPOperationResult): ... +class LDAPCannotCancelResult(LDAPOperationResult): ... +class LDAPAssertionFailedResult(LDAPOperationResult): ... +class LDAPAuthorizationDeniedResult(LDAPOperationResult): ... 
+class LDAPESyncRefreshRequiredResult(LDAPOperationResult): ... + +exception_table: Any + +class LDAPExceptionError(LDAPException): ... +class LDAPConfigurationError(LDAPExceptionError): ... +class LDAPUnknownStrategyError(LDAPConfigurationError): ... +class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError): ... +class LDAPSSLConfigurationError(LDAPConfigurationError): ... +class LDAPDefinitionError(LDAPConfigurationError): ... +class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError): ... +class LDAPConfigurationParameterError(LDAPConfigurationError): ... +class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError): ... +class LDAPObjectError(LDAPExceptionError, ValueError): ... +class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError): ... +class LDAPCursorError(LDAPExceptionError): ... +class LDAPCursorAttributeError(LDAPCursorError, AttributeError): ... +class LDAPObjectDereferenceError(LDAPExceptionError): ... +class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError): ... +class LDAPInvalidTlsSpecificationError(LDAPExceptionError): ... +class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError): ... +class LDAPSignatureVerificationFailedError(LDAPExceptionError): ... +class LDAPBindError(LDAPExceptionError): ... +class LDAPInvalidServerError(LDAPExceptionError): ... +class LDAPSASLMechanismNotSupportedError(LDAPExceptionError): ... +class LDAPConnectionIsReadOnlyError(LDAPExceptionError): ... +class LDAPChangeError(LDAPExceptionError, ValueError): ... +class LDAPServerPoolError(LDAPExceptionError): ... +class LDAPServerPoolExhaustedError(LDAPExceptionError): ... +class LDAPInvalidPortError(LDAPExceptionError): ... +class LDAPStartTLSError(LDAPExceptionError): ... +class LDAPCertificateError(LDAPExceptionError): ... +class LDAPUserNameNotAllowedError(LDAPExceptionError): ... +class LDAPUserNameIsMandatoryError(LDAPExceptionError): ... +class LDAPPasswordIsMandatoryError(LDAPExceptionError): ... 
+class LDAPInvalidFilterError(LDAPExceptionError): ... +class LDAPInvalidScopeError(LDAPExceptionError, ValueError): ... +class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError): ... +class LDAPInvalidValueError(LDAPExceptionError, ValueError): ... +class LDAPControlError(LDAPExceptionError, ValueError): ... +class LDAPExtensionError(LDAPExceptionError, ValueError): ... +class LDAPLDIFError(LDAPExceptionError): ... +class LDAPSchemaError(LDAPExceptionError): ... +class LDAPSASLPrepError(LDAPExceptionError): ... +class LDAPSASLBindInProgressError(LDAPExceptionError): ... +class LDAPMetricsError(LDAPExceptionError): ... +class LDAPObjectClassError(LDAPExceptionError): ... +class LDAPInvalidDnError(LDAPExceptionError): ... +class LDAPResponseTimeoutError(LDAPExceptionError): ... +class LDAPTransactionError(LDAPExceptionError): ... +class LDAPInfoError(LDAPExceptionError): ... +class LDAPCommunicationError(LDAPExceptionError): ... +class LDAPSocketOpenError(LDAPCommunicationError): ... +class LDAPSocketCloseError(LDAPCommunicationError): ... +class LDAPSocketReceiveError(LDAPCommunicationError, socket.error): ... +class LDAPSocketSendError(LDAPCommunicationError, socket.error): ... +class LDAPSessionTerminatedByServerError(LDAPCommunicationError): ... +class LDAPUnknownResponseError(LDAPCommunicationError): ... +class LDAPUnknownRequestError(LDAPCommunicationError): ... +class LDAPReferralError(LDAPCommunicationError): ... +class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError): ... +class LDAPConnectionPoolNotStartedError(LDAPExceptionError): ... +class LDAPMaximumRetriesError(LDAPExceptionError): ... + +def communication_exception_factory(exc_to_raise, exc): ... +def start_tls_exception_factory(exc): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi new file mode 100644 index 000000000000..088c73e1fb22 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/pooling.pyi @@ -0,0 +1,42 @@ +from typing import Any + +POOLING_STRATEGIES: Any + +class ServerState: + server: Any + last_checked_time: Any + available: Any + def __init__(self, server, last_checked_time, available) -> None: ... + +class ServerPoolState: + server_states: Any + strategy: Any + server_pool: Any + last_used_server: int + initialize_time: Any + def __init__(self, server_pool) -> None: ... + def refresh(self) -> None: ... + def get_current_server(self): ... + def get_server(self): ... + def find_active_random_server(self): ... + def find_active_server(self, starting): ... + def __len__(self): ... + +class ServerPool: + servers: Any + pool_states: Any + active: Any + exhaust: Any + single: Any + strategy: Any + def __init__( + self, servers: Any | None = ..., pool_strategy=..., active: bool = ..., exhaust: bool = ..., single_state: bool = ... + ) -> None: ... + def __len__(self): ... + def __getitem__(self, item): ... + def __iter__(self): ... + def add(self, servers) -> None: ... + def remove(self, server) -> None: ... + def initialize(self, connection) -> None: ... + def get_server(self, connection): ... + def get_current_server(self, connection): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi new file mode 100644 index 000000000000..e712f803bcce --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/rdns.pyi @@ -0,0 +1,12 @@ +from typing import Any + +class ReverseDnsSetting: + OFF: Any + REQUIRE_RESOLVE_ALL_ADDRESSES: Any + REQUIRE_RESOLVE_IP_ADDRESSES_ONLY: Any + OPTIONAL_RESOLVE_ALL_ADDRESSES: Any + OPTIONAL_RESOLVE_IP_ADDRESSES_ONLY: Any + SUPPORTED_VALUES: Any + +def get_hostname_by_addr(addr, success_required: bool = ...): ... +def is_ip_addr(addr): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi new file mode 100644 index 000000000000..a2772bd148fe --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/results.pyi @@ -0,0 +1,56 @@ +from typing import Any + +RESULT_SUCCESS: int +RESULT_OPERATIONS_ERROR: int +RESULT_PROTOCOL_ERROR: int +RESULT_TIME_LIMIT_EXCEEDED: int +RESULT_SIZE_LIMIT_EXCEEDED: int +RESULT_COMPARE_FALSE: int +RESULT_COMPARE_TRUE: int +RESULT_AUTH_METHOD_NOT_SUPPORTED: int +RESULT_STRONGER_AUTH_REQUIRED: int +RESULT_RESERVED: int +RESULT_REFERRAL: int +RESULT_ADMIN_LIMIT_EXCEEDED: int +RESULT_UNAVAILABLE_CRITICAL_EXTENSION: int +RESULT_CONFIDENTIALITY_REQUIRED: int +RESULT_SASL_BIND_IN_PROGRESS: int +RESULT_NO_SUCH_ATTRIBUTE: int +RESULT_UNDEFINED_ATTRIBUTE_TYPE: int +RESULT_INAPPROPRIATE_MATCHING: int +RESULT_CONSTRAINT_VIOLATION: int +RESULT_ATTRIBUTE_OR_VALUE_EXISTS: int +RESULT_INVALID_ATTRIBUTE_SYNTAX: int +RESULT_NO_SUCH_OBJECT: int +RESULT_ALIAS_PROBLEM: int +RESULT_INVALID_DN_SYNTAX: int +RESULT_ALIAS_DEREFERENCING_PROBLEM: int +RESULT_INAPPROPRIATE_AUTHENTICATION: int +RESULT_INVALID_CREDENTIALS: int +RESULT_INSUFFICIENT_ACCESS_RIGHTS: int +RESULT_BUSY: int 
+RESULT_UNAVAILABLE: int +RESULT_UNWILLING_TO_PERFORM: int +RESULT_LOOP_DETECTED: int +RESULT_NAMING_VIOLATION: int +RESULT_OBJECT_CLASS_VIOLATION: int +RESULT_NOT_ALLOWED_ON_NON_LEAF: int +RESULT_NOT_ALLOWED_ON_RDN: int +RESULT_ENTRY_ALREADY_EXISTS: int +RESULT_OBJECT_CLASS_MODS_PROHIBITED: int +RESULT_AFFECT_MULTIPLE_DSAS: int +RESULT_OTHER: int +RESULT_LCUP_RESOURCES_EXHAUSTED: int +RESULT_LCUP_SECURITY_VIOLATION: int +RESULT_LCUP_INVALID_DATA: int +RESULT_LCUP_UNSUPPORTED_SCHEME: int +RESULT_LCUP_RELOAD_REQUIRED: int +RESULT_CANCELED: int +RESULT_NO_SUCH_OPERATION: int +RESULT_TOO_LATE: int +RESULT_CANNOT_CANCEL: int +RESULT_ASSERTION_FAILED: int +RESULT_AUTHORIZATION_DENIED: int +RESULT_E_SYNC_REFRESH_REQUIRED: int +RESULT_CODES: Any +DO_NOT_RAISE_EXCEPTIONS: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi new file mode 100644 index 000000000000..65890428ec7a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/server.pyi @@ -0,0 +1,64 @@ +from socket import AF_UNIX as AF_UNIX +from typing import Any +from typing_extensions import Literal + +unix_socket_available: bool + +class Server: + ipc: bool + host: Any + port: Any + allowed_referral_hosts: Any + ssl: Any + tls: Any + name: Any + get_info: Any + dit_lock: Any + custom_formatter: Any + custom_validator: Any + current_address: Any + connect_timeout: Any + mode: Any + def __init__( + self, + host: str, + port: int | None = ..., + use_ssl: bool = ..., + allowed_referral_hosts: Any | None = ..., + get_info: Literal["NO_INFO", "DSA", "SCHEMA", "ALL"] = ..., + tls: Any | None = ..., + formatter: Any | None = ..., + connect_timeout: Any | None = ..., + mode: Literal["IP_SYSTEM_DEFAULT", "IP_V4_ONLY", "IP_V6_ONLY", "IP_V4_PREFERRED", "IP_V6_PREFERRED"] = ..., + validator: Any | None = ..., + ) -> None: ... + @property + def address_info(self): ... 
+ def update_availability(self, address, available) -> None: ... + def reset_availability(self) -> None: ... + def check_availability( + self, source_address: Any | None = ..., source_port: Any | None = ..., source_port_list: Any | None = ... + ): ... + @staticmethod + def next_message_id(): ... + def get_info_from_server(self, connection) -> None: ... + def attach_dsa_info(self, dsa_info: Any | None = ...) -> None: ... + def attach_schema_info(self, dsa_schema: Any | None = ...) -> None: ... + @property + def info(self): ... + @property + def schema(self): ... + @staticmethod + def from_definition( + host, + dsa_info, + dsa_schema, + port: Any | None = ..., + use_ssl: bool = ..., + formatter: Any | None = ..., + validator: Any | None = ..., + ): ... + def candidate_addresses(self): ... + def has_control(self, control): ... + def has_extension(self, extension): ... + def has_feature(self, feature): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi new file mode 100644 index 000000000000..c6c52b37c0ae --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/timezone.pyi @@ -0,0 +1,11 @@ +from datetime import tzinfo +from typing import Any + +class OffsetTzInfo(tzinfo): + offset: Any + name: Any + def __init__(self, offset, name) -> None: ... + def utcoffset(self, dt): ... + def tzname(self, dt): ... + def dst(self, dt): ... + def __getinitargs__(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi new file mode 100644 index 000000000000..c776f7f88ad5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/tls.pyi @@ -0,0 +1,36 @@ +from typing import Any + +use_ssl_context: bool + +class Tls: + ssl_options: Any + validate: Any + ca_certs_file: Any + ca_certs_path: Any + ca_certs_data: Any + private_key_password: Any + version: Any + private_key_file: Any + certificate_file: Any + valid_names: Any + ciphers: Any + sni: Any + def __init__( + self, + local_private_key_file: Any | None = ..., + local_certificate_file: Any | None = ..., + validate=..., + version: Any | None = ..., + ssl_options: Any | None = ..., + ca_certs_file: Any | None = ..., + valid_names: Any | None = ..., + ca_certs_path: Any | None = ..., + ca_certs_data: Any | None = ..., + local_private_key_password: Any | None = ..., + ciphers: Any | None = ..., + sni: Any | None = ..., + ) -> None: ... + def wrap_socket(self, connection, do_handshake: bool = ...) -> None: ... + def start_tls(self, connection): ... + +def check_hostname(sock, server_name, additional_names) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi new file mode 100644 index 000000000000..ccb805bbde76 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/core/usage.pyi @@ -0,0 +1,41 @@ +from typing import Any + +class ConnectionUsage: + open_sockets: int + closed_sockets: int + wrapped_sockets: int + bytes_transmitted: int + bytes_received: int + messages_transmitted: int + messages_received: int + operations: int + abandon_operations: int + add_operations: int + bind_operations: int + compare_operations: int + delete_operations: int + extended_operations: int + modify_operations: int + modify_dn_operations: int + search_operations: int + unbind_operations: int + referrals_received: int + referrals_followed: int + referrals_connections: int + restartable_failures: int + restartable_successes: int + servers_from_pool: int + def reset(self) -> None: ... + initial_connection_start_time: Any + open_socket_start_time: Any + connection_stop_time: Any + last_transmitted_time: Any + last_received_time: Any + def __init__(self) -> None: ... + def __iadd__(self, other): ... + def update_transmitted_message(self, message, length) -> None: ... + def update_received_message(self, length) -> None: ... + def start(self, reset: bool = ...) -> None: ... + def stop(self) -> None: ... + @property + def elapsed_time(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi new file mode 100644 index 000000000000..61ca6f486b45 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/__init__.pyi @@ -0,0 +1,105 @@ +from typing import Any + +class ExtendedOperationContainer: + def __init__(self, connection) -> None: ... 
+ +class StandardExtendedOperations(ExtendedOperationContainer): + def who_am_i(self, controls: Any | None = ...): ... + def modify_password( + self, + user: Any | None = ..., + old_password: Any | None = ..., + new_password: Any | None = ..., + hash_algorithm: Any | None = ..., + salt: Any | None = ..., + controls: Any | None = ..., + ): ... + def paged_search( + self, + search_base, + search_filter, + search_scope=..., + dereference_aliases=..., + attributes: Any | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Any | None = ..., + paged_size: int = ..., + paged_criticality: bool = ..., + generator: bool = ..., + ): ... + def persistent_search( + self, + search_base: str = ..., + search_filter: str = ..., + search_scope=..., + dereference_aliases=..., + attributes=..., + size_limit: int = ..., + time_limit: int = ..., + controls: Any | None = ..., + changes_only: bool = ..., + show_additions: bool = ..., + show_deletions: bool = ..., + show_modifications: bool = ..., + show_dn_modifications: bool = ..., + notifications: bool = ..., + streaming: bool = ..., + callback: Any | None = ..., + ): ... + def funnel_search( + self, + search_base: str = ..., + search_filter: str = ..., + search_scope=..., + dereference_aliases=..., + attributes=..., + size_limit: int = ..., + time_limit: int = ..., + controls: Any | None = ..., + streaming: bool = ..., + callback: Any | None = ..., + ): ... + +class NovellExtendedOperations(ExtendedOperationContainer): + def get_bind_dn(self, controls: Any | None = ...): ... + def get_universal_password(self, user, controls: Any | None = ...): ... + def set_universal_password(self, user, new_password: Any | None = ..., controls: Any | None = ...): ... + def list_replicas(self, server_dn, controls: Any | None = ...): ... + def partition_entry_count(self, partition_dn, controls: Any | None = ...): ... 
+ def replica_info(self, server_dn, partition_dn, controls: Any | None = ...): ... + def start_transaction(self, controls: Any | None = ...): ... + def end_transaction(self, commit: bool = ..., controls: Any | None = ...): ... + def add_members_to_groups(self, members, groups, fix: bool = ..., transaction: bool = ...): ... + def remove_members_from_groups(self, members, groups, fix: bool = ..., transaction: bool = ...): ... + def check_groups_memberships(self, members, groups, fix: bool = ..., transaction: bool = ...): ... + +class MicrosoftExtendedOperations(ExtendedOperationContainer): + def dir_sync( + self, + sync_base, + sync_filter: str = ..., + attributes=..., + cookie: Any | None = ..., + object_security: bool = ..., + ancestors_first: bool = ..., + public_data_only: bool = ..., + incremental_values: bool = ..., + max_length: int = ..., + hex_guid: bool = ..., + ): ... + def modify_password(self, user, new_password, old_password: Any | None = ..., controls: Any | None = ...): ... + def unlock_account(self, user): ... + def add_members_to_groups(self, members, groups, fix: bool = ...): ... + def remove_members_from_groups(self, members, groups, fix: bool = ...): ... + def persistent_search( + self, search_base: str = ..., search_scope=..., attributes=..., streaming: bool = ..., callback: Any | None = ... + ): ... + +class ExtendedOperationsRoot(ExtendedOperationContainer): + standard: Any + novell: Any + microsoft: Any + def __init__(self, connection) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/addMembersToGroups.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/addMembersToGroups.pyi new file mode 100644 index 000000000000..486d084f95ec --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/addMembersToGroups.pyi @@ -0,0 +1 @@ +def ad_add_members_to_groups(connection, members_dn, groups_dn, fix: bool = ..., raise_error: bool = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi new file mode 100644 index 000000000000..68acbfc5d21e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/dirSync.pyi @@ -0,0 +1,30 @@ +from typing import Any + +class DirSync: + connection: Any + base: Any + filter: Any + attributes: Any + cookie: Any + object_security: Any + ancestors_first: Any + public_data_only: Any + incremental_values: Any + max_length: Any + hex_guid: Any + more_results: bool + def __init__( + self, + connection, + sync_base, + sync_filter, + attributes, + cookie, + object_security, + ancestors_first, + public_data_only, + incremental_values, + max_length, + hex_guid, + ) -> None: ... + def loop(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi new file mode 100644 index 000000000000..5b3d27a0f751 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/modifyPassword.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def ad_modify_password(connection, user_dn, new_password, old_password, controls: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi new file mode 100644 index 000000000000..95fca82fce4b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/persistentSearch.pyi @@ -0,0 +1,15 @@ +from typing import Any + +class ADPersistentSearch: + connection: Any + message_id: Any + base: Any + scope: Any + attributes: Any + controls: Any + filter: str + def __init__(self, connection, search_base, search_scope, attributes, streaming, callback) -> None: ... + def start(self) -> None: ... + def stop(self, unbind: bool = ...) -> None: ... + def next(self, block: bool = ..., timeout: Any | None = ...): ... + def funnel(self, block: bool = ..., timeout: Any | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/removeMembersFromGroups.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/removeMembersFromGroups.pyi new file mode 100644 index 000000000000..915fb9dbb562 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/removeMembersFromGroups.pyi @@ -0,0 +1 @@ +def ad_remove_members_from_groups(connection, members_dn, groups_dn, fix, raise_error: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi new file mode 100644 index 000000000000..842378089f63 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/microsoft/unlockAccount.pyi @@ -0,0 +1,3 @@ +from typing import Any + +def ad_unlock_account(connection, user_dn, controls: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/addMembersToGroups.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/addMembersToGroups.pyi new file mode 100644 index 000000000000..5ba4cab9b309 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/addMembersToGroups.pyi @@ -0,0 +1 @@ +def edir_add_members_to_groups(connection, members_dn, groups_dn, fix, transaction): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/checkGroupsMemberships.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/checkGroupsMemberships.pyi new file mode 100644 index 000000000000..551636c2904f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/checkGroupsMemberships.pyi @@ -0,0 +1 @@ +def edir_check_groups_memberships(connection, members_dn, groups_dn, fix, transaction): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi new file mode 100644 index 000000000000..f0e8c5896949 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/endTransaction.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class EndTransaction(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + asn1_spec: Any + def config(self) -> None: ... + def __init__(self, connection, commit: bool = ..., controls: Any | None = ...) -> None: ... + def populate_result(self) -> None: ... + response_value: Any + def set_response(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/getBindDn.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/getBindDn.pyi new file mode 100644 index 000000000000..4c194ce82909 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/getBindDn.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class GetBindDn(ExtendedOperation): + request_name: str + response_name: str + response_attribute: str + asn1_spec: Any + def config(self) -> None: ... + def populate_result(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi new file mode 100644 index 000000000000..0c7b6e7a80af --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/listReplicas.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class ListReplicas(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + asn1_spec: Any + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, server_dn, controls: Any | None = ...) -> None: ... + def populate_result(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi new file mode 100644 index 000000000000..59d0f1ca37ac --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/nmasGetUniversalPassword.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class NmasGetUniversalPassword(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + asn1_spec: Any + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, user, controls: Any | None = ...) -> None: ... + def populate_result(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi new file mode 100644 index 000000000000..a35b984ba8f2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/nmasSetUniversalPassword.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class NmasSetUniversalPassword(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + asn1_spec: Any + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, user, new_password, controls: Any | None = ...) -> None: ... + def populate_result(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi new file mode 100644 index 000000000000..a9127983afcf --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/partition_entry_count.pyi @@ -0,0 +1,12 @@ +from typing import Any + +from ..operation import ExtendedOperation + +class PartitionEntryCount(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, partition_dn, controls: Any | None = ...) -> None: ... + def populate_result(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/removeMembersFromGroups.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/removeMembersFromGroups.pyi new file mode 100644 index 000000000000..91a3223c52eb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/removeMembersFromGroups.pyi @@ -0,0 +1 @@ +def edir_remove_members_from_groups(connection, members_dn, groups_dn, fix, transaction): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi new file mode 100644 index 000000000000..6a4358053f43 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/replicaInfo.pyi @@ -0,0 +1,12 @@ +from typing import Any + +from ..operation import ExtendedOperation + +class ReplicaInfo(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + response_attribute: str + def config(self) -> None: ... + def __init__(self, connection, server_dn, partition_dn, controls: Any | None = ...) -> None: ... + def populate_result(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi new file mode 100644 index 000000000000..74dd78c5cf06 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/novell/startTransaction.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class StartTransaction(ExtendedOperation): + request_name: str + response_name: str + request_value: Any + asn1_spec: Any + def config(self) -> None: ... + def __init__(self, connection, controls: Any | None = ...) -> None: ... 
+ def populate_result(self) -> None: ... + response_value: Any + def set_response(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/operation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/operation.pyi new file mode 100644 index 000000000000..4b14b4b5333d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/operation.pyi @@ -0,0 +1,19 @@ +from typing import Any + +class ExtendedOperation: + connection: Any + decoded_response: Any + result: Any + asn1_spec: Any + request_name: Any + response_name: Any + request_value: Any + response_value: Any + response_attribute: Any + controls: Any + def __init__(self, connection, controls: Any | None = ...) -> None: ... + def send(self): ... + def populate_result(self) -> None: ... + def decode_response(self, response: Any | None = ...) -> None: ... + def set_response(self) -> None: ... + def config(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi new file mode 100644 index 000000000000..741f7c5b96c5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PagedSearch.pyi @@ -0,0 +1,32 @@ +from typing import Any + +def paged_search_generator( + connection, + search_base, + search_filter, + search_scope=..., + dereference_aliases=..., + attributes: Any | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Any | None = ..., + paged_size: int = ..., + paged_criticality: bool = ..., +) -> None: ... 
+def paged_search_accumulator( + connection, + search_base, + search_filter, + search_scope=..., + dereference_aliases=..., + attributes: Any | None = ..., + size_limit: int = ..., + time_limit: int = ..., + types_only: bool = ..., + get_operational_attributes: bool = ..., + controls: Any | None = ..., + paged_size: int = ..., + paged_criticality: bool = ..., +): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi new file mode 100644 index 000000000000..3af402ec3654 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/PersistentSearch.pyi @@ -0,0 +1,36 @@ +from typing import Any + +class PersistentSearch: + connection: Any + changes_only: Any + notifications: Any + message_id: Any + base: Any + filter: Any + scope: Any + dereference_aliases: Any + attributes: Any + size_limit: Any + time_limit: Any + controls: Any + def __init__( + self, + connection, + search_base, + search_filter, + search_scope, + dereference_aliases, + attributes, + size_limit, + time_limit, + controls, + changes_only, + events_type, + notifications, + streaming, + callback, + ) -> None: ... + def start(self) -> None: ... + def stop(self, unbind: bool = ...) -> None: ... + def next(self, block: bool = ..., timeout: Any | None = ...): ... + def funnel(self, block: bool = ..., timeout: Any | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi new file mode 100644 index 000000000000..5df4b6d1ecbd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/modifyPassword.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from ...extend.operation import ExtendedOperation + +class ModifyPassword(ExtendedOperation): + request_name: str + request_value: Any + asn1_spec: Any + response_attribute: str + def config(self) -> None: ... + def __init__( + self, + connection, + user: Any | None = ..., + old_password: Any | None = ..., + new_password: Any | None = ..., + hash_algorithm: Any | None = ..., + salt: Any | None = ..., + controls: Any | None = ..., + ) -> None: ... + def populate_result(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/whoAmI.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/whoAmI.pyi new file mode 100644 index 000000000000..e61b175e92c9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/extend/standard/whoAmI.pyi @@ -0,0 +1,7 @@ +from ...extend.operation import ExtendedOperation + +class WhoAmI(ExtendedOperation): + request_name: str + response_attribute: str + def config(self) -> None: ... + def populate_result(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/abandon.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/abandon.pyi new file mode 100644 index 000000000000..2413c214cdc1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/abandon.pyi @@ -0,0 +1,2 @@ +def abandon_operation(msg_id): ... +def abandon_request_to_dict(request): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/add.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/add.pyi new file mode 100644 index 000000000000..59d87bae8a03 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/add.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def add_operation( + dn, attributes, auto_encode, schema: Any | None = ..., validator: Any | None = ..., check_names: bool = ... +): ... +def add_request_to_dict(request): ... +def add_response_to_dict(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/bind.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/bind.pyi new file mode 100644 index 000000000000..5079a87da77d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/bind.pyi @@ -0,0 +1,23 @@ +from typing import Any + +def bind_operation( + version, + authentication, + name: str = ..., + password: Any | None = ..., + sasl_mechanism: Any | None = ..., + sasl_credentials: Any | None = ..., + auto_encode: bool = ..., +): ... +def bind_request_to_dict(request): ... 
+def bind_response_operation( + result_code, + matched_dn: str = ..., + diagnostic_message: str = ..., + referral: Any | None = ..., + server_sasl_credentials: Any | None = ..., +): ... +def bind_response_to_dict(response): ... +def sicily_bind_response_to_dict(response): ... +def bind_response_to_dict_fast(response): ... +def sicily_bind_response_to_dict_fast(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/compare.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/compare.pyi new file mode 100644 index 000000000000..4b12d354700d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/compare.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def compare_operation( + dn, attribute, value, auto_encode, schema: Any | None = ..., validator: Any | None = ..., check_names: bool = ... +): ... +def compare_request_to_dict(request): ... +def compare_response_to_dict(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/delete.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/delete.pyi new file mode 100644 index 000000000000..618c8f41c96a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/delete.pyi @@ -0,0 +1,3 @@ +def delete_operation(dn): ... +def delete_request_to_dict(request): ... +def delete_response_to_dict(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/extended.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/extended.pyi new file mode 100644 index 000000000000..2cc46be69181 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/extended.pyi @@ -0,0 +1,8 @@ +from typing import Any + +def extended_operation(request_name, request_value: Any | None = ..., no_encode: Any | None = ...): ... 
+def extended_request_to_dict(request): ... +def extended_response_to_dict(response): ... +def intermediate_response_to_dict(response): ... +def extended_response_to_dict_fast(response): ... +def intermediate_response_to_dict_fast(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi new file mode 100644 index 000000000000..8700499b9e94 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modify.pyi @@ -0,0 +1,9 @@ +from typing import Any + +change_table: Any + +def modify_operation( + dn, changes, auto_encode, schema: Any | None = ..., validator: Any | None = ..., check_names: bool = ... +): ... +def modify_request_to_dict(request): ... +def modify_response_to_dict(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modifyDn.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modifyDn.pyi new file mode 100644 index 000000000000..db754b8fa4ca --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/modifyDn.pyi @@ -0,0 +1,5 @@ +from typing import Any + +def modify_dn_operation(dn, new_relative_dn, delete_old_rdn: bool = ..., new_superior: Any | None = ...): ... +def modify_dn_request_to_dict(request): ... +def modify_dn_response_to_dict(response): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi new file mode 100644 index 000000000000..78a728ed439e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/search.pyi @@ -0,0 +1,63 @@ +from typing import Any + +ROOT: int +AND: int +OR: int +NOT: int +MATCH_APPROX: int +MATCH_GREATER_OR_EQUAL: int +MATCH_LESS_OR_EQUAL: int +MATCH_EXTENSIBLE: int +MATCH_PRESENT: int +MATCH_SUBSTRING: int +MATCH_EQUAL: int +SEARCH_OPEN: int +SEARCH_OPEN_OR_CLOSE: int +SEARCH_MATCH_OR_CLOSE: int +SEARCH_MATCH_OR_CONTROL: int + +class FilterNode: + tag: Any + parent: Any + assertion: Any + elements: Any + def __init__(self, tag: Any | None = ..., assertion: Any | None = ...) -> None: ... + def append(self, filter_node): ... + +def evaluate_match(match, schema, auto_escape, auto_encode, validator, check_names): ... +def parse_filter(search_filter, schema, auto_escape, auto_encode, validator, check_names): ... +def compile_filter(filter_node): ... +def build_attribute_selection(attribute_list, schema): ... +def search_operation( + search_base, + search_filter, + search_scope, + dereference_aliases, + attributes, + size_limit, + time_limit, + types_only, + auto_escape, + auto_encode, + schema: Any | None = ..., + validator: Any | None = ..., + check_names: bool = ..., +): ... +def decode_vals(vals): ... +def decode_vals_fast(vals): ... +def attributes_to_dict(attribute_list): ... +def attributes_to_dict_fast(attribute_list): ... +def decode_raw_vals(vals): ... +def decode_raw_vals_fast(vals): ... +def raw_attributes_to_dict(attribute_list): ... +def raw_attributes_to_dict_fast(attribute_list): ... +def checked_attributes_to_dict(attribute_list, schema: Any | None = ..., custom_formatter: Any | None = ...): ... 
+def checked_attributes_to_dict_fast(attribute_list, schema: Any | None = ..., custom_formatter: Any | None = ...): ... +def matching_rule_assertion_to_string(matching_rule_assertion): ... +def filter_to_string(filter_object): ... +def search_request_to_dict(request): ... +def search_result_entry_response_to_dict(response, schema, custom_formatter, check_names): ... +def search_result_done_response_to_dict(response): ... +def search_result_reference_response_to_dict(response): ... +def search_result_entry_response_to_dict_fast(response, schema, custom_formatter, check_names): ... +def search_result_reference_response_to_dict_fast(response): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/unbind.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/unbind.pyi new file mode 100644 index 000000000000..0c66a79d615c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/operation/unbind.pyi @@ -0,0 +1 @@ +def unbind_operation(): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/controls.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/controls.pyi new file mode 100644 index 000000000000..7c67b9f3c104 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/controls.pyi @@ -0,0 +1 @@ +def build_control(oid, criticality, value, encode_control_value: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/convert.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/convert.pyi new file mode 100644 index 000000000000..4bf4baf99b3b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/convert.pyi @@ -0,0 +1,22 @@ +from typing import Any + +def to_str_or_normalized_unicode(val): ... +def attribute_to_dict(attribute): ... +def attributes_to_dict(attributes): ... +def referrals_to_list(referrals): ... +def search_refs_to_list(search_refs): ... +def search_refs_to_list_fast(search_refs): ... +def sasl_to_dict(sasl): ... +def authentication_choice_to_dict(authentication_choice): ... +def partial_attribute_to_dict(modification): ... +def change_to_dict(change): ... +def changes_to_list(changes): ... +def attributes_to_list(attributes): ... +def ava_to_dict(ava): ... +def substring_to_dict(substring): ... +def prepare_changes_for_request(changes): ... +def build_controls_list(controls): ... +def validate_assertion_value(schema, name, value, auto_escape, auto_encode, validator, check_names): ... +def validate_attribute_value(schema, name, value, auto_encode, validator: Any | None = ..., check_names: bool = ...): ... +def prepare_filter_for_sending(raw_string): ... +def prepare_for_sending(raw_string): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi new file mode 100644 index 000000000000..69c4e1aea172 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/formatters.pyi @@ -0,0 +1,16 @@ +from typing import Any + +def format_unicode(raw_value): ... +def format_integer(raw_value): ... +def format_binary(raw_value): ... +def format_uuid(raw_value): ... +def format_uuid_le(raw_value): ... +def format_boolean(raw_value): ... +def format_ad_timestamp(raw_value): ... + +time_format: Any + +def format_time(raw_value): ... +def format_ad_timedelta(raw_value): ... +def format_time_with_0_year(raw_value): ... +def format_sid(raw_value): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi new file mode 100644 index 000000000000..f85dd6485adf --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/standard.pyi @@ -0,0 +1,7 @@ +from typing import Any + +standard_formatter: Any + +def find_attribute_helpers(attr_type, name, custom_formatter): ... +def format_attribute_values(schema, name, values, custom_formatter): ... +def find_attribute_validator(schema, name, custom_validator): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/validators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/validators.pyi new file mode 100644 index 000000000000..b49eee6262d2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/formatters/validators.pyi @@ -0,0 +1,16 @@ +def check_backslash(value): ... +def check_type(input_value, value_type): ... +def always_valid(input_value): ... +def validate_generic_single_value(input_value): ... +def validate_zero_and_minus_one_and_positive_int(input_value): ... +def validate_integer(input_value): ... +def validate_bytes(input_value): ... +def validate_boolean(input_value): ... +def validate_time_with_0_year(input_value): ... +def validate_time(input_value): ... +def validate_ad_timestamp(input_value): ... +def validate_ad_timedelta(input_value): ... +def validate_guid(input_value): ... +def validate_uuid(input_value): ... +def validate_uuid_le(input_value): ... +def validate_sid(input_value): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/microsoft.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/microsoft.pyi new file mode 100644 index 000000000000..204a718e333d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/microsoft.pyi @@ -0,0 +1,27 @@ +from typing import Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.type.univ import Sequence +Sequence = Any + +class SicilyBindResponse(Sequence): + tagSet: Any + componentType: Any + +class DirSyncControlRequestValue(Sequence): + componentType: Any + +class DirSyncControlResponseValue(Sequence): + componentType: Any + +class SdFlags(Sequence): + componentType: Any + +class ExtendedDN(Sequence): + componentType: Any + +def dir_sync_control(criticality, object_security, ancestors_first, public_data_only, incremental_values, max_length, cookie): ... +def extended_dn_control(criticality: bool = ..., hex_format: bool = ...): ... +def show_deleted_control(criticality: bool = ...): ... +def security_descriptor_control(criticality: bool = ..., sdflags: int = ...): ... +def persistent_search_control(criticality: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/novell.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/novell.pyi new file mode 100644 index 000000000000..8ced7089e996 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/novell.pyi @@ -0,0 +1,72 @@ +from typing import Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.type.univ import Integer, OctetString, Sequence, SequenceOf +Integer = Any +OctetString = Any +Sequence = Any +SequenceOf = Any + +NMAS_LDAP_EXT_VERSION: int + +class Identity(OctetString): + encoding: str + +class LDAPDN(OctetString): + tagSet: Any + encoding: str + +class Password(OctetString): + tagSet: Any + encoding: str + +class LDAPOID(OctetString): + tagSet: Any + encoding: str + +class GroupCookie(Integer): + tagSet: Any + +class NmasVer(Integer): + tagSet: Any + +class Error(Integer): + tagSet: Any + +class NmasGetUniversalPasswordRequestValue(Sequence): + componentType: Any + +class NmasGetUniversalPasswordResponseValue(Sequence): + componentType: Any + +class NmasSetUniversalPasswordRequestValue(Sequence): + componentType: Any + +class NmasSetUniversalPasswordResponseValue(Sequence): + componentType: Any + +class ReplicaList(SequenceOf): + componentType: Any + +class ReplicaInfoRequestValue(Sequence): + tagSet: Any + componentType: Any + +class ReplicaInfoResponseValue(Sequence): + tagSet: Any + componentType: Any + +class CreateGroupTypeRequestValue(Sequence): + componentType: Any + +class CreateGroupTypeResponseValue(Sequence): + componentType: Any + +class EndGroupTypeRequestValue(Sequence): + componentType: Any + +class EndGroupTypeResponseValue(Sequence): + componentType: Any + +class GroupingControlValue(Sequence): + componentType: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi new file mode 100644 
index 000000000000..77efddad270f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/oid.pyi @@ -0,0 +1,29 @@ +from typing import Any + +OID_CONTROL: str +OID_EXTENSION: str +OID_FEATURE: str +OID_UNSOLICITED_NOTICE: str +OID_ATTRIBUTE_TYPE: str +OID_DIT_CONTENT_RULE: str +OID_LDAP_URL_EXTENSION: str +OID_FAMILY: str +OID_MATCHING_RULE: str +OID_NAME_FORM: str +OID_OBJECT_CLASS: str +OID_ADMINISTRATIVE_ROLE: str +OID_LDAP_SYNTAX: str +CLASS_STRUCTURAL: str +CLASS_ABSTRACT: str +CLASS_AUXILIARY: str +ATTRIBUTE_USER_APPLICATION: str +ATTRIBUTE_DIRECTORY_OPERATION: str +ATTRIBUTE_DISTRIBUTED_OPERATION: str +ATTRIBUTE_DSA_OPERATION: str + +def constant_to_oid_kind(oid_kind): ... +def decode_oids(sequence): ... +def decode_syntax(syntax): ... +def oid_to_string(oid): ... + +Oids: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/persistentSearch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/persistentSearch.pyi new file mode 100644 index 000000000000..933ae390b28c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/persistentSearch.pyi @@ -0,0 +1,17 @@ +from typing import Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.type.univ import Enumerated, Sequence +Enumerated = Any +Sequence = Any + +class PersistentSearchControl(Sequence): + componentType: Any + +class ChangeType(Enumerated): + namedValues: Any + +class EntryChangeNotificationControl(Sequence): + componentType: Any + +def persistent_search_control(change_types, changes_only: bool = ..., return_ecs: bool = ..., criticality: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2696.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2696.pyi new file mode 100644 index 000000000000..8cb346f0304b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2696.pyi @@ -0,0 +1,21 @@ +from typing import Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.type.univ import Integer, OctetString, Sequence +Integer = Any +OctetString = Any +Sequence = Any + +MAXINT: Any +rangeInt0ToMaxConstraint: Any + +class Integer0ToMax(Integer): + subtypeSpec: Any + +class Size(Integer0ToMax): ... +class Cookie(OctetString): ... + +class RealSearchControlValue(Sequence): + componentType: Any + +def paged_search_control(criticality: bool = ..., size: int = ..., cookie: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi new file mode 100644 index 000000000000..b589bc57ce65 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc2849.pyi @@ -0,0 +1,18 @@ +from typing import Any + +conf_ldif_line_length: Any + +def safe_ldif_string(bytes_value): ... +def add_controls(controls, all_base64): ... +def add_attributes(attributes, all_base64): ... +def sort_ldif_lines(lines, sort_order): ... +def search_response_to_ldif(entries, all_base64, sort_order: Any | None = ...): ... +def add_request_to_ldif(entry, all_base64, sort_order: Any | None = ...): ... +def delete_request_to_ldif(entry, all_base64, sort_order: Any | None = ...): ... +def modify_request_to_ldif(entry, all_base64, sort_order: Any | None = ...): ... +def modify_dn_request_to_ldif(entry, all_base64, sort_order: Any | None = ...): ... +def operation_to_ldif(operation_type, entries, all_base64: bool = ..., sort_order: Any | None = ...): ... 
+def add_ldif_header(ldif_lines): ... +def ldif_sort(line, sort_order): ... +def decode_persistent_search_control(change): ... +def persistent_search_response_to_ldif(change): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc3062.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc3062.pyi new file mode 100644 index 000000000000..c508ccbbfb10 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc3062.pyi @@ -0,0 +1,28 @@ +from typing import Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.type.univ import OctetString, Sequence +OctetString = Any +Sequence = Any + +class UserIdentity(OctetString): + tagSet: Any + encoding: str + +class OldPasswd(OctetString): + tagSet: Any + encoding: str + +class NewPasswd(OctetString): + tagSet: Any + encoding: str + +class GenPasswd(OctetString): + tagSet: Any + encoding: str + +class PasswdModifyRequestValue(Sequence): + componentType: Any + +class PasswdModifyResponseValue(Sequence): + componentType: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4511.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4511.pyi new file mode 100644 index 000000000000..070910ae3868 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4511.pyi @@ -0,0 +1,323 @@ +from typing import Any as _Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.type.univ import Boolean, Choice, Enumerated, Integer, Null, OctetString, Sequence, SequenceOf, SetOf +Boolean = _Any +Choice = _Any +Enumerated = _Any +Integer = _Any +Null = _Any +OctetString = _Any +Sequence = _Any +SequenceOf = _Any +SetOf = _Any + +LDAP_MAX_INT: int +MAXINT: _Any +rangeInt0ToMaxConstraint: _Any +rangeInt1To127Constraint: _Any +size1ToMaxConstraint: _Any +responseValueConstraint: _Any +numericOIDConstraint: _Any +distinguishedNameConstraint: _Any 
+nameComponentConstraint: _Any +attributeDescriptionConstraint: _Any +uriConstraint: _Any +attributeSelectorConstraint: _Any + +class Integer0ToMax(Integer): + subtypeSpec: _Any + +class LDAPString(OctetString): + encoding: str + +class MessageID(Integer0ToMax): ... +class LDAPOID(OctetString): ... +class LDAPDN(LDAPString): ... +class RelativeLDAPDN(LDAPString): ... +class AttributeDescription(LDAPString): ... + +class AttributeValue(OctetString): + encoding: str + +class AssertionValue(OctetString): + encoding: str + +class AttributeValueAssertion(Sequence): + componentType: _Any + +class MatchingRuleId(LDAPString): ... + +class Vals(SetOf): + componentType: _Any + +class ValsAtLeast1(SetOf): + componentType: _Any + subtypeSpec: _Any + +class PartialAttribute(Sequence): + componentType: _Any + +class Attribute(Sequence): + componentType: _Any + +class AttributeList(SequenceOf): + componentType: _Any + +class Simple(OctetString): + tagSet: _Any + encoding: str + +class Credentials(OctetString): + encoding: str + +class SaslCredentials(Sequence): + tagSet: _Any + componentType: _Any + +class SicilyPackageDiscovery(OctetString): + tagSet: _Any + encoding: str + +class SicilyNegotiate(OctetString): + tagSet: _Any + encoding: str + +class SicilyResponse(OctetString): + tagSet: _Any + encoding: str + +class AuthenticationChoice(Choice): + componentType: _Any + +class Version(Integer): + subtypeSpec: _Any + +class ResultCode(Enumerated): + namedValues: _Any + subTypeSpec: _Any + +class URI(LDAPString): ... 
+ +class Referral(SequenceOf): + tagSet: _Any + componentType: _Any + +class ServerSaslCreds(OctetString): + tagSet: _Any + encoding: str + +class LDAPResult(Sequence): + componentType: _Any + +class Criticality(Boolean): + defaultValue: bool + +class ControlValue(OctetString): + encoding: str + +class Control(Sequence): + componentType: _Any + +class Controls(SequenceOf): + tagSet: _Any + componentType: _Any + +class Scope(Enumerated): + namedValues: _Any + +class DerefAliases(Enumerated): + namedValues: _Any + +class TypesOnly(Boolean): ... +class Selector(LDAPString): ... + +class AttributeSelection(SequenceOf): + componentType: _Any + +class MatchingRule(MatchingRuleId): + tagSet: _Any + +class Type(AttributeDescription): + tagSet: _Any + +class MatchValue(AssertionValue): + tagSet: _Any + +class DnAttributes(Boolean): + tagSet: _Any + defaultValue: _Any + +class MatchingRuleAssertion(Sequence): + componentType: _Any + +class Initial(AssertionValue): + tagSet: _Any + +class Any(AssertionValue): + tagSet: _Any + +class Final(AssertionValue): + tagSet: _Any + +class Substring(Choice): + componentType: _Any + +class Substrings(SequenceOf): + subtypeSpec: _Any + componentType: _Any + +class SubstringFilter(Sequence): + tagSet: _Any + componentType: _Any + +class And(SetOf): + tagSet: _Any + subtypeSpec: _Any + +class Or(SetOf): + tagSet: _Any + subtypeSpec: _Any + +class Not(Choice): ... 
+ +class EqualityMatch(AttributeValueAssertion): + tagSet: _Any + +class GreaterOrEqual(AttributeValueAssertion): + tagSet: _Any + +class LessOrEqual(AttributeValueAssertion): + tagSet: _Any + +class Present(AttributeDescription): + tagSet: _Any + +class ApproxMatch(AttributeValueAssertion): + tagSet: _Any + +class ExtensibleMatch(MatchingRuleAssertion): + tagSet: _Any + +class Filter(Choice): + componentType: _Any + +class PartialAttributeList(SequenceOf): + componentType: _Any + +class Operation(Enumerated): + namedValues: _Any + +class Change(Sequence): + componentType: _Any + +class Changes(SequenceOf): + componentType: _Any + +class DeleteOldRDN(Boolean): ... + +class NewSuperior(LDAPDN): + tagSet: _Any + +class RequestName(LDAPOID): + tagSet: _Any + +class RequestValue(OctetString): + tagSet: _Any + encoding: str + +class ResponseName(LDAPOID): + tagSet: _Any + +class ResponseValue(OctetString): + tagSet: _Any + encoding: str + +class IntermediateResponseName(LDAPOID): + tagSet: _Any + +class IntermediateResponseValue(OctetString): + tagSet: _Any + encoding: str + +class BindRequest(Sequence): + tagSet: _Any + componentType: _Any + +class BindResponse(Sequence): + tagSet: _Any + componentType: _Any + +class UnbindRequest(Null): + tagSet: _Any + +class SearchRequest(Sequence): + tagSet: _Any + componentType: _Any + +class SearchResultReference(SequenceOf): + tagSet: _Any + subtypeSpec: _Any + componentType: _Any + +class SearchResultEntry(Sequence): + tagSet: _Any + componentType: _Any + +class SearchResultDone(LDAPResult): + tagSet: _Any + +class ModifyRequest(Sequence): + tagSet: _Any + componentType: _Any + +class ModifyResponse(LDAPResult): + tagSet: _Any + +class AddRequest(Sequence): + tagSet: _Any + componentType: _Any + +class AddResponse(LDAPResult): + tagSet: _Any + +class DelRequest(LDAPDN): + tagSet: _Any + +class DelResponse(LDAPResult): + tagSet: _Any + +class ModifyDNRequest(Sequence): + tagSet: _Any + componentType: _Any + +class 
ModifyDNResponse(LDAPResult): + tagSet: _Any + +class CompareRequest(Sequence): + tagSet: _Any + componentType: _Any + +class CompareResponse(LDAPResult): + tagSet: _Any + +class AbandonRequest(MessageID): + tagSet: _Any + +class ExtendedRequest(Sequence): + tagSet: _Any + componentType: _Any + +class ExtendedResponse(Sequence): + tagSet: _Any + componentType: _Any + +class IntermediateResponse(Sequence): + tagSet: _Any + componentType: _Any + +class ProtocolOp(Choice): + componentType: _Any + +class LDAPMessage(Sequence): + componentType: _Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi new file mode 100644 index 000000000000..816dfeb2d0a6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4512.pyi @@ -0,0 +1,218 @@ +from typing import Any + +def constant_to_class_kind(value): ... +def constant_to_attribute_usage(value): ... +def attribute_usage_to_constant(value): ... +def quoted_string_to_list(quoted_string): ... +def oids_string_to_list(oid_string): ... +def extension_to_tuple(extension_string): ... +def list_to_string(list_object): ... + +class BaseServerInfo: + raw: Any + def __init__(self, raw_attributes) -> None: ... + @classmethod + def from_json(cls, json_definition, schema: Any | None = ..., custom_formatter: Any | None = ...): ... + @classmethod + def from_file(cls, target, schema: Any | None = ..., custom_formatter: Any | None = ...): ... + def to_file(self, target, indent: int = ..., sort: bool = ...) -> None: ... + def to_json(self, indent: int = ..., sort: bool = ...): ... 
+ +class DsaInfo(BaseServerInfo): + alt_servers: Any + naming_contexts: Any + supported_controls: Any + supported_extensions: Any + supported_features: Any + supported_ldap_versions: Any + supported_sasl_mechanisms: Any + vendor_name: Any + vendor_version: Any + schema_entry: Any + other: Any + def __init__(self, attributes, raw_attributes) -> None: ... + +class SchemaInfo(BaseServerInfo): + schema_entry: Any + create_time_stamp: Any + modify_time_stamp: Any + attribute_types: Any + object_classes: Any + matching_rules: Any + matching_rule_uses: Any + dit_content_rules: Any + dit_structure_rules: Any + name_forms: Any + ldap_syntaxes: Any + other: Any + def __init__(self, schema_entry, attributes, raw_attributes) -> None: ... + def is_valid(self): ... + +class BaseObjectInfo: + oid: Any + name: Any + description: Any + obsolete: Any + extensions: Any + experimental: Any + raw_definition: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... + @property + def oid_info(self): ... + @classmethod + def from_definition(cls, definitions): ... + +class MatchingRuleInfo(BaseObjectInfo): + syntax: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + syntax: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... + +class MatchingRuleUseInfo(BaseObjectInfo): + apply_to: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + apply_to: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... 
+ +class ObjectClassInfo(BaseObjectInfo): + superior: Any + kind: Any + must_contain: Any + may_contain: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + superior: Any | None = ..., + kind: Any | None = ..., + must_contain: Any | None = ..., + may_contain: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... + +class AttributeTypeInfo(BaseObjectInfo): + superior: Any + equality: Any + ordering: Any + substring: Any + syntax: Any + min_length: Any + single_value: Any + collective: Any + no_user_modification: Any + usage: Any + mandatory_in: Any + optional_in: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + superior: Any | None = ..., + equality: Any | None = ..., + ordering: Any | None = ..., + substring: Any | None = ..., + syntax: Any | None = ..., + min_length: Any | None = ..., + single_value: bool = ..., + collective: bool = ..., + no_user_modification: bool = ..., + usage: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... + +class LdapSyntaxInfo(BaseObjectInfo): + def __init__( + self, + oid: Any | None = ..., + description: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... 
+ +class DitContentRuleInfo(BaseObjectInfo): + auxiliary_classes: Any + must_contain: Any + may_contain: Any + not_contains: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + auxiliary_classes: Any | None = ..., + must_contain: Any | None = ..., + may_contain: Any | None = ..., + not_contains: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... + +class DitStructureRuleInfo(BaseObjectInfo): + superior: Any + name_form: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + name_form: Any | None = ..., + superior: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... + +class NameFormInfo(BaseObjectInfo): + object_class: Any + must_contain: Any + may_contain: Any + def __init__( + self, + oid: Any | None = ..., + name: Any | None = ..., + description: Any | None = ..., + obsolete: bool = ..., + object_class: Any | None = ..., + must_contain: Any | None = ..., + may_contain: Any | None = ..., + extensions: Any | None = ..., + experimental: Any | None = ..., + definition: Any | None = ..., + ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4527.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4527.pyi new file mode 100644 index 000000000000..eacc393c49c1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/rfc4527.pyi @@ -0,0 +1,2 @@ +def pre_read_control(attributes, criticality: bool = ...): ... +def post_read_control(attributes, criticality: bool = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/digestMd5.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/digestMd5.pyi new file mode 100644 index 000000000000..433416f25d27 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/digestMd5.pyi @@ -0,0 +1,9 @@ +STATE_KEY: int +STATE_VALUE: int + +def md5_h(value): ... +def md5_kd(k, s): ... +def md5_hex(value): ... +def md5_hmac(k, s): ... +def sasl_digest_md5(connection, controls): ... +def decode_directives(directives_string): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/external.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/external.pyi new file mode 100644 index 000000000000..8403ee7944af --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/external.pyi @@ -0,0 +1 @@ +def sasl_external(connection, controls): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/kerberos.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/kerberos.pyi new file mode 100644 index 000000000000..7a795b210a71 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/kerberos.pyi @@ -0,0 +1,8 @@ +posix_gssapi_unavailable: bool +windows_gssapi_unavailable: bool +NO_SECURITY_LAYER: int +INTEGRITY_PROTECTION: int +CONFIDENTIALITY_PROTECTION: int + +def get_channel_bindings(ssl_socket): ... +def sasl_gssapi(connection, controls): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/plain.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/plain.pyi new file mode 100644 index 000000000000..5be879c5abef --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/plain.pyi @@ -0,0 +1 @@ +def sasl_plain(connection, controls): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/sasl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/sasl.pyi new file mode 100644 index 000000000000..564c26ab5dc7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/sasl/sasl.pyi @@ -0,0 +1,5 @@ +def sasl_prep(data): ... +def validate_simple_password(password, accept_empty: bool = ...): ... +def abort_sasl_negotiation(connection, controls): ... +def send_sasl_negotiation(connection, controls, payload): ... +def random_hex_string(size): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/ad2012R2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/ad2012R2.pyi new file mode 100644 index 000000000000..3f484293670c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/ad2012R2.pyi @@ -0,0 +1,2 @@ +ad_2012_r2_schema: str +ad_2012_r2_dsa_info: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/ds389.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/ds389.pyi new file mode 100644 index 000000000000..4d90cdc59857 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/ds389.pyi @@ -0,0 +1,2 @@ +ds389_1_3_3_schema: str +ds389_1_3_3_dsa_info: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/edir888.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/edir888.pyi new file mode 100644 index 000000000000..5b982a48e0d1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/edir888.pyi @@ -0,0 +1,2 @@ +edir_8_8_8_schema: str +edir_8_8_8_dsa_info: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/edir914.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/edir914.pyi new file mode 100644 index 000000000000..d7c9cf64f817 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/edir914.pyi @@ -0,0 +1,2 @@ +edir_9_1_4_schema: str +edir_9_1_4_dsa_info: str diff --git 
a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/slapd24.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/slapd24.pyi new file mode 100644 index 000000000000..c080d0820aa0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/protocol/schemas/slapd24.pyi @@ -0,0 +1,2 @@ +slapd_2_4_schema: str +slapd_2_4_dsa_info: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi new file mode 100644 index 000000000000..b0ed1b4fd07e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asyncStream.pyi @@ -0,0 +1,18 @@ +from typing import Any + +from ..strategy.asynchronous import AsyncStrategy + +class AsyncStreamStrategy(AsyncStrategy): + can_stream: bool + line_separator: Any + all_base64: bool + stream: Any + order: Any + persistent_search_message_id: Any + streaming: bool + callback: Any + events: Any + def __init__(self, ldap_connection) -> None: ... + def accumulate_stream(self, message_id, change) -> None: ... + def get_stream(self): ... + def set_stream(self, value) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi new file mode 100644 index 000000000000..ecc746f863f3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/asynchronous.pyi @@ -0,0 +1,27 @@ +from threading import Thread +from typing import Any + +from ..strategy.base import BaseStrategy + +class AsyncStrategy(BaseStrategy): + class ReceiverSocketThread(Thread): + connection: Any + socket_size: Any + def __init__(self, ldap_connection) -> None: ... + def run(self) -> None: ... + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + receiver: Any + async_lock: Any + event_lock: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def close(self) -> None: ... + def set_event_for_message(self, message_id) -> None: ... + def post_send_search(self, message_id): ... + def post_send_single_response(self, message_id): ... + def receiving(self) -> None: ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi new file mode 100644 index 000000000000..4c7a07a1a415 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/base.pyi @@ -0,0 +1,42 @@ +from typing import Any + +unix_socket_available: bool +SESSION_TERMINATED_BY_SERVER: str +TRANSACTION_ERROR: str +RESPONSE_COMPLETE: str + +class BaseStrategy: + connection: Any + sync: Any + no_real_dsa: Any + pooled: Any + can_stream: Any + referral_cache: Any + thread_safe: bool + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... 
+ def close(self) -> None: ... + def send(self, message_type, request, controls: Any | None = ...): ... + def get_response(self, message_id, timeout: Any | None = ..., get_request: bool = ...): ... + @staticmethod + def compute_ldap_message_size(data): ... + def decode_response(self, ldap_message): ... + def decode_response_fast(self, ldap_message): ... + @staticmethod + def decode_control(control): ... + @staticmethod + def decode_control_fast(control, from_server: bool = ...): ... + @staticmethod + def decode_request(message_type, component, controls: Any | None = ...): ... + def valid_referral_list(self, referrals): ... + def do_next_range_search(self, request, response, attr_name): ... + def do_search_on_auto_range(self, request, response): ... + def create_referral_connection(self, referrals): ... + def do_operation_on_referral(self, request, referrals): ... + def sending(self, ldap_message) -> None: ... + def receiving(self) -> None: ... + def post_send_single_response(self, message_id) -> None: ... + def post_send_search(self, message_id) -> None: ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... + def unbind_referral_cache(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi new file mode 100644 index 000000000000..7ac55a30b077 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/ldifProducer.pyi @@ -0,0 +1,21 @@ +from typing import Any + +from .base import BaseStrategy + +class LdifProducerStrategy(BaseStrategy): + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + line_separator: Any + all_base64: bool + stream: Any + order: Any + def __init__(self, ldap_connection) -> None: ... + def receiving(self) -> None: ... + def send(self, message_type, request, controls: Any | None = ...): ... 
+ def post_send_single_response(self, message_id): ... + def post_send_search(self, message_id) -> None: ... + def accumulate_stream(self, fragment) -> None: ... + def get_stream(self): ... + def set_stream(self, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi new file mode 100644 index 000000000000..2acd74751413 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockAsync.pyi @@ -0,0 +1,11 @@ +from typing import Any + +from .asynchronous import AsyncStrategy +from .mockBase import MockBaseStrategy + +class MockAsyncStrategy(MockBaseStrategy, AsyncStrategy): + def __init__(self, ldap_connection) -> None: ... + def post_send_search(self, payload): ... + bound: Any + def post_send_single_response(self, payload): ... + def get_response(self, message_id, timeout: Any | None = ..., get_request: bool = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi new file mode 100644 index 000000000000..c4b0c3483047 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockBase.pyi @@ -0,0 +1,37 @@ +from typing import Any + +SEARCH_CONTROLS: Any +SERVER_ENCODING: str + +def random_cookie(): ... + +class PagedSearchSet: + size: Any + response: Any + cookie: Any + sent: int + done: bool + def __init__(self, response, size, criticality) -> None: ... + def next(self, size: Any | None = ...): ... + +class MockBaseStrategy: + entries: Any + no_real_dsa: bool + bound: Any + custom_validators: Any + operational_attributes: Any + def __init__(self) -> None: ... + def add_entry(self, dn, attributes, validate: bool = ...): ... + def remove_entry(self, dn): ... + def entries_from_json(self, json_entry_file) -> None: ... 
+ def mock_bind(self, request_message, controls): ... + def mock_delete(self, request_message, controls): ... + def mock_add(self, request_message, controls): ... + def mock_compare(self, request_message, controls): ... + def mock_modify_dn(self, request_message, controls): ... + def mock_modify(self, request_message, controls): ... + def mock_search(self, request_message, controls): ... + def mock_extended(self, request_message, controls): ... + def evaluate_filter_node(self, node, candidates): ... + def equal(self, dn, attribute_type, value_to_check): ... + def send(self, message_type, request, controls: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi new file mode 100644 index 000000000000..dcfd12d8958b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/mockSync.pyi @@ -0,0 +1,10 @@ +from typing import Any + +from .mockBase import MockBaseStrategy +from .sync import SyncStrategy + +class MockSyncStrategy(MockBaseStrategy, SyncStrategy): + def __init__(self, ldap_connection) -> None: ... + def post_send_search(self, payload): ... + bound: Any + def post_send_single_response(self, payload): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi new file mode 100644 index 000000000000..d38804ee0f85 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/restartable.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from .sync import SyncStrategy + +class RestartableStrategy(SyncStrategy): + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + restartable_sleep_time: Any + restartable_tries: Any + exception_history: Any + def __init__(self, ldap_connection) -> None: ... 
+ def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def send(self, message_type, request, controls: Any | None = ...): ... + def post_send_single_response(self, message_id): ... + def post_send_search(self, message_id): ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi new file mode 100644 index 000000000000..2203fdf72a75 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/reusable.pyi @@ -0,0 +1,74 @@ +from threading import Thread +from typing import Any + +from .base import BaseStrategy + +TERMINATE_REUSABLE: str +BOGUS_BIND: int +BOGUS_UNBIND: int +BOGUS_EXTENDED: int +BOGUS_ABANDON: int + +class ReusableStrategy(BaseStrategy): + pools: Any + def receiving(self) -> None: ... + def get_stream(self) -> None: ... + def set_stream(self, value) -> None: ... + + class ConnectionPool: + def __new__(cls, connection): ... + name: Any + master_connection: Any + workers: Any + pool_size: Any + lifetime: Any + keepalive: Any + request_queue: Any + open_pool: bool + bind_pool: bool + tls_pool: bool + counter: int + terminated_usage: Any + terminated: bool + pool_lock: Any + started: bool + def __init__(self, connection) -> None: ... + def get_info_from_server(self) -> None: ... + def rebind_pool(self) -> None: ... + def start_pool(self): ... + def create_pool(self) -> None: ... + def terminate_pool(self) -> None: ... + + class PooledConnectionThread(Thread): + daemon: bool + worker: Any + master_connection: Any + def __init__(self, worker, master_connection) -> None: ... + def run(self) -> None: ... 
+ + class PooledConnectionWorker: + master_connection: Any + request_queue: Any + running: bool + busy: bool + get_info_from_server: bool + connection: Any + creation_time: Any + task_counter: int + thread: Any + worker_lock: Any + def __init__(self, connection, request_queue) -> None: ... + def new_connection(self) -> None: ... + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + pool: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def terminate(self) -> None: ... + def send(self, message_type, request, controls: Any | None = ...): ... + def validate_bind(self, controls): ... + def get_response(self, counter, timeout: Any | None = ..., get_request: bool = ...): ... + def post_send_single_response(self, counter): ... + def post_send_search(self, counter): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/safeRestartable.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/safeRestartable.pyi new file mode 100644 index 000000000000..b52aa5a1b4a1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/safeRestartable.pyi @@ -0,0 +1,5 @@ +from .restartable import RestartableStrategy + +class SafeRestartableStrategy(RestartableStrategy): + thread_safe: bool + def __init__(self, ldap_connection) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/safeSync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/safeSync.pyi new file mode 100644 index 000000000000..2b8b51390eb8 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/safeSync.pyi @@ -0,0 +1,5 @@ +from .sync import SyncStrategy + +class SafeSyncStrategy(SyncStrategy): + thread_safe: bool + def __init__(self, ldap_connection) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi new file mode 100644 index 000000000000..a1a8460902e2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/strategy/sync.pyi @@ -0,0 +1,19 @@ +from typing import Any + +from ..strategy.base import BaseStrategy + +LDAP_MESSAGE_TEMPLATE: Any + +class SyncStrategy(BaseStrategy): + sync: bool + no_real_dsa: bool + pooled: bool + can_stream: bool + socket_size: Any + def __init__(self, ldap_connection) -> None: ... + def open(self, reset_usage: bool = ..., read_server_info: bool = ...) -> None: ... + def receiving(self): ... + def post_send_single_response(self, message_id): ... + def post_send_search(self, message_id): ... + def set_stream(self, value) -> None: ... + def get_stream(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/asn1.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/asn1.pyi new file mode 100644 index 000000000000..859a9b958630 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/asn1.pyi @@ -0,0 +1,37 @@ +from typing import Any + +# Enable when pyasn1 gets stubs: +# from pyasn1.codec.ber.encoder import AbstractItemEncoder, BooleanEncoder +AbstractItemEncoder = Any +BooleanEncoder = Any + +CLASSES: Any + +class LDAPBooleanEncoder(AbstractItemEncoder): + supportIndefLenMode: bool + # Requires pyasn1 > 0.3.7 + def encodeValue(self, value, asn1Spec, encodeFun, **options): ... + +customTagMap: Any +customTypeMap: Any + +def compute_ber_size(data): ... +def decode_message_fast(message): ... 
+def decode_sequence(message, start, stop, context_decoders: Any | None = ...): ... +def decode_integer(message, start, stop, context_decoders: Any | None = ...): ... +def decode_octet_string(message, start, stop, context_decoders: Any | None = ...): ... +def decode_boolean(message, start, stop, context_decoders: Any | None = ...): ... +def decode_bind_response(message, start, stop, context_decoders: Any | None = ...): ... +def decode_extended_response(message, start, stop, context_decoders: Any | None = ...): ... +def decode_intermediate_response(message, start, stop, context_decoders: Any | None = ...): ... +def decode_controls(message, start, stop, context_decoders: Any | None = ...): ... +def ldap_result_to_dict_fast(response): ... +def get_byte(x): ... +def get_bytes(x): ... + +DECODERS: Any +BIND_RESPONSE_CONTEXT: Any +EXTENDED_RESPONSE_CONTEXT: Any +INTERMEDIATE_RESPONSE_CONTEXT: Any +LDAP_MESSAGE_CONTEXT: Any +CONTROLS_CONTEXT: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ciDict.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ciDict.pyi new file mode 100644 index 000000000000..2564fc656c57 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ciDict.pyi @@ -0,0 +1,29 @@ +from collections.abc import MutableMapping +from typing import Any, Generic, TypeVar + +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +class CaseInsensitiveDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, other: Any | None = ..., **kwargs) -> None: ... + def __contains__(self, item): ... + def __delitem__(self, key) -> None: ... + def __setitem__(self, key, item) -> None: ... + def __getitem__(self, key): ... + def __iter__(self): ... + def __len__(self): ... + def keys(self): ... + def values(self): ... + def items(self): ... + def __eq__(self, other): ... + def copy(self): ... 
+ +class CaseInsensitiveWithAliasDict(CaseInsensitiveDict[_KT, _VT], Generic[_KT, _VT]): + def __init__(self, other: Any | None = ..., **kwargs) -> None: ... + def aliases(self): ... + def __setitem__(self, key, value) -> None: ... + def __delitem__(self, key) -> None: ... + def set_alias(self, key, alias, ignore_duplicates: bool = ...) -> None: ... + def remove_alias(self, alias) -> None: ... + def __getitem__(self, key): ... + def copy(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi new file mode 100644 index 000000000000..80971d5fc15c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/config.pyi @@ -0,0 +1,6 @@ +from typing import Any + +PARAMETERS: Any + +def get_config_parameter(parameter): ... +def set_config_parameter(parameter, value) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/conv.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/conv.pyi new file mode 100644 index 000000000000..280e2085cfec --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/conv.pyi @@ -0,0 +1,14 @@ +from typing import Any + +def to_unicode(obj: float | bytes | str, encoding: str | None = ..., from_server: bool = ...) -> str: ... +def to_raw(obj, encoding: str = ...): ... +def escape_filter_chars(text: float | bytes | str, encoding: str | None = ...) -> str: ... +def unescape_filter_chars(text, encoding: Any | None = ...): ... +def escape_bytes(bytes_value: str | bytes) -> str: ... +def prepare_for_stream(value): ... +def json_encode_b64(obj): ... +def check_json_dict(json_dict) -> None: ... +def json_hook(obj): ... +def format_json(obj, iso_format: bool = ...): ... +def is_filter_escaped(text): ... +def ldap_escape_to_bytes(text): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/dn.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/dn.pyi new file mode 100644 index 000000000000..dab0cdffd451 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/dn.pyi @@ -0,0 +1,11 @@ +STATE_ANY: int +STATE_ESCAPE: int +STATE_ESCAPE_HEX: int + +def to_dn( + iterator, decompose: bool = ..., remove_space: bool = ..., space_around_equal: bool = ..., separate_rdn: bool = ... +): ... +def parse_dn(dn, escape: bool = ..., strip: bool = ...): ... +def safe_dn(dn, decompose: bool = ..., reverse: bool = ...): ... +def safe_rdn(dn, decompose: bool = ...): ... +def escape_rdn(rdn: str) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi new file mode 100644 index 000000000000..74734cc6339e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/hashed.pyi @@ -0,0 +1,6 @@ +from typing import Any + +algorithms_table: Any +salted_table: Any + +def hashed(algorithm, value, salt: Any | None = ..., raw: bool = ..., encoding: str = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi new file mode 100644 index 000000000000..a5c47f7bdf4f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/log.pyi @@ -0,0 +1,25 @@ +from logging import NullHandler as NullHandler +from typing import Any + +OFF: int +ERROR: int +BASIC: int +PROTOCOL: int +NETWORK: int +EXTENDED: int +DETAIL_LEVELS: Any + +def get_detail_level_name(level_name): ... +def log(detail, message, *args) -> None: ... +def log_enabled(detail): ... +def set_library_log_hide_sensitive_data(hide: bool = ...) -> None: ... +def get_library_log_hide_sensitive_data(): ... 
+def set_library_log_activation_level(logging_level) -> None: ... +def get_library_log_activation_lavel(): ... +def set_library_log_max_line_length(length) -> None: ... +def get_library_log_max_line_length(): ... +def set_library_log_detail_level(detail) -> None: ... +def get_library_log_detail_level(): ... +def format_ldap_message(message, prefix): ... + +logger: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi new file mode 100644 index 000000000000..6e122f3a778d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/ntlm.pyi @@ -0,0 +1,117 @@ +from typing import Any + +oem_encoding: Any +NTLM_SIGNATURE: bytes +NTLM_MESSAGE_TYPE_NTLM_NEGOTIATE: int +NTLM_MESSAGE_TYPE_NTLM_CHALLENGE: int +NTLM_MESSAGE_TYPE_NTLM_AUTHENTICATE: int +FLAG_NEGOTIATE_56: int +FLAG_NEGOTIATE_KEY_EXCH: int +FLAG_NEGOTIATE_128: int +FLAG_NEGOTIATE_VERSION: int +FLAG_NEGOTIATE_TARGET_INFO: int +FLAG_REQUEST_NOT_NT_SESSION_KEY: int +FLAG_NEGOTIATE_IDENTIFY: int +FLAG_NEGOTIATE_EXTENDED_SESSIONSECURITY: int +FLAG_TARGET_TYPE_SERVER: int +FLAG_TARGET_TYPE_DOMAIN: int +FLAG_NEGOTIATE_ALWAYS_SIGN: int +FLAG_NEGOTIATE_OEM_WORKSTATION_SUPPLIED: int +FLAG_NEGOTIATE_OEM_DOMAIN_SUPPLIED: int +FLAG_NEGOTIATE_ANONYMOUS: int +FLAG_NEGOTIATE_NTLM: int +FLAG_NEGOTIATE_LM_KEY: int +FLAG_NEGOTIATE_DATAGRAM: int +FLAG_NEGOTIATE_SEAL: int +FLAG_NEGOTIATE_SIGN: int +FLAG_REQUEST_TARGET: int +FLAG_NEGOTIATE_OEM: int +FLAG_NEGOTIATE_UNICODE: int +FLAG_TYPES: Any +AV_END_OF_LIST: int +AV_NETBIOS_COMPUTER_NAME: int +AV_NETBIOS_DOMAIN_NAME: int +AV_DNS_COMPUTER_NAME: int +AV_DNS_DOMAIN_NAME: int +AV_DNS_TREE_NAME: int +AV_FLAGS: int +AV_TIMESTAMP: int +AV_SINGLE_HOST_DATA: int +AV_TARGET_NAME: int +AV_CHANNEL_BINDINGS: int +AV_TYPES: Any +AV_FLAG_CONSTRAINED: int +AV_FLAG_INTEGRITY: int +AV_FLAG_TARGET_SPN_UNTRUSTED: int +AV_FLAG_TYPES: Any + +def 
pack_windows_version(debug: bool = ...): ... +def unpack_windows_version(version_message): ... + +class NtlmClient: + client_config_flags: int + exported_session_key: Any + negotiated_flags: Any + user_name: Any + user_domain: Any + no_lm_response_ntlm_v1: Any + client_blocked: bool + client_block_exceptions: Any + client_require_128_bit_encryption: Any + max_life_time: Any + client_signing_key: Any + client_sealing_key: Any + sequence_number: Any + server_sealing_key: Any + server_signing_key: Any + integrity: bool + replay_detect: bool + sequence_detect: bool + confidentiality: bool + datagram: bool + identity: bool + client_supplied_target_name: Any + client_channel_binding_unhashed: Any + unverified_target_name: Any + server_challenge: Any + server_target_name: Any + server_target_info: Any + server_version: Any + server_av_netbios_computer_name: Any + server_av_netbios_domain_name: Any + server_av_dns_computer_name: Any + server_av_dns_domain_name: Any + server_av_dns_forest_name: Any + server_av_target_name: Any + server_av_flags: Any + server_av_timestamp: Any + server_av_single_host_data: Any + server_av_channel_bindings: Any + server_av_flag_constrained: Any + server_av_flag_integrity: Any + server_av_flag_target_spn_untrusted: Any + current_encoding: Any + client_challenge: Any + server_target_info_raw: Any + def __init__(self, domain, user_name, password) -> None: ... + def get_client_flag(self, flag): ... + def get_negotiated_flag(self, flag): ... + def get_server_av_flag(self, flag): ... + def set_client_flag(self, flags) -> None: ... + def reset_client_flags(self) -> None: ... + def unset_client_flag(self, flags) -> None: ... + def create_negotiate_message(self): ... + def parse_challenge_message(self, message): ... + def create_authenticate_message(self): ... + @staticmethod + def pack_field(value, offset): ... + @staticmethod + def unpack_field(field_message): ... + @staticmethod + def unpack_av_info(info): ... 
+ @staticmethod + def pack_av_info(avs): ... + @staticmethod + def pack_windows_timestamp(): ... + def compute_nt_response(self): ... + def ntowf_v2(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/port_validators.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/port_validators.pyi new file mode 100644 index 000000000000..c120f02b89b9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/port_validators.pyi @@ -0,0 +1,2 @@ +def check_port(port): ... +def check_port_and_port_list(port, port_list): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi new file mode 100644 index 000000000000..f2c58e20ed4d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/repr.pyi @@ -0,0 +1,5 @@ +from typing import Any + +repr_encoding: Any + +def to_stdout_encoding(value): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/tls_backport.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/tls_backport.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/uri.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/uri.pyi new file mode 100644 index 000000000000..45ea8bb7699d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/utils/uri.pyi @@ -0,0 +1 @@ +def parse_uri(uri): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/version.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/version.pyi new file mode 100644 index 000000000000..50be840c1d09 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/ldap3/ldap3/version.pyi @@ -0,0 +1,3 @@ +__url__: str +__description__: str +__status__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mock/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/mock/METADATA.toml index 3db9f3edf04c..3188e8fc281e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mock/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/mock/METADATA.toml @@ -1 +1 @@ -version = "4.0" +version = "4.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/__init__.pyi index e97abe80edee..9b47e2207bef 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/__init__.pyi @@ -1,5 +1,3 @@ -from typing import Tuple - from .mock import * -version_info: Tuple[int, int, int] +version_info: tuple[int, int, int] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/mock.pyi b/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/mock.pyi index 47d07301ba20..4e740a303c36 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/mock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/mock/mock/mock.pyi @@ -1,12 +1,14 @@ +from _typeshed import Self from collections.abc import Callable, Mapping, Sequence -from typing import Any, Generic, List, Tuple, Type, TypeVar, overload +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Literal _F = TypeVar("_F", bound=Callable[..., Any]) _T = TypeVar("_T") -_TT = TypeVar("_TT", bound=Type[Any]) +_TT = TypeVar("_TT", 
bound=type[Any]) _R = TypeVar("_R") -__all__ = [ +__all__ = ( "Mock", "MagicMock", "patch", @@ -22,64 +24,55 @@ __all__ = [ "mock_open", "PropertyMock", "seal", -] +) __version__: str FILTER_DIR: Any -class _slotted: ... - -class _SentinelObject: - name: Any - def __init__(self, name: Any) -> None: ... - -class _Sentinel: - def __init__(self) -> None: ... - def __getattr__(self, name: str) -> Any: ... - sentinel: Any DEFAULT: Any -class _Call(Tuple[Any, ...]): +class _Call(tuple[Any, ...]): def __new__( - cls, value: Any = ..., name: Any | None = ..., parent: Any | None = ..., two: bool = ..., from_kall: bool = ... - ) -> Any: ... + cls: type[Self], + value: Any = ..., + name: Any | None = ..., + parent: Any | None = ..., + two: bool = ..., + from_kall: bool = ..., + ) -> Self: ... name: Any parent: Any from_kall: Any def __init__( self, value: Any = ..., name: Any | None = ..., parent: Any | None = ..., two: bool = ..., from_kall: bool = ... ) -> None: ... - def __eq__(self, other: Any) -> bool: ... - __ne__: Any + def __eq__(self, other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... def __call__(self, *args: Any, **kwargs: Any) -> _Call: ... - def __getattr__(self, attr: Any) -> Any: ... - def count(self, *args: Any, **kwargs: Any) -> Any: ... - def index(self, *args: Any, **kwargs: Any) -> Any: ... - def call_list(self) -> Any: ... + def __getattr__(self, attr: str) -> Any: ... + @property + def args(self): ... + @property + def kwargs(self): ... + def call_list(self) -> _CallList: ... call: _Call -class _CallList(List[_Call]): +class _CallList(list[_Call]): def __contains__(self, value: Any) -> bool: ... -class _MockIter: - obj: Any - def __init__(self, obj: Any) -> None: ... - def __iter__(self) -> Any: ... - def __next__(self) -> Any: ... - class Base: def __init__(self, *args: Any, **kwargs: Any) -> None: ... -class NonCallableMock(Base, Any): # type: ignore - def __new__(__cls, *args: Any, **kw: Any) -> NonCallableMock: ... 
+class NonCallableMock(Base, Any): + def __new__(__cls: type[Self], *args: Any, **kw: Any) -> Self: ... def __init__( self, - spec: list[str] | object | Type[object] | None = ..., + spec: list[str] | object | type[object] | None = ..., wraps: Any | None = ..., name: str | None = ..., - spec_set: list[str] | object | Type[object] | None = ..., + spec_set: list[str] | object | type[object] | None = ..., parent: NonCallableMock | None = ..., _spec_state: Any | None = ..., _new_name: str = ..., @@ -113,7 +106,7 @@ class NonCallableMock(Base, Any): # type: ignore call_args_list: _CallList mock_calls: _CallList def _format_mock_call_signature(self, args: Any, kwargs: Any) -> str: ... - def _call_matcher(self, _call: Tuple[_Call, ...]) -> _Call: ... + def _call_matcher(self, _call: tuple[_Call, ...]) -> _Call: ... def _get_child_mock(self, **kw: Any) -> NonCallableMock: ... class CallableMixin(Base): @@ -149,23 +142,6 @@ class _patch(Generic[_T]): autospec: Any kwargs: Mapping[str, Any] additional_patchers: Any - @overload - def __init__( - self: _patch[MagicMock | AsyncMock], - getter: Callable[[], Any], - attribute: str, - *, - spec: Any | None, - create: bool, - spec_set: Any | None, - autospec: Any | None, - new_callable: Any | None, - kwargs: Mapping[str, Any], - ) -> None: ... - # This overload also covers the case, where new==DEFAULT. In this case, self is _patch[Any]. - # Ideally we'd be able to add an overload for it so that self is _patch[MagicMock], - # but that's impossible with the current type system. - @overload def __init__( self: _patch[_T], getter: Callable[[], Any], @@ -205,9 +181,9 @@ class _patch_dict: class _patcher: TEST_PREFIX: str - dict: Type[_patch_dict] + dict: type[_patch_dict] @overload - def __call__( # type: ignore + def __call__( # type: ignore[misc] self, target: Any, *, @@ -234,7 +210,7 @@ class _patcher: **kwargs: Any, ) -> _patch[_T]: ... 
@overload - def object( # type: ignore + def object( # type: ignore[misc] self, target: Any, attribute: str, @@ -284,7 +260,6 @@ class MagicMock(MagicMixin, Mock): class AsyncMockMixin(Base): def __init__(self, *args: Any, **kwargs: Any) -> None: ... - async def _execute_mock_call(self, *args: Any, **kwargs: Any) -> Any: ... def assert_awaited(self) -> None: ... def assert_awaited_once(self) -> None: ... def assert_awaited_with(self, *args: Any, **kwargs: Any) -> None: ... @@ -300,19 +275,18 @@ class AsyncMockMixin(Base): class AsyncMagicMixin(MagicMixin): def __init__(self, *args: Any, **kw: Any) -> None: ... -class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... +class AsyncMock(AsyncMockMixin, AsyncMagicMixin, Mock): ... # type: ignore # argument disparities between base classes -class MagicProxy: - name: Any +class MagicProxy(Base): + name: str parent: Any - def __init__(self, name: Any, parent: Any) -> None: ... - def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __init__(self, name: str, parent) -> None: ... def create_mock(self) -> Any: ... def __get__(self, obj: Any, _type: Any | None = ...) -> Any: ... class _ANY: - def __eq__(self, other: Any) -> bool: ... - def __ne__(self, other: Any) -> bool: ... + def __eq__(self, other: object) -> Literal[True]: ... + def __ne__(self, other: object) -> Literal[False]: ... 
ANY: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/METADATA.toml index 8732c02c405f..79b51931ee0b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.4" +version = "0.4.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/mypy_extensions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/mypy_extensions.pyi index fc6de37d07d1..33b47244d385 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/mypy_extensions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/mypy-extensions/mypy_extensions.pyi @@ -1,19 +1,20 @@ import abc import sys -from typing import Any, Callable, Generic, ItemsView, KeysView, Mapping, Type, TypeVar, Union, ValuesView +from _typeshed import Self +from typing import Any, Callable, Generic, ItemsView, KeysView, Mapping, TypeVar, ValuesView _T = TypeVar("_T") _U = TypeVar("_U") # Internal mypy fallback type for all typed dicts (does not exist at runtime) class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): - def copy(self: _T) -> _T: ... + def copy(self: Self) -> Self: ... # Using NoReturn so that only calls using mypy plugin hook that specialize the signature # can go through. def setdefault(self, k: NoReturn, default: object) -> object: ... # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. def pop(self, k: NoReturn, default: _T = ...) -> object: ... # type: ignore - def update(self: _T, __m: _T) -> None: ... + def update(self: Self, __m: Self) -> None: ... if sys.version_info >= (3, 0): def items(self) -> ItemsView[str, object]: ... def keys(self) -> KeysView[str]: ... 
@@ -23,9 +24,10 @@ class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): def viewitems(self) -> ItemsView[str, object]: ... def viewkeys(self) -> KeysView[str]: ... def viewvalues(self) -> ValuesView[object]: ... + def __delitem__(self, k: NoReturn) -> None: ... -def TypedDict(typename: str, fields: dict[str, Type[Any]], total: bool = ...) -> Type[dict[str, Any]]: ... +def TypedDict(typename: str, fields: dict[str, type[Any]], total: bool = ...) -> type[dict[str, Any]]: ... def Arg(type: _T = ..., name: str | None = ...) -> _T: ... def DefaultArg(type: _T = ..., name: str | None = ...) -> _T: ... def NamedArg(type: _T = ..., name: str | None = ...) -> _T: ... @@ -34,13 +36,13 @@ def VarArg(type: _T = ...) -> _T: ... def KwArg(type: _T = ...) -> _T: ... # Return type that indicates a function does not return. -# This type is equivalent to the None type, but the no-op Union is necessary to -# distinguish the None type from the None value. -NoReturn = Union[None] # Deprecated: Use typing.NoReturn instead. +# Deprecated: Use typing.NoReturn instead. +class NoReturn: ... -# This is intended as a class decorator, but mypy rejects abstract classes -# when a Type[_T] is expected, so we can't give it the type we want -def trait(cls: Any) -> Any: ... +# This is consistent with implementation. Usage intends for this as +# a class decorator, but mypy does not support type[_T] for abstract +# classes until this issue is resolved, https://github.com/python/mypy/issues/4717. +def trait(cls: _T) -> _T: ... def mypyc_attr(*attrs: str, **kwattrs: object) -> Callable[[_T], _T]: ... class FlexibleAlias(Generic[_T, _U]): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/METADATA.toml index be8be530e4fe..159b06b06fd2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/METADATA.toml @@ -1 +1 @@ -version = "2.0" \ No newline at end of file +version = "2.0.*" \ No newline at end of file diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/__init__.pyi index 88d6eb25bd2c..7e8070b3e09f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/__init__.pyi @@ -1,4 +1,4 @@ -from typing import Any, FrozenSet +from typing import Any from MySQLdb import connections as connections, constants as constants, converters as converters, cursors as cursors from MySQLdb._mysql import ( @@ -35,7 +35,7 @@ threadsafety: int apilevel: str paramstyle: str -class DBAPISet(FrozenSet[Any]): +class DBAPISet(frozenset[Any]): def __eq__(self, other): ... STRING: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/_mysql.pyi b/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/_mysql.pyi index 690e3469893f..1fba3f13cc06 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/_mysql.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/mysqlclient/MySQLdb/_mysql.pyi @@ -1,9 +1,9 @@ import builtins -from typing import Any, Tuple +from typing import Any import MySQLdb._exceptions -version_info: Tuple[Any, ...] +version_info: tuple[Any, ...] class DataError(MySQLdb._exceptions.DatabaseError): ... class DatabaseError(MySQLdb._exceptions.Error): ... 
@@ -63,8 +63,8 @@ class connection: def thread_id(self, *args, **kwargs) -> Any: ... def use_result(self, *args, **kwargs) -> Any: ... def warning_count(self, *args, **kwargs) -> Any: ... - def __delattr__(self, name) -> Any: ... - def __setattr__(self, name, value) -> Any: ... + def __delattr__(self, __name) -> Any: ... + def __setattr__(self, __name, __value) -> Any: ... class result: converter: Any @@ -76,8 +76,8 @@ class result: def field_flags(self, *args, **kwargs) -> Any: ... def num_fields(self, *args, **kwargs) -> Any: ... def num_rows(self, *args, **kwargs) -> Any: ... - def __delattr__(self, name) -> Any: ... - def __setattr__(self, name, value) -> Any: ... + def __delattr__(self, __name) -> Any: ... + def __setattr__(self, __name, __value) -> Any: ... def connect(*args, **kwargs) -> Any: ... def debug(*args, **kwargs) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/METADATA.toml index f9586707dfed..84307529a94b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/METADATA.toml @@ -1 +1 @@ -version = "3.1" +version = "3.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/common.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/common.pyi index bdd765525fe8..622e9e3ea8da 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/common.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/common.pyi @@ -1,4 +1,4 @@ -from typing import Any, Dict +from typing import Any UNICODE_ASCII_CHARACTER_SET: str CLIENT_ID_CHARACTER_SET: str @@ -28,7 +28,7 @@ def add_params_to_uri(uri, params, fragment: bool = ...): ... def safe_string_equals(a, b): ... def to_unicode(data, encoding: str = ...): ... 
-class CaseInsensitiveDict(Dict[Any, Any]): +class CaseInsensitiveDict(dict[Any, Any]): proxy: Any def __init__(self, data) -> None: ... def __contains__(self, k): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi index 97ce01969909..f3c64ec4a50e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/backend_application.pyi @@ -4,4 +4,4 @@ from .base import Client as Client class BackendApplicationClient(Client): grant_type: str - def prepare_request_body(self, body: str = ..., scope: Any | None = ..., include_client_id: bool = ..., **kwargs): ... # type: ignore + def prepare_request_body(self, body: str = ..., scope: Any | None = ..., include_client_id: bool = ..., **kwargs): ... # type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi index 2ed10bcb50c1..b7f692737e81 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/legacy_application.pyi @@ -5,6 +5,6 @@ from .base import Client as Client class LegacyApplicationClient(Client): grant_type: str def __init__(self, client_id, **kwargs) -> None: ... - def prepare_request_body( # type: ignore + def prepare_request_body( # type: ignore[override] self, username, password, body: str = ..., scope: Any | None = ..., include_client_id: bool = ..., **kwargs ): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi index 3b27f1da6e9f..fa9d338b9913 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/mobile_application.pyi @@ -4,8 +4,8 @@ from .base import Client as Client class MobileApplicationClient(Client): response_type: str - def prepare_request_uri( # type: ignore + def prepare_request_uri( # type: ignore[override] self, uri, redirect_uri: Any | None = ..., scope: Any | None = ..., state: Any | None = ..., **kwargs ): ... token: Any - def parse_request_uri_response(self, uri, state: Any | None = ..., scope: Any | None = ...): ... # type: ignore + def parse_request_uri_response(self, uri, state: Any | None = ..., scope: Any | None = ...): ... # type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi index d0f70cd0da2f..9a8d3575d90e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/service_application.pyi @@ -17,7 +17,7 @@ class ServiceApplicationClient(Client): audience: Any | None = ..., **kwargs, ) -> None: ... 
- def prepare_request_body( # type: ignore + def prepare_request_body( # type: ignore[override] self, private_key: Any | None = ..., subject: Any | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi index 5effe8cf14f8..054ab27c311b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/clients/web_application.pyi @@ -6,10 +6,10 @@ class WebApplicationClient(Client): grant_type: str code: Any def __init__(self, client_id, code: Any | None = ..., **kwargs) -> None: ... - def prepare_request_uri( # type: ignore + def prepare_request_uri( # type: ignore[override] self, uri, redirect_uri: Any | None = ..., scope: Any | None = ..., state: Any | None = ..., **kwargs ): ... - def prepare_request_body( # type: ignore + def prepare_request_body( # type: ignore[override] self, code: Any | None = ..., redirect_uri: Any | None = ..., body: str = ..., include_client_id: bool = ..., **kwargs ): ... - def parse_request_uri_response(self, uri, state: Any | None = ...): ... # type: ignore + def parse_request_uri_response(self, uri, state: Any | None = ...): ... 
# type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi index c11b07b2d41d..d4901f5aa653 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/oauthlib/oauthlib/oauth2/rfc6749/tokens.pyi @@ -1,6 +1,6 @@ -from typing import Any, Dict +from typing import Any -class OAuth2Token(Dict[Any, Any]): +class OAuth2Token(dict[Any, Any]): def __init__(self, params, old_scope: Any | None = ...) -> None: ... @property def scope_changed(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/METADATA.toml new file mode 100644 index 000000000000..bcb78f17fb41 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/METADATA.toml @@ -0,0 +1 @@ +version = "2.4.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/__init__.pyi new file mode 100644 index 000000000000..69d6e6b78ec2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/__init__.pyi @@ -0,0 +1,24 @@ +from .propagation import ( + Format as Format, + InvalidCarrierException as InvalidCarrierException, + SpanContextCorruptedException as SpanContextCorruptedException, + UnsupportedFormatException as UnsupportedFormatException, +) +from .scope import Scope as Scope +from .scope_manager import ScopeManager as ScopeManager +from .span import Span as Span, SpanContext as SpanContext +from .tracer import ( + Reference as Reference, + ReferenceType as ReferenceType, + Tracer as Tracer, + child_of as child_of, + follows_from as follows_from, + start_child_span as 
start_child_span, +) + +tracer: Tracer +is_tracer_registered: bool + +def global_tracer() -> Tracer: ... +def set_global_tracer(value: Tracer) -> None: ... +def is_global_tracer_registered() -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/ext/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/ext/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/ext/tags.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/ext/tags.pyi new file mode 100644 index 000000000000..08687e50927d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/ext/tags.pyi @@ -0,0 +1 @@ +from ..tags import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/api_check.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/api_check.pyi new file mode 100644 index 000000000000..dcc9734872f0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/api_check.pyi @@ -0,0 +1,34 @@ +from opentracing.span import Span + +from ..tracer import Tracer + +class APICompatibilityCheckMixin: + def tracer(self) -> Tracer: ... + def check_baggage_values(self) -> bool: ... + def check_scope_manager(self) -> bool: ... + def is_parent(self, parent: Span, span: Span) -> bool: ... + def test_active_span(self) -> None: ... + def test_start_active_span(self) -> None: ... + def test_start_active_span_parent(self) -> None: ... + def test_start_active_span_ignore_active_span(self) -> None: ... 
+ def test_start_active_span_not_finish_on_close(self) -> None: ... + def test_start_active_span_finish_on_close(self) -> None: ... + def test_start_active_span_default_finish_on_close(self) -> None: ... + def test_start_span(self) -> None: ... + def test_start_span_propagation(self) -> None: ... + def test_start_span_propagation_ignore_active_span(self) -> None: ... + def test_start_span_with_parent(self) -> None: ... + def test_start_child_span(self) -> None: ... + def test_set_operation_name(self) -> None: ... + def test_span_as_context_manager(self) -> None: ... + def test_span_tag_value_types(self) -> None: ... + def test_span_tags_with_chaining(self) -> None: ... + def test_span_logs(self) -> None: ... + def test_span_baggage(self) -> None: ... + def test_context_baggage(self) -> None: ... + def test_text_propagation(self) -> None: ... + def test_binary_propagation(self) -> None: ... + def test_mandatory_formats(self) -> None: ... + def test_unknown_format(self) -> None: ... + def test_tracer_start_active_span_scope(self) -> None: ... + def test_tracer_start_span_scope(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/scope_check.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/scope_check.pyi new file mode 100644 index 000000000000..affe51501d6d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/harness/scope_check.pyi @@ -0,0 +1,15 @@ +from typing import Any, Callable + +from ..scope_manager import ScopeManager + +class ScopeCompatibilityCheckMixin: + def scope_manager(self) -> ScopeManager: ... + def run_test(self, test_fn: Callable[[Any], Any]) -> None: ... + def test_missing_active_external(self) -> None: ... + def test_missing_active(self) -> None: ... + def test_activate(self) -> None: ... + def test_activate_external(self) -> None: ... + def test_activate_finish_on_close(self) -> None: ... 
+ def test_activate_nested(self) -> None: ... + def test_activate_finish_on_close_nested(self) -> None: ... + def test_close_wrong_order(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/logs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/logs.pyi new file mode 100644 index 000000000000..15cda689b09d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/logs.pyi @@ -0,0 +1,5 @@ +ERROR_KIND: str +ERROR_OBJECT: str +EVENT: str +MESSAGE: str +STACK: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/__init__.pyi new file mode 100644 index 000000000000..85fa2ac93862 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/__init__.pyi @@ -0,0 +1,2 @@ +from .propagator import Propagator as Propagator +from .tracer import MockTracer as MockTracer diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/binary_propagator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/binary_propagator.pyi new file mode 100644 index 000000000000..33f94f4b7801 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/binary_propagator.pyi @@ -0,0 +1,8 @@ +from typing import Any + +from .context import SpanContext +from .propagator import Propagator + +class BinaryPropagator(Propagator): + def inject(self, span_context: SpanContext, carrier: dict[Any, Any]) -> None: ... + def extract(self, carrier: dict[Any, Any]) -> SpanContext: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/context.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/context.pyi new file mode 100644 index 000000000000..ef8ed9214465 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/context.pyi @@ -0,0 +1,11 @@ +from _typeshed import Self + +import opentracing + +class SpanContext(opentracing.SpanContext): + trace_id: int | None + span_id: int | None + def __init__(self, trace_id: int | None = ..., span_id: int | None = ..., baggage: dict[str, str] | None = ...) -> None: ... + @property + def baggage(self) -> dict[str, str]: ... + def with_baggage_item(self: Self, key: str, value: str) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/propagator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/propagator.pyi new file mode 100644 index 000000000000..0a2ffb446a3d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/propagator.pyi @@ -0,0 +1,7 @@ +from typing import Any + +from .context import SpanContext + +class Propagator: + def inject(self, span_context: SpanContext, carrier: dict[Any, Any]) -> None: ... + def extract(self, carrier: dict[Any, Any]) -> SpanContext: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/span.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/span.pyi new file mode 100644 index 000000000000..da419aa7a356 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/span.pyi @@ -0,0 +1,38 @@ +from _typeshed import Self +from typing import Any + +from ..span import Span +from ..tracer import Tracer +from .context import SpanContext +from .tracer import MockTracer + +class MockSpan(Span): + operation_name: str | None + start_time: Any + parent_id: int | None + tags: dict[str, Any] + finish_time: float + finished: bool + logs: list[LogData] + def __init__( + self, + tracer: Tracer, + operation_name: str | None = ..., + context: SpanContext | None = ..., + parent_id: int | None = ..., + tags: dict[str, Any] | None = ..., + start_time: float | None = ..., + ) -> None: ... + @property + def tracer(self) -> MockTracer: ... + @property + def context(self) -> SpanContext: ... + def set_operation_name(self: Self, operation_name: str) -> Self: ... + def set_tag(self: Self, key: str, value: str | bool | int | float) -> Self: ... + def log_kv(self: Self, key_values: dict[str, Any], timestamp: float | None = ...) -> Self: ... + def set_baggage_item(self: Self, key: str, value: str) -> Self: ... + +class LogData: + key_values: dict[str, Any] + timestamp: float | None + def __init__(self, key_values: dict[str, Any], timestamp: float | None = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/text_propagator.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/text_propagator.pyi new file mode 100644 index 000000000000..d828fe2f99ec --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/text_propagator.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from .context import SpanContext +from .propagator import Propagator + +prefix_tracer_state: str +prefix_baggage: str +field_name_trace_id: str +field_name_span_id: str +field_count: int + +class TextPropagator(Propagator): + def inject(self, span_context: SpanContext, carrier: dict[Any, Any]) -> None: ... + def extract(self, carrier: dict[Any, Any]) -> SpanContext: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/tracer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/tracer.pyi new file mode 100644 index 000000000000..9336c1a05b9f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/mocktracer/tracer.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from ..scope_manager import ScopeManager +from ..span import Span +from ..tracer import Reference, Tracer +from .context import SpanContext +from .propagator import Propagator +from .span import MockSpan + +class MockTracer(Tracer): + def __init__(self, scope_manager: ScopeManager | None = ...) -> None: ... + @property + def active_span(self) -> MockSpan | None: ... + def register_propagator(self, format: str, propagator: Propagator) -> None: ... + def finished_spans(self) -> list[MockSpan]: ... + def reset(self) -> None: ... 
+ def start_span( # type: ignore[override] + self, + operation_name: str | None = ..., + child_of: Span | SpanContext | None = ..., + references: list[Reference] | None = ..., + tags: dict[Any, Any] | None = ..., + start_time: float | None = ..., + ignore_active_span: bool = ..., + ) -> MockSpan: ... + def extract(self, format: str, carrier: dict[Any, Any]) -> SpanContext: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/propagation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/propagation.pyi new file mode 100644 index 000000000000..81307c63f882 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/propagation.pyi @@ -0,0 +1,8 @@ +class UnsupportedFormatException(Exception): ... +class InvalidCarrierException(Exception): ... +class SpanContextCorruptedException(Exception): ... + +class Format: + BINARY: str + TEXT_MAP: str + HTTP_HEADERS: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope.pyi new file mode 100644 index 000000000000..d3ed839fe188 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope.pyi @@ -0,0 +1,17 @@ +from _typeshed import Self +from types import TracebackType + +from .scope_manager import ScopeManager +from .span import Span + +class Scope: + def __init__(self, manager: ScopeManager, span: Span) -> None: ... + @property + def span(self) -> Span: ... + @property + def manager(self) -> ScopeManager: ... + def close(self) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_manager.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_manager.pyi new file mode 100644 index 000000000000..cd07475479a2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_manager.pyi @@ -0,0 +1,8 @@ +from .scope import Scope +from .span import Span + +class ScopeManager: + def __init__(self) -> None: ... + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope | None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/__init__.pyi new file mode 100644 index 000000000000..2b0f720c3ad9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/__init__.pyi @@ -0,0 +1,9 @@ +from ..scope import Scope +from ..scope_manager import ScopeManager +from ..span import Span + +class ThreadLocalScopeManager(ScopeManager): + def __init__(self) -> None: ... + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/asyncio.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/asyncio.pyi new file mode 100644 index 000000000000..4b96d7883fda --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/asyncio.pyi @@ -0,0 +1,8 @@ +from ..scope import Scope +from ..scope_managers import ThreadLocalScopeManager +from ..span import Span + +class AsyncioScopeManager(ThreadLocalScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/constants.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/constants.pyi new file mode 100644 index 000000000000..0a791982f11a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/constants.pyi @@ -0,0 +1 @@ +ACTIVE_ATTR: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/contextvars.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/contextvars.pyi new file mode 100644 index 000000000000..990045ee4c56 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/contextvars.pyi @@ -0,0 +1,10 @@ +from ..scope import Scope +from ..scope_manager import ScopeManager +from ..span import Span + +class ContextVarsScopeManager(ScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... + +def no_parent_scope() -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/gevent.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/gevent.pyi new file mode 100644 index 000000000000..6b835cd9787d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/gevent.pyi @@ -0,0 +1,8 @@ +from ..scope import Scope +from ..scope_manager import ScopeManager +from ..span import Span + +class GeventScopeManager(ScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/tornado.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/tornado.pyi new file mode 100644 index 000000000000..59b1cab9cf50 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/scope_managers/tornado.pyi @@ -0,0 +1,16 @@ +from typing import Any + +from ..scope import Scope +from ..scope_managers import ThreadLocalScopeManager +from ..span import Span + +class TornadoScopeManager(ThreadLocalScopeManager): + def activate(self, span: Span, finish_on_close: bool) -> Scope: ... + @property + def active(self) -> Scope: ... + +class ThreadSafeStackContext: + contexts: Any + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + +def tracer_stack_context() -> ThreadSafeStackContext: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/span.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/span.pyi new file mode 100644 index 000000000000..3f9685adb733 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/span.pyi @@ -0,0 +1,29 @@ +from _typeshed import Self +from types import TracebackType +from typing import Any + +from .tracer import Tracer + +class SpanContext: + EMPTY_BAGGAGE: dict[str, str] + @property + def baggage(self) -> dict[str, str]: ... + +class Span: + def __init__(self, tracer: Tracer, context: SpanContext) -> None: ... + @property + def context(self) -> SpanContext: ... + @property + def tracer(self) -> Tracer: ... + def set_operation_name(self: Self, operation_name: str) -> Self: ... + def finish(self, finish_time: float | None = ...) -> None: ... + def set_tag(self: Self, key: str, value: str | bool | int | float) -> Self: ... + def log_kv(self: Self, key_values: dict[str, Any], timestamp: float | None = ...) -> Self: ... 
+ def set_baggage_item(self: Self, key: str, value: str) -> Self: ... + def get_baggage_item(self, key: str) -> str | None: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def log_event(self: Self, event: Any, payload: Any | None = ...) -> Self: ... + def log(self: Self, **kwargs: Any) -> Self: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/tags.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/tags.pyi new file mode 100644 index 000000000000..c48c210be0cd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/tags.pyi @@ -0,0 +1,23 @@ +SPAN_KIND: str +SPAN_KIND_RPC_CLIENT: str +SPAN_KIND_RPC_SERVER: str +SPAN_KIND_CONSUMER: str +SPAN_KIND_PRODUCER: str +SERVICE: str +ERROR: str +COMPONENT: str +SAMPLING_PRIORITY: str +PEER_SERVICE: str +PEER_HOSTNAME: str +PEER_ADDRESS: str +PEER_HOST_IPV4: str +PEER_HOST_IPV6: str +PEER_PORT: str +HTTP_URL: str +HTTP_METHOD: str +HTTP_STATUS_CODE: str +DATABASE_INSTANCE: str +DATABASE_STATEMENT: str +DATABASE_TYPE: str +DATABASE_USER: str +MESSAGE_BUS_DESTINATION: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/tracer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/tracer.pyi new file mode 100644 index 000000000000..6dcc47cd0211 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/opentracing/opentracing/tracer.pyi @@ -0,0 +1,47 @@ +from typing import Any, NamedTuple + +from .scope import Scope +from .scope_manager import ScopeManager +from .span import Span, SpanContext + +class Tracer: + def __init__(self, scope_manager: ScopeManager | None = ...) -> None: ... + @property + def scope_manager(self) -> ScopeManager: ... + @property + def active_span(self) -> Span | None: ... 
+ def start_active_span( + self, + operation_name: str, + child_of: Span | SpanContext | None = ..., + references: list[Reference] | None = ..., + tags: dict[Any, Any] | None = ..., + start_time: float | None = ..., + ignore_active_span: bool = ..., + finish_on_close: bool = ..., + ) -> Scope: ... + def start_span( + self, + operation_name: str | None = ..., + child_of: Span | SpanContext | None = ..., + references: list[Reference] | None = ..., + tags: dict[Any, Any] | None = ..., + start_time: float | None = ..., + ignore_active_span: bool = ..., + ) -> Span: ... + def inject(self, span_context: SpanContext, format: str, carrier: dict[Any, Any]) -> None: ... + def extract(self, format: str, carrier: dict[Any, Any]) -> SpanContext: ... + +class ReferenceType: + CHILD_OF: str + FOLLOWS_FROM: str + +class Reference(NamedTuple): + type: str + referenced_context: SpanContext | None + +def child_of(referenced_context: SpanContext | None = ...) -> Reference: ... +def follows_from(referenced_context: SpanContext | None = ...) -> Reference: ... +def start_child_span( + parent_span: Span, operation_name: str, tags: dict[Any, Any] | None = ..., start_time: float | None = ... +) -> Span: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/orjson/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/orjson/METADATA.toml index f67b2ab02c86..49ae8203c6e9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/orjson/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/orjson/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.6" +version = "3.6.*" obsolete_since = "3.6.1" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/METADATA.toml index 5438db05e0c1..b526e1ea4802 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/METADATA.toml @@ -1,3 +1,3 @@ -version = "2.7" +version = "2.8.*" python2 = true requires = ["types-cryptography"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_version.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_version.pyi index e75b91463781..1f2297095207 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_version.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_version.pyi @@ -1,3 +1 @@ -from typing import Tuple - -__version_info__: Tuple[int, int, int] +__version_info__: tuple[int, int, int] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_winapi.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_winapi.pyi index 92517ba6cc45..7bf55587666a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_winapi.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/_winapi.pyi @@ -1,19 +1,20 @@ import builtins import ctypes import sys +from _typeshed import Self from types import TracebackType -from typing import Any, Type, TypeVar +from typing import Any if sys.platform == "win32": - 
- _T = TypeVar("_T") def format_system_message(errno: int) -> str | None: ... + class WindowsError(builtins.WindowsError): def __init__(self, value: int | None = ...) -> None: ... @property def message(self) -> str: ... @property def code(self) -> int: ... + def handle_nonzero_success(result: int) -> None: ... GMEM_MOVEABLE: int GlobalAlloc: Any @@ -24,6 +25,7 @@ if sys.platform == "win32": MapViewOfFile: Any UnmapViewOfFile: Any RtlMoveMemory: Any + class MemoryMap: name: str length: int @@ -32,12 +34,12 @@ if sys.platform == "win32": filemap: Any = ... view: Any = ... def __init__(self, name: str, length: int, security_attributes: Any | None = ...) -> None: ... - def __enter__(self: _T) -> _T: ... + def __enter__(self: Self) -> Self: ... def seek(self, pos: int) -> None: ... def write(self, msg: bytes) -> None: ... def read(self, n: int) -> bytes: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_val: BaseException | None, tb: TracebackType | None + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, tb: TracebackType | None ) -> None: ... READ_CONTROL: int STANDARD_RIGHTS_REQUIRED: int @@ -62,15 +64,20 @@ if sys.platform == "win32": POLICY_READ: int POLICY_WRITE: int POLICY_EXECUTE: int + class TokenAccess: TOKEN_QUERY: int + class TokenInformationClass: TokenUser: int + class TOKEN_USER(ctypes.Structure): num: int + class SECURITY_DESCRIPTOR(ctypes.Structure): SECURITY_DESCRIPTOR_CONTROL: Any REVISION: int + class SECURITY_ATTRIBUTES(ctypes.Structure): nLength: int lpSecurityDescriptor: Any @@ -79,6 +86,7 @@ if sys.platform == "win32": def descriptor(self) -> Any: ... @descriptor.setter def descriptor(self, value: Any) -> None: ... + def GetTokenInformation(token: Any, information_class: Any) -> Any: ... def OpenProcessToken(proc_handle: Any, access: Any) -> Any: ... def get_current_user() -> TOKEN_USER: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/agent.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/agent.pyi index 768b49bd0ef2..24926ac1eb5c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/agent.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/agent.pyi @@ -1,6 +1,6 @@ from socket import _RetAddress, socket from threading import Thread -from typing import Protocol, Tuple +from typing import Protocol from paramiko.channel import Channel from paramiko.message import Message @@ -18,7 +18,7 @@ SSH2_AGENT_SIGN_RESPONSE: int class AgentSSH: def __init__(self) -> None: ... - def get_keys(self) -> Tuple[AgentKey, ...]: ... + def get_keys(self) -> tuple[AgentKey, ...]: ... class AgentProxyThread(Thread): def __init__(self, agent: _AgentProxy) -> None: ... @@ -26,11 +26,11 @@ class AgentProxyThread(Thread): class AgentLocalProxy(AgentProxyThread): def __init__(self, agent: AgentServerProxy) -> None: ... - def get_connection(self) -> Tuple[socket, _RetAddress]: ... + def get_connection(self) -> tuple[socket, _RetAddress]: ... class AgentRemoteProxy(AgentProxyThread): def __init__(self, agent: AgentClientProxy, chan: Channel) -> None: ... - def get_connection(self) -> Tuple[socket, _RetAddress]: ... + def get_connection(self) -> tuple[socket, _RetAddress]: ... 
class AgentClientProxy: thread: Thread diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/auth_handler.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/auth_handler.pyi index 6c412620ef19..11721bd2217d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/auth_handler.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/auth_handler.pyi @@ -1,11 +1,11 @@ from threading import Event -from typing import Callable, List, Tuple +from typing import Callable from paramiko.pkey import PKey from paramiko.ssh_gss import _SSH_GSSAuth from paramiko.transport import Transport -_InteractiveCallback = Callable[[str, str, List[Tuple[str, bool]]], List[str]] +_InteractiveCallback = Callable[[str, str, list[tuple[str, bool]]], list[str]] class AuthHandler: transport: Transport diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/channel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/channel.pyi index 47aacdea5f87..1870c9e1fded 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/channel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/channel.pyi @@ -1,6 +1,6 @@ from logging import Logger from threading import Condition, Event, Lock -from typing import Any, Callable, Mapping, Tuple, TypeVar +from typing import Any, Callable, Mapping, TypeVar from paramiko.buffered_pipe import BufferedPipe from paramiko.file import BufferedFile @@ -41,24 +41,24 @@ class Channel(ClosingContextManager): def __init__(self, chanid: int) -> None: ... def __del__(self) -> None: ... def get_pty( - self, term: str = ..., width: int = ..., height: int = ..., width_pixels: int = ..., height_pixels: int = ... + self, term: str | bytes = ..., width: int = ..., height: int = ..., width_pixels: int = ..., height_pixels: int = ... ) -> None: ... def invoke_shell(self) -> None: ... 
- def exec_command(self, command: str) -> None: ... - def invoke_subsystem(self, subsystem: str) -> None: ... + def exec_command(self, command: str | bytes) -> None: ... + def invoke_subsystem(self, subsystem: str | bytes) -> None: ... def resize_pty(self, width: int = ..., height: int = ..., width_pixels: int = ..., height_pixels: int = ...) -> None: ... - def update_environment(self, environment: Mapping[str, str]) -> None: ... - def set_environment_variable(self, name: str, value: str) -> None: ... + def update_environment(self, environment: Mapping[str | bytes, str | bytes]) -> None: ... + def set_environment_variable(self, name: str | bytes, value: str | bytes) -> None: ... def exit_status_ready(self) -> bool: ... def recv_exit_status(self) -> int: ... def send_exit_status(self, status: int) -> None: ... def request_x11( self, screen_number: int = ..., - auth_protocol: str | None = ..., - auth_cookie: str | None = ..., + auth_protocol: str | bytes | None = ..., + auth_cookie: str | bytes | None = ..., single_connection: bool = ..., - handler: Callable[[Channel, Tuple[str, int]], None] | None = ..., + handler: Callable[[Channel, tuple[str, int]], None] | None = ..., ) -> bytes: ... def request_forward_agent(self, handler: Callable[[Channel], None]) -> bool: ... def get_transport(self) -> Transport: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/client.pyi index c43eaa8babf2..42fca3991fb8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/client.pyi @@ -1,5 +1,4 @@ -from socket import socket -from typing import Iterable, Mapping, NoReturn, Tuple, Type +from typing import Iterable, Mapping, NoReturn from paramiko.channel import Channel, ChannelFile, ChannelStderrFile, ChannelStdinFile from paramiko.hostkeys import HostKeys @@ -8,6 +7,8 @@ from paramiko.sftp_client import SFTPClient from paramiko.transport import Transport from paramiko.util import ClosingContextManager +from .transport import _SocketLike + class SSHClient(ClosingContextManager): def __init__(self) -> None: ... def load_system_host_keys(self, filename: str | None = ...) -> None: ... @@ -15,7 +16,7 @@ class SSHClient(ClosingContextManager): def save_host_keys(self, filename: str) -> None: ... def get_host_keys(self) -> HostKeys: ... def set_log_channel(self, name: str) -> None: ... - def set_missing_host_key_policy(self, policy: Type[MissingHostKeyPolicy] | MissingHostKeyPolicy) -> None: ... + def set_missing_host_key_policy(self, policy: type[MissingHostKeyPolicy] | MissingHostKeyPolicy) -> None: ... def connect( self, hostname: str, @@ -28,7 +29,7 @@ class SSHClient(ClosingContextManager): allow_agent: bool = ..., look_for_keys: bool = ..., compress: bool = ..., - sock: socket | None = ..., + sock: _SocketLike | None = ..., gss_auth: bool = ..., gss_kex: bool = ..., gss_deleg_creds: bool = ..., @@ -47,7 +48,7 @@ class SSHClient(ClosingContextManager): timeout: float | None = ..., get_pty: bool = ..., environment: dict[str, str] | None = ..., - ) -> Tuple[ChannelStdinFile, ChannelFile, ChannelStderrFile]: ... 
+ ) -> tuple[ChannelStdinFile, ChannelFile, ChannelStderrFile]: ... def invoke_shell( self, term: str = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/config.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/config.pyi index 74b6c260b534..2e2bb1118fd7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/config.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/config.pyi @@ -1,4 +1,5 @@ -from typing import IO, Any, Dict, Iterable, Pattern, Set +from _typeshed import Self +from typing import IO, Any, Iterable, Pattern from paramiko.ssh_exception import ConfigParseError as ConfigParseError, CouldNotCanonicalize as CouldNotCanonicalize @@ -9,15 +10,15 @@ class SSHConfig: TOKENS_BY_CONFIG_KEY: dict[str, list[str]] def __init__(self) -> None: ... @classmethod - def from_text(cls, text: str) -> SSHConfig: ... + def from_text(cls: type[Self], text: str) -> Self: ... @classmethod - def from_path(cls, path: str) -> SSHConfig: ... + def from_path(cls: type[Self], path: str) -> Self: ... @classmethod - def from_file(cls, flo: IO[str]) -> SSHConfig: ... + def from_file(cls: type[Self], flo: IO[str]) -> Self: ... def parse(self, file_obj: IO[str]) -> None: ... def lookup(self, hostname: str) -> SSHConfigDict: ... def canonicalize(self, hostname: str, options: SSHConfigDict, domains: Iterable[str]) -> str: ... - def get_hostnames(self) -> Set[str]: ... + def get_hostnames(self) -> set[str]: ... class LazyFqdn: fqdn: str | None @@ -25,7 +26,7 @@ class LazyFqdn: host: str | None def __init__(self, config: SSHConfigDict, host: str | None = ...) -> None: ... -class SSHConfigDict(Dict[str, str]): +class SSHConfigDict(dict[str, str]): def __init__(self, *args: Any, **kwargs: Any) -> None: ... def as_bool(self, key: str) -> bool: ... def as_int(self, key: str) -> int: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/dsskey.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/dsskey.pyi index b687655dc22d..86a57d5e3916 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/dsskey.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/dsskey.pyi @@ -1,4 +1,4 @@ -from typing import IO, Any, Callable, Tuple +from typing import IO, Any, Callable from paramiko.message import Message from paramiko.pkey import PKey @@ -17,7 +17,7 @@ class DSSKey(PKey): data: bytes | None = ..., filename: str | None = ..., password: str | None = ..., - vals: Tuple[int, int, int, int] | None = ..., + vals: tuple[int, int, int, int] | None = ..., file_obj: IO[str] | None = ..., ) -> None: ... def asbytes(self) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ecdsakey.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ecdsakey.pyi index b442356d115b..b99315194c8f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ecdsakey.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ecdsakey.pyi @@ -1,4 +1,4 @@ -from typing import IO, Any, Callable, Sequence, Tuple, Type +from typing import IO, Any, Callable, Sequence from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurve, EllipticCurvePrivateKey, EllipticCurvePublicKey from cryptography.hazmat.primitives.hashes import HashAlgorithm @@ -9,15 +9,15 @@ class _ECDSACurve: nist_name: str key_length: int key_format_identifier: str - hash_object: Type[HashAlgorithm] - curve_class: Type[EllipticCurve] - def __init__(self, curve_class: Type[EllipticCurve], nist_name: str) -> None: ... + hash_object: type[HashAlgorithm] + curve_class: type[EllipticCurve] + def __init__(self, curve_class: type[EllipticCurve], nist_name: str) -> None: ... 
class _ECDSACurveSet: ecdsa_curves: Sequence[_ECDSACurve] def __init__(self, ecdsa_curves: Sequence[_ECDSACurve]) -> None: ... def get_key_format_identifier_list(self) -> list[str]: ... - def get_by_curve_class(self, curve_class: Type[Any]) -> _ECDSACurve | None: ... + def get_by_curve_class(self, curve_class: type[Any]) -> _ECDSACurve | None: ... def get_by_key_format_identifier(self, key_format_identifier: str) -> _ECDSACurve | None: ... def get_by_key_length(self, key_length: int) -> _ECDSACurve | None: ... @@ -32,7 +32,7 @@ class ECDSAKey(PKey): data: bytes | None = ..., filename: str | None = ..., password: str | None = ..., - vals: Tuple[EllipticCurvePrivateKey, EllipticCurvePublicKey] | None = ..., + vals: tuple[EllipticCurvePrivateKey, EllipticCurvePublicKey] | None = ..., file_obj: IO[str] | None = ..., validate_point: bool = ..., ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/file.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/file.pyi index dbf35fc39c93..45c4acab12b6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/file.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/file.pyi @@ -1,4 +1,4 @@ -from typing import Any, AnyStr, Generic, Iterable, Tuple +from typing import Any, AnyStr, Generic, Iterable from paramiko.util import ClosingContextManager @@ -15,7 +15,7 @@ class BufferedFile(ClosingContextManager, Generic[AnyStr]): FLAG_LINE_BUFFERED: int FLAG_UNIVERSAL_NEWLINE: int - newlines: None | AnyStr | Tuple[AnyStr, ...] + newlines: None | AnyStr | tuple[AnyStr, ...] def __init__(self) -> None: ... def __del__(self) -> None: ... def __iter__(self) -> BufferedFile[Any]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/hostkeys.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/hostkeys.pyi index c099ac7fd0f9..8278af05418b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/hostkeys.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/hostkeys.pyi @@ -1,3 +1,4 @@ +from _typeshed import Self from typing import Iterator, Mapping, MutableMapping from paramiko.pkey import PKey @@ -10,7 +11,7 @@ class _SubDict(MutableMapping[str, PKey]): def __delitem__(self, key: str) -> None: ... def __getitem__(self, key: str) -> PKey: ... def __setitem__(self, key: str, val: PKey) -> None: ... - def keys(self) -> list[str]: ... # type: ignore + def keys(self) -> list[str]: ... # type: ignore[override] class HostKeys(MutableMapping[str, _SubDict]): def __init__(self, filename: str | None = ...) -> None: ... @@ -25,8 +26,8 @@ class HostKeys(MutableMapping[str, _SubDict]): def __getitem__(self, key: str) -> _SubDict: ... def __delitem__(self, key: str) -> None: ... def __setitem__(self, hostname: str, entry: Mapping[str, PKey]) -> None: ... - def keys(self) -> list[str]: ... # type: ignore - def values(self) -> list[_SubDict]: ... # type: ignore + def keys(self) -> list[str]: ... # type: ignore[override] + def values(self) -> list[_SubDict]: ... # type: ignore[override] @staticmethod def hash_host(hostname: str, salt: str | None = ...) -> str: ... @@ -37,9 +38,9 @@ class InvalidHostKey(Exception): class HostKeyEntry: valid: bool - hostnames: str + hostnames: list[str] key: PKey def __init__(self, hostnames: list[str] | None = ..., key: PKey | None = ...) -> None: ... @classmethod - def from_line(cls, line: str, lineno: int | None = ...) -> HostKeyEntry | None: ... + def from_line(cls: type[Self], line: str, lineno: int | None = ...) -> Self | None: ... def to_line(self) -> str | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/packet.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/packet.pyi index ffbf10ef73c4..5d2c251db650 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/packet.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/packet.pyi @@ -1,7 +1,7 @@ import sys from logging import Logger from socket import socket -from typing import Any, Callable, Tuple +from typing import Any, Callable from cryptography.hazmat.primitives.ciphers import Cipher from paramiko.compress import ZlibCompressor, ZlibDecompressor @@ -57,4 +57,4 @@ class Packetizer: def write_all(self, out: bytes) -> None: ... def readline(self, timeout: float) -> str: ... def send_message(self, data: Message) -> None: ... - def read_message(self) -> Tuple[int, Message]: ... + def read_message(self) -> tuple[int, Message]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pipe.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pipe.pyi index 92bf86fa6f4d..eb7274903f88 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pipe.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pipe.pyi @@ -1,4 +1,4 @@ -from typing import Protocol, Tuple +from typing import Protocol class _BasePipe(Protocol): def clear(self) -> None: ... @@ -32,4 +32,4 @@ class OrPipe: def set(self) -> None: ... def clear(self) -> None: ... -def make_or_pipe(pipe: _Pipe) -> Tuple[OrPipe, OrPipe]: ... +def make_or_pipe(pipe: _Pipe) -> tuple[OrPipe, OrPipe]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pkey.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pkey.pyi index 4990501b7384..1c060f700f51 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pkey.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/pkey.pyi @@ -1,13 +1,12 @@ -from typing import IO, Pattern, Type, TypeVar +from _typeshed import Self +from typing import IO, Pattern from paramiko.message import Message -OPENSSH_AUTH_MAGIC: bytes = ... +OPENSSH_AUTH_MAGIC: bytes def _unpad_openssh(data: bytes) -> bytes: ... -_PK = TypeVar("_PK", bound=PKey) - class PKey: public_blob: PublicBlob | None BEGIN_TAG: Pattern[str] @@ -24,9 +23,9 @@ class PKey: def sign_ssh_data(self, data: bytes) -> Message: ... def verify_ssh_sig(self, data: bytes, msg: Message) -> bool: ... @classmethod - def from_private_key_file(cls: Type[_PK], filename: str, password: str | None = ...) -> _PK: ... + def from_private_key_file(cls: type[Self], filename: str, password: str | None = ...) -> Self: ... @classmethod - def from_private_key(cls: Type[_PK], file_obj: IO[str], password: str | None = ...) -> _PK: ... + def from_private_key(cls: type[Self], file_obj: IO[str], password: str | None = ...) -> Self: ... def write_private_key_file(self, filename: str, password: str | None = ...) -> None: ... def write_private_key(self, file_obj: IO[str], password: str | None = ...) -> None: ... def load_certificate(self, value: Message | str) -> None: ... @@ -37,10 +36,10 @@ class PublicBlob: comment: str def __init__(self, type_: str, blob: bytes, comment: str | None = ...) -> None: ... @classmethod - def from_file(cls, filename: str) -> PublicBlob: ... + def from_file(cls: type[Self], filename: str) -> Self: ... @classmethod - def from_string(cls, string: str) -> PublicBlob: ... + def from_string(cls: type[Self], string: str) -> Self: ... 
@classmethod - def from_message(cls, message: Message) -> PublicBlob: ... + def from_message(cls: type[Self], message: Message) -> Self: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/primes.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/primes.pyi index efc7485342af..3d7cccd9a267 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/primes.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/primes.pyi @@ -1,8 +1,6 @@ -from typing import Tuple - class ModulusPack: - pack: dict[int, list[Tuple[int, int]]] - discarded: list[Tuple[int, str]] + pack: dict[int, list[tuple[int, int]]] + discarded: list[tuple[int, str]] def __init__(self) -> None: ... def read_file(self, filename: str) -> None: ... - def get_modulus(self, min: int, prefer: int, max: int) -> Tuple[int, int]: ... + def get_modulus(self, min: int, prefer: int, max: int) -> tuple[int, int]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/py3compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/py3compat.pyi index c260b9b42a6c..7ef9b1cc9341 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/py3compat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/py3compat.pyi @@ -1,15 +1,14 @@ import sys -from typing import Any, Iterable, Sequence, Text, Type, TypeVar +from typing import Any, Iterable, Sequence, Text, TypeVar _T = TypeVar("_T") PY2: bool -string_types: Type[Any] | Sequence[Type[Any]] -text_type: Type[Any] | Sequence[Type[Any]] -bytes_types: Type[Any] | Sequence[Type[Any]] -bytes = bytes -integer_types: Type[Any] | Sequence[Type[Any]] +string_types: type[Any] | Sequence[type[Any]] +text_type: type[Any] | Sequence[type[Any]] +bytes_types: type[Any] | Sequence[type[Any]] +integer_types: type[Any] | Sequence[type[Any]] long = int def input(prompt: Any) -> str: ... @@ -29,6 +28,8 @@ else: StringIO = cStringIO.StringIO BytesIO = StringIO +bytes = builtins.bytes + def byte_ord(c: int | str) -> int: ... def byte_chr(c: int) -> bytes: ... def byte_mask(c: int, mask: int) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/server.pyi index 1b18ec52414f..5bc25c3d60b8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/server.pyi @@ -1,5 +1,4 @@ import threading -from typing import Any, Tuple from paramiko.channel import Channel from paramiko.message import Message @@ -19,9 +18,9 @@ class ServerInterface: def enable_auth_gssapi(self) -> bool: ... def check_port_forward_request(self, address: str, port: int) -> int: ... def cancel_port_forward_request(self, address: str, port: int) -> None: ... 
- def check_global_request(self, kind: str, msg: Message) -> bool | Tuple[Any, ...]: ... + def check_global_request(self, kind: str, msg: Message) -> bool | tuple[bool | int | str, ...]: ... def check_channel_pty_request( - self, channel: Channel, term: str, width: int, height: int, pixelwidth: int, pixelheight: int, modes: str + self, channel: Channel, term: bytes, width: int, height: int, pixelwidth: int, pixelheight: int, modes: bytes ) -> bool: ... def check_channel_shell_request(self, channel: Channel) -> bool: ... def check_channel_exec_request(self, channel: Channel, command: bytes) -> bool: ... @@ -33,15 +32,15 @@ class ServerInterface: self, channel: Channel, single_connection: bool, auth_protocol: str, auth_cookie: bytes, screen_number: int ) -> bool: ... def check_channel_forward_agent_request(self, channel: Channel) -> bool: ... - def check_channel_direct_tcpip_request(self, chanid: int, origin: Tuple[str, int], destination: Tuple[str, int]) -> int: ... - def check_channel_env_request(self, channel: Channel, name: str, value: str) -> bool: ... - def get_banner(self) -> Tuple[str | None, str | None]: ... + def check_channel_direct_tcpip_request(self, chanid: int, origin: tuple[str, int], destination: tuple[str, int]) -> int: ... + def check_channel_env_request(self, channel: Channel, name: bytes, value: bytes) -> bool: ... + def get_banner(self) -> tuple[str | None, str | None]: ... class InteractiveQuery: name: str instructions: str - prompts: list[Tuple[str, bool]] - def __init__(self, name: str = ..., instructions: str = ..., *prompts: str | Tuple[str, bool]) -> None: ... + prompts: list[tuple[str, bool]] + def __init__(self, name: str = ..., instructions: str = ..., *prompts: str | tuple[str, bool]) -> None: ... def add_prompt(self, prompt: str, echo: bool = ...) -> None: ... 
class SubsystemHandler(threading.Thread): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_attr.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_attr.pyi index 361e4f0b9930..3a397ff7168c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_attr.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_attr.pyi @@ -1,3 +1,4 @@ +from _typeshed import Self from os import stat_result class SFTPAttributes: @@ -17,5 +18,5 @@ class SFTPAttributes: attr: dict[str, str] def __init__(self) -> None: ... @classmethod - def from_stat(cls, obj: stat_result, filename: str | None = ...) -> SFTPAttributes: ... + def from_stat(cls: type[Self], obj: stat_result, filename: str | None = ...) -> Self: ... def asbytes(self) -> bytes: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_client.pyi index a8f7b6583abe..0404f38b9488 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_client.pyi @@ -1,5 +1,6 @@ +from _typeshed import Self from logging import Logger -from typing import IO, Any, Callable, Iterator, Text, Tuple +from typing import IO, Any, Callable, Iterator, Text from paramiko.channel import Channel from paramiko.sftp import BaseSFTP @@ -20,8 +21,8 @@ class SFTPClient(BaseSFTP, ClosingContextManager): def __init__(self, sock: Channel) -> None: ... @classmethod def from_transport( - cls, t: Transport, window_size: int | None = ..., max_packet_size: int | None = ... - ) -> SFTPClient | None: ... + cls: type[Self], t: Transport, window_size: int | None = ..., max_packet_size: int | None = ... + ) -> Self | None: ... def close(self) -> None: ... def get_channel(self) -> Channel | None: ... 
def listdir(self, path: str = ...) -> list[str]: ... @@ -40,7 +41,7 @@ class SFTPClient(BaseSFTP, ClosingContextManager): def symlink(self, source: bytes | Text, dest: bytes | Text) -> None: ... def chmod(self, path: bytes | Text, mode: int) -> None: ... def chown(self, path: bytes | Text, uid: int, gid: int) -> None: ... - def utime(self, path: bytes | Text, times: Tuple[float, float] | None) -> None: ... + def utime(self, path: bytes | Text, times: tuple[float, float] | None) -> None: ... def truncate(self, path: bytes | Text, size: int) -> None: ... def readlink(self, path: bytes | Text) -> Text | None: ... def normalize(self, path: bytes | Text) -> Text: ... @@ -52,7 +53,9 @@ class SFTPClient(BaseSFTP, ClosingContextManager): def put( self, localpath: bytes | Text, remotepath: bytes | Text, callback: _Callback | None = ..., confirm: bool = ... ) -> SFTPAttributes: ... - def getfo(self, remotepath: bytes | Text, fl: IO[bytes], callback: _Callback | None = ...) -> int: ... - def get(self, remotepath: bytes | Text, localpath: bytes | Text, callback: _Callback | None = ...) -> None: ... + def getfo(self, remotepath: bytes | Text, fl: IO[bytes], callback: _Callback | None = ..., prefetch: bool = ...) -> int: ... + def get( + self, remotepath: bytes | Text, localpath: bytes | Text, callback: _Callback | None = ..., prefetch: bool = ... + ) -> None: ... class SFTP(SFTPClient): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_file.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_file.pyi index d335c14f3fae..9663fccaba31 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_file.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_file.pyi @@ -1,4 +1,4 @@ -from typing import Any, Iterator, Sequence, Tuple +from typing import Any, Iterator, Sequence from paramiko.file import BufferedFile from paramiko.sftp_attr import SFTPAttributes @@ -21,9 +21,9 @@ class SFTPFile(BufferedFile[Any]): def stat(self) -> SFTPAttributes: ... def chmod(self, mode: int) -> None: ... def chown(self, uid: int, gid: int) -> None: ... - def utime(self, times: Tuple[float, float] | None) -> None: ... + def utime(self, times: tuple[float, float] | None) -> None: ... def truncate(self, size: int) -> None: ... def check(self, hash_algorithm: str, offset: int = ..., length: int = ..., block_size: int = ...) -> bytes: ... def set_pipelined(self, pipelined: bool = ...) -> None: ... def prefetch(self, file_size: int | None = ...) -> None: ... - def readv(self, chunks: Sequence[Tuple[int, int]]) -> Iterator[bytes]: ... + def readv(self, chunks: Sequence[tuple[int, int]]) -> Iterator[bytes]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_server.pyi index 6193ea1cbdb1..8e8dddef16e4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/sftp_server.pyi @@ -1,5 +1,5 @@ from logging import Logger -from typing import Any, Type +from typing import Any from paramiko.channel import Channel from paramiko.server import ServerInterface, SubsystemHandler @@ -18,7 +18,7 @@ class SFTPServer(BaseSFTP, SubsystemHandler): server: SFTPServerInterface sock: Channel | None def __init__( - self, channel: Channel, name: str, server: ServerInterface, sftp_si: Type[SFTPServerInterface], *largs: Any, **kwargs: Any + self, channel: Channel, name: str, server: ServerInterface, sftp_si: type[SFTPServerInterface], *largs: Any, **kwargs: Any ) -> None: ... def start_subsystem(self, name: str, transport: Transport, channel: Channel) -> None: ... def finish_subsystem(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_exception.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_exception.pyi index e75afb22b1eb..c122d63e162b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_exception.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_exception.pyi @@ -1,5 +1,5 @@ import socket -from typing import Mapping, Tuple +from typing import Mapping from paramiko.pkey import PKey @@ -33,9 +33,9 @@ class ProxyCommandFailure(SSHException): def __init__(self, command: str, error: str) -> None: ... class NoValidConnectionsError(socket.error): - errors: Mapping[Tuple[str, int] | Tuple[str, int, int, int], Exception] - def __init__(self, errors: Mapping[Tuple[str, int] | Tuple[str, int, int, int], Exception]) -> None: ... 
- def __reduce__(self) -> Tuple[type, Tuple[Mapping[Tuple[str, int] | Tuple[str, int, int, int], Exception]]]: ... + errors: Mapping[tuple[str, int] | tuple[str, int, int, int], Exception] + def __init__(self, errors: Mapping[tuple[str, int] | tuple[str, int, int, int], Exception]) -> None: ... + def __reduce__(self) -> tuple[type, tuple[Mapping[tuple[str, int] | tuple[str, int, int, int], Exception]]]: ... class CouldNotCanonicalize(SSHException): ... class ConfigParseError(SSHException): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_gss.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_gss.pyi index aed5ae7672fe..9c0d8bab99b9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_gss.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/ssh_gss.pyi @@ -1,7 +1,7 @@ -from typing import Any, Tuple, Type +from typing import Any GSS_AUTH_AVAILABLE: bool -GSS_EXCEPTIONS: Tuple[Type[Exception], ...] +GSS_EXCEPTIONS: tuple[type[Exception], ...] def GSSAuth(auth_method: str, gss_deleg_creds: bool = ...) -> _SSH_GSSAuth: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/transport.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/transport.pyi index d4962334484f..ea20ff3478f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/transport.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/transport.pyi @@ -2,7 +2,7 @@ from logging import Logger from socket import socket from threading import Condition, Event, Lock, Thread from types import ModuleType -from typing import Any, Callable, Iterable, Protocol, Sequence, Tuple, Type +from typing import Any, Callable, Iterable, Protocol, Sequence, Union from paramiko.auth_handler import AuthHandler, _InteractiveCallback from paramiko.channel import Channel @@ -14,7 +14,8 @@ from paramiko.sftp_client import SFTPClient from paramiko.ssh_gss import _SSH_GSSAuth from paramiko.util import ClosingContextManager -_Addr = Tuple[str, int] +_Addr = tuple[str, int] +_SocketLike = Union[str, _Addr, socket, Channel] class _KexEngine(Protocol): def start_kex(self) -> None: ... @@ -23,7 +24,7 @@ class _KexEngine(Protocol): class Transport(Thread, ClosingContextManager): active: bool hostname: str | None - sock: socket + sock: socket | Channel packetizer: Packetizer local_version: str remote_version: str @@ -67,11 +68,11 @@ class Transport(Thread, ClosingContextManager): server_key_dict: dict[str, PKey] server_accepts: list[Channel] server_accept_cv: Condition - subsystem_table: dict[str, Tuple[Type[SubsystemHandler], Tuple[Any, ...], dict[str, Any]]] + subsystem_table: dict[str, tuple[type[SubsystemHandler], tuple[Any, ...], dict[str, Any]]] sys: ModuleType def __init__( self, - sock: str | Tuple[str, int] | socket, + sock: _SocketLike, default_window_size: int = ..., default_max_packet_size: int = ..., gss_kex: bool = ..., @@ -138,7 +139,7 @@ class Transport(Thread, ClosingContextManager): gss_trust_dns: bool = ..., ) -> None: ... 
def get_exception(self) -> Exception | None: ... - def set_subsystem_handler(self, name: str, handler: Type[SubsystemHandler], *larg: Any, **kwarg: Any) -> None: ... + def set_subsystem_handler(self, name: str, handler: type[SubsystemHandler], *larg: Any, **kwarg: Any) -> None: ... def is_authenticated(self) -> bool: ... def get_username(self) -> str | None: ... def get_banner(self) -> bytes | None: ... @@ -156,7 +157,7 @@ class Transport(Thread, ClosingContextManager): def set_hexdump(self, hexdump: bool) -> None: ... def get_hexdump(self) -> bool: ... def use_compression(self, compress: bool = ...) -> None: ... - def getpeername(self) -> Tuple[str, int]: ... + def getpeername(self) -> tuple[str, int]: ... def stop_thread(self) -> None: ... def run(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/util.pyi index 05bd5c73ffe8..ea3eebce7b35 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/util.pyi @@ -1,7 +1,8 @@ import sys +from _typeshed import Self from logging import Logger, LogRecord from types import TracebackType -from typing import IO, AnyStr, Callable, Protocol, Type, TypeVar +from typing import IO, AnyStr, Callable, Protocol, TypeVar from paramiko.config import SSHConfig, SSHConfigDict from paramiko.hostkeys import HostKeys @@ -15,7 +16,6 @@ class SupportsClose(Protocol): def close(self) -> None: ... _T = TypeVar("_T") -_TC = TypeVar("_TC", bound=SupportsClose) def inflate_long(s: bytes, always_positive: bool = ...) -> int: ... @@ -28,7 +28,7 @@ def format_binary_line(data: bytes) -> str: ... def safe_string(s: bytes) -> bytes: ... def bit_length(n: int) -> int: ... def tb_strings() -> list[str]: ... -def generate_key_bytes(hash_alg: Type[_Hash], salt: bytes, key: bytes | str, nbytes: int) -> bytes: ... 
+def generate_key_bytes(hash_alg: type[_Hash], salt: bytes, key: bytes | str, nbytes: int) -> bytes: ... def load_host_keys(filename: str) -> HostKeys: ... def parse_ssh_config(file_obj: IO[str]) -> SSHConfig: ... def lookup_ssh_host_config(hostname: str, config: SSHConfig) -> SSHConfigDict: ... @@ -44,9 +44,9 @@ def retry_on_signal(function: Callable[[], _T]) -> _T: ... def constant_time_bytes_eq(a: AnyStr, b: AnyStr) -> bool: ... class ClosingContextManager: - def __enter__(self: _TC) -> _TC: ... + def __enter__(self: Self) -> Self: ... def __exit__( - self, type: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... def clamp_value(minimum: int, val: int, maximum: int) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/win_pageant.pyi b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/win_pageant.pyi index ed62937fd7b8..6bca6a3afaee 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/win_pageant.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/paramiko/paramiko/win_pageant.pyi @@ -4,7 +4,9 @@ import sys if sys.platform == "win32": win32con_WM_COPYDATA: int def can_talk_to_agent(): ... + class COPYDATASTRUCT(ctypes.Structure): ... + class PageantConnection: def __init__(self) -> None: ... def send(self, data: bytes) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/passpy/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/passpy/METADATA.toml new file mode 100644 index 000000000000..f3e83f9c456b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/passpy/METADATA.toml @@ -0,0 +1 @@ +version = "1.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/__init__.pyi new file mode 100644 index 000000000000..98430cc71676 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/__init__.pyi @@ -0,0 +1,5 @@ +from .exceptions import RecursiveCopyMoveError as RecursiveCopyMoveError, StoreNotInitialisedError as StoreNotInitialisedError +from .store import Store as Store +from .util import gen_password as gen_password + +VERSION: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/exceptions.pyi new file mode 100644 index 000000000000..f3a532d6c274 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/exceptions.pyi @@ -0,0 +1,2 @@ +class StoreNotInitialisedError(FileNotFoundError): ... +class RecursiveCopyMoveError(OSError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/store.pyi b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/store.pyi new file mode 100644 index 000000000000..d2a27049e7d1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/store.pyi @@ -0,0 +1,31 @@ +from _typeshed import StrPath +from collections.abc import Iterator +from typing import Match + +class Store: + def __init__( + self, + gpg_bin: str = ..., + git_bin: str = ..., + store_dir: str = ..., + use_agent: bool = ..., + interactive: bool = ..., + verbose: bool = ..., + ) -> None: ... 
+ def __iter__(self) -> Iterator[str]: ... + def is_init(self) -> bool: ... + def init_store(self, gpg_ids: None | str | list[str], path: StrPath | None = ...) -> None: ... + def init_git(self) -> None: ... + def git(self, method: str, *args: object, **kwargs: object) -> None: ... + def get_key(self, path: StrPath | None) -> str | None: ... + def set_key(self, path: StrPath | None, key_data: str, force: bool = ...) -> None: ... + def remove_path(self, path: StrPath, recursive: bool = ..., force: bool = ...) -> None: ... + def gen_key( + self, path: StrPath | None, length: int, symbols: bool = ..., force: bool = ..., inplace: bool = ... + ) -> str | None: ... + def copy_path(self, old_path: StrPath, new_path: StrPath, force: bool = ...) -> None: ... + def move_path(self, old_path: StrPath, new_path: StrPath, force: bool = ...) -> None: ... + def list_dir(self, path: StrPath) -> tuple[list[str], list[str]]: ... + def iter_dir(self, path: StrPath) -> Iterator[str]: ... + def find(self, names: None | str | list[str]) -> list[str]: ... + def search(self, term: str) -> dict[str, list[tuple[str, Match[str]]]]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/util.pyi new file mode 100644 index 000000000000..e76e8abcab1a --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/passpy/passpy/util.pyi @@ -0,0 +1 @@ +def gen_password(length: int, symbols: bool = ...) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pep8-naming/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pep8-naming/METADATA.toml new file mode 100644 index 000000000000..050b0f8beee3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pep8-naming/METADATA.toml @@ -0,0 +1 @@ +version = "0.12.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pep8-naming/pep8ext_naming.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pep8-naming/pep8ext_naming.pyi new file mode 100644 index 000000000000..9c6fd6fb840f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pep8-naming/pep8ext_naming.pyi @@ -0,0 +1,28 @@ +import ast +from argparse import Namespace +from typing import Any, Generator + +__version__: str + +PYTHON_VERSION: tuple[int, int, int] +PY2: bool +CLASS_METHODS: frozenset[str] +METACLASS_BASES: frozenset[str] +METHOD_CONTAINER_NODES: set[ast.AST] + +class NamingChecker: + name: str + version: str + visitors: Any + decorator_to_type: Any + ignore_names: frozenset[str] + parents: Any + def __init__(self, tree: ast.AST, filename: str) -> None: ... + @classmethod + def add_options(cls, parser: Any) -> None: ... + @classmethod + def parse_options(cls, option: Namespace) -> None: ... + def run(self) -> Generator[tuple[int, int, str, type[Any]], None, None]: ... + def __getattr__(self, name: str) -> Any: ... # incomplete (other attributes are normally not accessed) + +def __getattr__(name: str) -> Any: ... 
# incomplete (other attributes are normally not accessed) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/playsound/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/playsound/METADATA.toml new file mode 100644 index 000000000000..3ea18392d7df --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/playsound/METADATA.toml @@ -0,0 +1 @@ +version = "1.3.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/playsound/playsound.pyi b/packages/pyright-internal/typeshed-fallback/stubs/playsound/playsound.pyi new file mode 100644 index 000000000000..cd6c1aad162c --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/playsound/playsound.pyi @@ -0,0 +1,5 @@ +import pathlib + +class PlaysoundException(Exception): ... + +def playsound(sound: str | pathlib.Path, block: bool = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/polib/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/polib/METADATA.toml index cf311fdb6550..c0a0050d4a68 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/polib/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/polib/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.1" +version = "1.1.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/polib/polib.pyi b/packages/pyright-internal/typeshed-fallback/stubs/polib/polib.pyi index e98a0b9671e0..a9f9f5bebd65 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/polib/polib.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/polib/polib.pyi @@ -1,10 +1,10 @@ import textwrap -from typing import IO, Any, Callable, Generic, List, Text, Tuple, Type, TypeVar, overload +from typing import IO, Any, Callable, Generic, Text, TypeVar, overload from typing_extensions import SupportsIndex -_TB = TypeVar("_TB", bound="_BaseEntry") -_TP = TypeVar("_TP", bound="POFile") -_TM = TypeVar("_TM", bound="MOFile") +_TB = TypeVar("_TB", 
bound=_BaseEntry) +_TP = TypeVar("_TP", bound=POFile) +_TM = TypeVar("_TM", bound=MOFile) default_encoding: str @@ -12,18 +12,18 @@ default_encoding: str # encoding: str # check_for_duplicates: bool @overload -def pofile(pofile: Text, *, klass: Type[_TP], **kwargs: Any) -> _TP: ... +def pofile(pofile: Text, *, klass: type[_TP], **kwargs: Any) -> _TP: ... @overload def pofile(pofile: Text, **kwargs: Any) -> POFile: ... @overload -def mofile(mofile: Text, *, klass: Type[_TM], **kwargs: Any) -> _TM: ... +def mofile(mofile: Text, *, klass: type[_TM], **kwargs: Any) -> _TM: ... @overload def mofile(mofile: Text, **kwargs: Any) -> MOFile: ... def detect_encoding(file: bytes | Text, binary_mode: bool = ...) -> str: ... def escape(st: Text) -> Text: ... def unescape(st: Text) -> Text: ... -class _BaseFile(List[_TB]): +class _BaseFile(list[_TB]): fpath: Text wrapwidth: int encoding: Text @@ -82,7 +82,7 @@ class _BaseEntry(object): class POEntry(_BaseEntry): comment: Text tcomment: Text - occurrences: list[Tuple[str, int]] + occurrences: list[tuple[str, int]] flags: list[Text] previous_msgctxt: Text | None previous_msgid: Text | None @@ -95,8 +95,8 @@ class POEntry(_BaseEntry): def __lt__(self, other: POEntry) -> bool: ... def __ge__(self, other: POEntry) -> bool: ... def __le__(self, other: POEntry) -> bool: ... - def __eq__(self, other: Any) -> bool: ... - def __ne__(self, other: Any) -> bool: ... + def __eq__(self, other: POEntry) -> bool: ... # type: ignore[override] + def __ne__(self, other: POEntry) -> bool: ... # type: ignore[override] def translated(self) -> bool: ... def merge(self, other: POEntry) -> None: ... 
@property @@ -108,7 +108,7 @@ class POEntry(_BaseEntry): class MOEntry(_BaseEntry): comment: Text tcomment: Text - occurrences: list[Tuple[str, int]] + occurrences: list[tuple[str, int]] flags: list[Text] previous_msgctxt: Text | None previous_msgid: Text | None @@ -119,7 +119,7 @@ class MOEntry(_BaseEntry): class _POFileParser(Generic[_TP]): fhandle: IO[Text] instance: _TP - transitions: dict[Tuple[str, str], Tuple[Callable[[], bool], str]] + transitions: dict[tuple[str, str], tuple[Callable[[], bool], str]] current_line: int current_entry: POEntry current_state: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/prettytable/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/prettytable/METADATA.toml index 5647aa0233b2..acdc22b5d432 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/prettytable/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/prettytable/METADATA.toml @@ -1 +1 @@ -version = "2.1" +version = "2.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/METADATA.toml index 01e9270d9891..5004c75556a0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/METADATA.toml @@ -1,4 +1,2 @@ -version = "3.17" -python2 = true -requires = ["types-futures"] -extra_description = "Generated with aid from mypy-protobuf v2.8" +version = "3.19.*" +extra_description = "Generated with aid from mypy-protobuf v3.2.0" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/__init__.pyi index aae1f9314354..bda5b5a7f4cc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/__init__.pyi +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/__init__.pyi @@ -1 +1 @@ -__version__: bytes +__version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/any_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/any_pb2.pyi index 063c3b241cd1..8027f052d697 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/any_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/any_pb2.pyi @@ -9,128 +9,132 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... - -# `Any` contains an arbitrary serialized protocol buffer message along with a -# URL that describes the type of the serialized message. -# -# Protobuf library provides support to pack/unpack Any values in the form -# of utility functions or additional generated methods of the Any type. -# -# Example 1: Pack and unpack a message in C++. -# -# Foo foo = ...; -# Any any; -# any.PackFrom(foo); -# ... -# if (any.UnpackTo(&foo)) { -# ... -# } -# -# Example 2: Pack and unpack a message in Java. -# -# Foo foo = ...; -# Any any = Any.pack(foo); -# ... -# if (any.is(Foo.class)) { -# foo = any.unpack(Foo.class); -# } -# -# Example 3: Pack and unpack a message in Python. -# -# foo = Foo(...) -# any = Any() -# any.Pack(foo) -# ... -# if any.Is(Foo.DESCRIPTOR): -# any.Unpack(foo) -# ... -# -# Example 4: Pack and unpack a message in Go -# -# foo := &pb.Foo{...} -# any, err := anypb.New(foo) -# if err != nil { -# ... -# } -# ... -# foo := &pb.Foo{} -# if err := any.UnmarshalTo(foo); err != nil { -# ... 
-# } -# -# The pack methods provided by protobuf library will by default use -# 'type.googleapis.com/full.type.name' as the type URL and the unpack -# methods only use the fully qualified type name after the last '/' -# in the type URL, for example "foo.bar.com/x/y.z" will yield type -# name "y.z". -# -# -# JSON -# ==== -# The JSON representation of an `Any` value uses the regular -# representation of the deserialized, embedded message, with an -# additional field `@type` which contains the type URL. Example: -# -# package google.profile; -# message Person { -# string first_name = 1; -# string last_name = 2; -# } -# -# { -# "@type": "type.googleapis.com/google.profile.Person", -# "firstName": , -# "lastName": -# } -# -# If the embedded message type is well-known and has a custom JSON -# representation, that representation will be embedded adding a field -# `value` which holds the custom JSON in addition to the `@type` -# field. Example (for message [google.protobuf.Duration][]): -# -# { -# "@type": "type.googleapis.com/google.protobuf.Duration", -# "value": "1.212s" -# } +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + class Any(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Any): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """`Any` contains an arbitrary serialized protocol buffer message along with a + URL that describes the type of the serialized message. + + Protobuf library provides support to pack/unpack Any values in the form + of utility functions or additional generated methods of the Any type. + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... 
+ if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by default use + 'type.googleapis.com/full.type.name' as the type URL and the unpack + methods only use the fully qualified type name after the last '/' + in the type URL, for example "foo.bar.com/x/y.z" will yield type + name "y.z". + + + JSON + ==== + The JSON representation of an `Any` value uses the regular + representation of the deserialized, embedded message, with an + additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a custom JSON + representation, that representation will be embedded adding a field + `value` which holds the custom JSON in addition to the `@type` + field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor TYPE_URL_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int - # A URL/resource name that uniquely identifies the type of the serialized - # protocol buffer message. This string must contain at least - # one "/" character. The last segment of the URL's path must represent - # the fully qualified name of the type (as in - # `path/google.protobuf.Duration`). The name should be in a canonical form - # (e.g., leading "." is not accepted). - # - # In practice, teams usually precompile into the binary all types that they - # expect it to use in the context of Any. 
However, for URLs which use the - # scheme `http`, `https`, or no scheme, one can optionally set up a type - # server that maps type URLs to message definitions as follows: - # - # * If no scheme is provided, `https` is assumed. - # * An HTTP GET on the URL must yield a [google.protobuf.Type][] - # value in binary format, or produce an error. - # * Applications are allowed to cache lookup results based on the - # URL, or have them precompiled into a binary to avoid any - # lookup. Therefore, binary compatibility needs to be preserved - # on changes to types. (Use versioned type names to manage - # breaking changes.) - # - # Note: this functionality is not currently available in the official - # protobuf release, and it is not used for type URLs beginning with - # type.googleapis.com. - # - # Schemes other than `http`, `https` (or the empty scheme) might be - # used with implementation specific semantics. - type_url: typing.Text = ... - # Must be a valid serialized protocol buffer of the above specified type. - value: builtins.bytes = ... + type_url: typing.Text + """A URL/resource name that uniquely identifies the type of the serialized + protocol buffer message. This string must contain at least + one "/" character. The last segment of the URL's path must represent + the fully qualified name of the type (as in + `path/google.protobuf.Duration`). The name should be in a canonical form + (e.g., leading "." is not accepted). + + In practice, teams usually precompile into the binary all types that they + expect it to use in the context of Any. However, for URLs which use the + scheme `http`, `https`, or no scheme, one can optionally set up a type + server that maps type URLs to message definitions as follows: + + * If no scheme is provided, `https` is assumed. + * An HTTP GET on the URL must yield a [google.protobuf.Type][] + value in binary format, or produce an error. 
+ * Applications are allowed to cache lookup results based on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available in the official + protobuf release, and it is not used for type URLs beginning with + type.googleapis.com. + + Schemes other than `http`, `https` (or the empty scheme) might be + used with implementation specific semantics. + """ + + value: builtins.bytes + """Must be a valid serialized protocol buffer of the above specified type.""" + def __init__(self, *, - type_url : typing.Text = ..., - value : builtins.bytes = ..., + type_url: typing.Optional[typing.Text] = ..., + value: typing.Optional[builtins.bytes] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"type_url",b"type_url",u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["type_url",b"type_url","value",b"value"]) -> None: ... global___Any = Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/api_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/api_pb2.pyi index 10d183c44b2a..d2d0ca52f125 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/api_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/api_pb2.pyi @@ -11,19 +11,20 @@ import google.protobuf.type_pb2 import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... - -# Api is a light-weight descriptor for an API Interface. 
-# -# Interfaces are also described as "protocol buffer services" in some contexts, -# such as by the "service" keyword in a .proto file, but they are different -# from API Services, which represent a concrete implementation of an interface -# as opposed to simply a description of methods and bindings. They are also -# sometimes simply referred to as "APIs" in other contexts, such as the name of -# this message itself. See https://cloud.google.com/apis/design/glossary for -# detailed terminology. +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + class Api(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Api is a light-weight descriptor for an API Interface. + + Interfaces are also described as "protocol buffer services" in some contexts, + such as by the "service" keyword in a .proto file, but they are different + from API Services, which represent a concrete implementation of an interface + as opposed to simply a description of methods and bindings. They are also + sometimes simply referred to as "APIs" in other contexts, such as the name of + this message itself. See https://cloud.google.com/apis/design/glossary for + detailed terminology. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int METHODS_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int @@ -31,61 +32,71 @@ class Api(google.protobuf.message.Message): SOURCE_CONTEXT_FIELD_NUMBER: builtins.int MIXINS_FIELD_NUMBER: builtins.int SYNTAX_FIELD_NUMBER: builtins.int - # The fully qualified name of this interface, including package name - # followed by the interface's simple name. - name: typing.Text = ... - # The methods of this interface, in unspecified order. + name: typing.Text + """The fully qualified name of this interface, including package name + followed by the interface's simple name. 
+ """ + @property - def methods(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Method]: ... - # Any metadata attached to the interface. + def methods(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Method]: + """The methods of this interface, in unspecified order.""" + pass @property - def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.type_pb2.Option]: ... - # A version string for this interface. If specified, must have the form - # `major-version.minor-version`, as in `1.10`. If the minor version is - # omitted, it defaults to zero. If the entire version field is empty, the - # major version is derived from the package name, as outlined below. If the - # field is not empty, the version in the package name will be verified to be - # consistent with what is provided here. - # - # The versioning schema uses [semantic - # versioning](http://semver.org) where the major version number - # indicates a breaking change and the minor version an additive, - # non-breaking change. Both version numbers are signals to users - # what to expect from different versions, and should be carefully - # chosen based on the product plan. - # - # The major version is also reflected in the package name of the - # interface, which must end in `v`, as in - # `google.feature.v1`. For major versions 0 and 1, the suffix can - # be omitted. Zero major versions must only be used for - # experimental, non-GA interfaces. - version: typing.Text = ... - # Source context for the protocol buffer service represented by this - # message. + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.type_pb2.Option]: + """Any metadata attached to the interface.""" + pass + version: typing.Text + """A version string for this interface. If specified, must have the form + `major-version.minor-version`, as in `1.10`. 
If the minor version is + omitted, it defaults to zero. If the entire version field is empty, the + major version is derived from the package name, as outlined below. If the + field is not empty, the version in the package name will be verified to be + consistent with what is provided here. + + The versioning schema uses [semantic + versioning](http://semver.org) where the major version number + indicates a breaking change and the minor version an additive, + non-breaking change. Both version numbers are signals to users + what to expect from different versions, and should be carefully + chosen based on the product plan. + + The major version is also reflected in the package name of the + interface, which must end in `v`, as in + `google.feature.v1`. For major versions 0 and 1, the suffix can + be omitted. Zero major versions must only be used for + experimental, non-GA interfaces. + """ + @property - def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: ... - # Included interfaces. See [Mixin][]. + def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: + """Source context for the protocol buffer service represented by this + message. + """ + pass @property - def mixins(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Mixin]: ... - # The source syntax of the service. - syntax: google.protobuf.type_pb2.Syntax.V = ... + def mixins(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Mixin]: + """Included interfaces. 
See [Mixin][].""" + pass + syntax: google.protobuf.type_pb2.Syntax.ValueType + """The source syntax of the service.""" + def __init__(self, *, - name : typing.Text = ..., - methods : typing.Optional[typing.Iterable[global___Method]] = ..., - options : typing.Optional[typing.Iterable[google.protobuf.type_pb2.Option]] = ..., - version : typing.Text = ..., - source_context : typing.Optional[google.protobuf.source_context_pb2.SourceContext] = ..., - mixins : typing.Optional[typing.Iterable[global___Mixin]] = ..., - syntax : google.protobuf.type_pb2.Syntax.V = ..., + name: typing.Optional[typing.Text] = ..., + methods: typing.Optional[typing.Iterable[global___Method]] = ..., + options: typing.Optional[typing.Iterable[google.protobuf.type_pb2.Option]] = ..., + version: typing.Optional[typing.Text] = ..., + source_context: typing.Optional[google.protobuf.source_context_pb2.SourceContext] = ..., + mixins: typing.Optional[typing.Iterable[global___Mixin]] = ..., + syntax: typing.Optional[google.protobuf.type_pb2.Syntax.ValueType] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"source_context",b"source_context"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"methods",b"methods",u"mixins",b"mixins",u"name",b"name",u"options",b"options",u"source_context",b"source_context",u"syntax",b"syntax",u"version",b"version"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["source_context",b"source_context"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["methods",b"methods","mixins",b"mixins","name",b"name","options",b"options","source_context",b"source_context","syntax",b"syntax","version",b"version"]) -> None: ... global___Api = Api -# Method represents a method of an API interface. class Method(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ """Method represents a method of an API interface.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int REQUEST_TYPE_URL_FIELD_NUMBER: builtins.int REQUEST_STREAMING_FIELD_NUMBER: builtins.int @@ -93,125 +104,136 @@ class Method(google.protobuf.message.Message): RESPONSE_STREAMING_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int SYNTAX_FIELD_NUMBER: builtins.int - # The simple name of this method. - name: typing.Text = ... - # A URL of the input message type. - request_type_url: typing.Text = ... - # If true, the request is streamed. - request_streaming: builtins.bool = ... - # The URL of the output message type. - response_type_url: typing.Text = ... - # If true, the response is streamed. - response_streaming: builtins.bool = ... - # Any metadata attached to the method. + name: typing.Text + """The simple name of this method.""" + + request_type_url: typing.Text + """A URL of the input message type.""" + + request_streaming: builtins.bool + """If true, the request is streamed.""" + + response_type_url: typing.Text + """The URL of the output message type.""" + + response_streaming: builtins.bool + """If true, the response is streamed.""" + @property - def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.type_pb2.Option]: ... - # The source syntax of this method. - syntax: google.protobuf.type_pb2.Syntax.V = ... 
+ def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.type_pb2.Option]: + """Any metadata attached to the method.""" + pass + syntax: google.protobuf.type_pb2.Syntax.ValueType + """The source syntax of this method.""" + def __init__(self, *, - name : typing.Text = ..., - request_type_url : typing.Text = ..., - request_streaming : builtins.bool = ..., - response_type_url : typing.Text = ..., - response_streaming : builtins.bool = ..., - options : typing.Optional[typing.Iterable[google.protobuf.type_pb2.Option]] = ..., - syntax : google.protobuf.type_pb2.Syntax.V = ..., + name: typing.Optional[typing.Text] = ..., + request_type_url: typing.Optional[typing.Text] = ..., + request_streaming: typing.Optional[builtins.bool] = ..., + response_type_url: typing.Optional[typing.Text] = ..., + response_streaming: typing.Optional[builtins.bool] = ..., + options: typing.Optional[typing.Iterable[google.protobuf.type_pb2.Option]] = ..., + syntax: typing.Optional[google.protobuf.type_pb2.Syntax.ValueType] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options",u"request_streaming",b"request_streaming",u"request_type_url",b"request_type_url",u"response_streaming",b"response_streaming",u"response_type_url",b"response_type_url",u"syntax",b"syntax"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options","request_streaming",b"request_streaming","request_type_url",b"request_type_url","response_streaming",b"response_streaming","response_type_url",b"response_type_url","syntax",b"syntax"]) -> None: ... global___Method = Method -# Declares an API Interface to be included in this interface. 
The including -# interface must redeclare all the methods from the included interface, but -# documentation and options are inherited as follows: -# -# - If after comment and whitespace stripping, the documentation -# string of the redeclared method is empty, it will be inherited -# from the original method. -# -# - Each annotation belonging to the service config (http, -# visibility) which is not set in the redeclared method will be -# inherited. -# -# - If an http annotation is inherited, the path pattern will be -# modified as follows. Any version prefix will be replaced by the -# version of the including interface plus the [root][] path if -# specified. -# -# Example of a simple mixin: -# -# package google.acl.v1; -# service AccessControl { -# // Get the underlying ACL object. -# rpc GetAcl(GetAclRequest) returns (Acl) { -# option (google.api.http).get = "/v1/{resource=**}:getAcl"; -# } -# } -# -# package google.storage.v2; -# service Storage { -# rpc GetAcl(GetAclRequest) returns (Acl); -# -# // Get a data record. -# rpc GetData(GetDataRequest) returns (Data) { -# option (google.api.http).get = "/v2/{resource=**}"; -# } -# } -# -# Example of a mixin configuration: -# -# apis: -# - name: google.storage.v2.Storage -# mixins: -# - name: google.acl.v1.AccessControl -# -# The mixin construct implies that all methods in `AccessControl` are -# also declared with same name and request/response types in -# `Storage`. A documentation generator or annotation processor will -# see the effective `Storage.GetAcl` method after inheriting -# documentation and annotations as follows: -# -# service Storage { -# // Get the underlying ACL object. -# rpc GetAcl(GetAclRequest) returns (Acl) { -# option (google.api.http).get = "/v2/{resource=**}:getAcl"; -# } -# ... -# } -# -# Note how the version in the path pattern changed from `v1` to `v2`. -# -# If the `root` field in the mixin is specified, it should be a -# relative path under which inherited HTTP paths are placed. 
Example: -# -# apis: -# - name: google.storage.v2.Storage -# mixins: -# - name: google.acl.v1.AccessControl -# root: acls -# -# This implies the following inherited HTTP annotation: -# -# service Storage { -# // Get the underlying ACL object. -# rpc GetAcl(GetAclRequest) returns (Acl) { -# option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; -# } -# ... -# } class Mixin(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Declares an API Interface to be included in this interface. The including + interface must redeclare all the methods from the included interface, but + documentation and options are inherited as follows: + + - If after comment and whitespace stripping, the documentation + string of the redeclared method is empty, it will be inherited + from the original method. + + - Each annotation belonging to the service config (http, + visibility) which is not set in the redeclared method will be + inherited. + + - If an http annotation is inherited, the path pattern will be + modified as follows. Any version prefix will be replaced by the + version of the including interface plus the [root][] path if + specified. + + Example of a simple mixin: + + package google.acl.v1; + service AccessControl { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v1/{resource=**}:getAcl"; + } + } + + package google.storage.v2; + service Storage { + rpc GetAcl(GetAclRequest) returns (Acl); + + // Get a data record. + rpc GetData(GetDataRequest) returns (Data) { + option (google.api.http).get = "/v2/{resource=**}"; + } + } + + Example of a mixin configuration: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + + The mixin construct implies that all methods in `AccessControl` are + also declared with same name and request/response types in + `Storage`. 
A documentation generator or annotation processor will + see the effective `Storage.GetAcl` method after inheriting + documentation and annotations as follows: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/{resource=**}:getAcl"; + } + ... + } + + Note how the version in the path pattern changed from `v1` to `v2`. + + If the `root` field in the mixin is specified, it should be a + relative path under which inherited HTTP paths are placed. Example: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + root: acls + + This implies the following inherited HTTP annotation: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; + } + ... + } + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int ROOT_FIELD_NUMBER: builtins.int - # The fully qualified name of the interface which is included. - name: typing.Text = ... - # If non-empty specifies a path under which inherited HTTP paths - # are rooted. - root: typing.Text = ... + name: typing.Text + """The fully qualified name of the interface which is included.""" + + root: typing.Text + """If non-empty specifies a path under which inherited HTTP paths + are rooted. + """ + def __init__(self, *, - name : typing.Text = ..., - root : typing.Text = ..., + name: typing.Optional[typing.Text] = ..., + root: typing.Optional[typing.Text] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"root",b"root"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["name",b"name","root",b"root"]) -> None: ... 
global___Mixin = Mixin diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi index 012d8956ba77..5431167294eb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/compiler/plugin_pb2.pyi @@ -11,189 +11,209 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -# The version number of protocol compiler. class Version(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """The version number of protocol compiler.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor MAJOR_FIELD_NUMBER: builtins.int MINOR_FIELD_NUMBER: builtins.int PATCH_FIELD_NUMBER: builtins.int SUFFIX_FIELD_NUMBER: builtins.int - major: builtins.int = ... - minor: builtins.int = ... - patch: builtins.int = ... - # A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should - # be empty for mainline stable releases. - suffix: typing.Text = ... + major: builtins.int + minor: builtins.int + patch: builtins.int + suffix: typing.Text + """A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + be empty for mainline stable releases. + """ + def __init__(self, *, - major : typing.Optional[builtins.int] = ..., - minor : typing.Optional[builtins.int] = ..., - patch : typing.Optional[builtins.int] = ..., - suffix : typing.Optional[typing.Text] = ..., + major: typing.Optional[builtins.int] = ..., + minor: typing.Optional[builtins.int] = ..., + patch: typing.Optional[builtins.int] = ..., + suffix: typing.Optional[typing.Text] = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal[u"major",b"major",u"minor",b"minor",u"patch",b"patch",u"suffix",b"suffix"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"major",b"major",u"minor",b"minor",u"patch",b"patch",u"suffix",b"suffix"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["major",b"major","minor",b"minor","patch",b"patch","suffix",b"suffix"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["major",b"major","minor",b"minor","patch",b"patch","suffix",b"suffix"]) -> None: ... global___Version = Version -# An encoded CodeGeneratorRequest is written to the plugin's stdin. class CodeGeneratorRequest(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor FILE_TO_GENERATE_FIELD_NUMBER: builtins.int PARAMETER_FIELD_NUMBER: builtins.int PROTO_FILE_FIELD_NUMBER: builtins.int COMPILER_VERSION_FIELD_NUMBER: builtins.int - # The .proto files that were explicitly listed on the command-line. The - # code generator should generate code only for these files. Each file's - # descriptor will be included in proto_file, below. @property - def file_to_generate(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... - # The generator parameter passed on the command-line. - parameter: typing.Text = ... - # FileDescriptorProtos for all files in files_to_generate and everything - # they import. The files will appear in topological order, so each file - # appears before any file that imports it. - # - # protoc guarantees that all proto_files will be written after - # the fields above, even though this is not technically guaranteed by the - # protobuf wire format. 
This theoretically could allow a plugin to stream - # in the FileDescriptorProtos and handle them one by one rather than read - # the entire set into memory at once. However, as of this writing, this - # is not similarly optimized on protoc's end -- it will store all fields in - # memory at once before sending them to the plugin. - # - # Type names of fields and extensions in the FileDescriptorProto are always - # fully qualified. + def file_to_generate(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """The .proto files that were explicitly listed on the command-line. The + code generator should generate code only for these files. Each file's + descriptor will be included in proto_file, below. + """ + pass + parameter: typing.Text + """The generator parameter passed on the command-line.""" + @property - def proto_file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: ... - # The version number of protocol compiler. + def proto_file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.descriptor_pb2.FileDescriptorProto]: + """FileDescriptorProtos for all files in files_to_generate and everything + they import. The files will appear in topological order, so each file + appears before any file that imports it. + + protoc guarantees that all proto_files will be written after + the fields above, even though this is not technically guaranteed by the + protobuf wire format. This theoretically could allow a plugin to stream + in the FileDescriptorProtos and handle them one by one rather than read + the entire set into memory at once. However, as of this writing, this + is not similarly optimized on protoc's end -- it will store all fields in + memory at once before sending them to the plugin. + + Type names of fields and extensions in the FileDescriptorProto are always + fully qualified. 
+ """ + pass @property - def compiler_version(self) -> global___Version: ... + def compiler_version(self) -> global___Version: + """The version number of protocol compiler.""" + pass def __init__(self, *, - file_to_generate : typing.Optional[typing.Iterable[typing.Text]] = ..., - parameter : typing.Optional[typing.Text] = ..., - proto_file : typing.Optional[typing.Iterable[google.protobuf.descriptor_pb2.FileDescriptorProto]] = ..., - compiler_version : typing.Optional[global___Version] = ..., + file_to_generate: typing.Optional[typing.Iterable[typing.Text]] = ..., + parameter: typing.Optional[typing.Text] = ..., + proto_file: typing.Optional[typing.Iterable[google.protobuf.descriptor_pb2.FileDescriptorProto]] = ..., + compiler_version: typing.Optional[global___Version] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"compiler_version",b"compiler_version",u"parameter",b"parameter"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"compiler_version",b"compiler_version",u"file_to_generate",b"file_to_generate",u"parameter",b"parameter",u"proto_file",b"proto_file"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["compiler_version",b"compiler_version","parameter",b"parameter"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["compiler_version",b"compiler_version","file_to_generate",b"file_to_generate","parameter",b"parameter","proto_file",b"proto_file"]) -> None: ... global___CodeGeneratorRequest = CodeGeneratorRequest -# The plugin writes an encoded CodeGeneratorResponse to stdout. class CodeGeneratorResponse(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - # Sync with code_generator.h. 
+ """The plugin writes an encoded CodeGeneratorResponse to stdout.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _Feature: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _FeatureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CodeGeneratorResponse._Feature.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FEATURE_NONE: CodeGeneratorResponse._Feature.ValueType # 0 + FEATURE_PROTO3_OPTIONAL: CodeGeneratorResponse._Feature.ValueType # 1 class Feature(_Feature, metaclass=_FeatureEnumTypeWrapper): + """Sync with code_generator.h.""" pass - class _Feature: - V = typing.NewType('V', builtins.int) - class _FeatureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Feature.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - FEATURE_NONE = CodeGeneratorResponse.Feature.V(0) - FEATURE_PROTO3_OPTIONAL = CodeGeneratorResponse.Feature.V(1) - FEATURE_NONE = CodeGeneratorResponse.Feature.V(0) - FEATURE_PROTO3_OPTIONAL = CodeGeneratorResponse.Feature.V(1) + FEATURE_NONE: CodeGeneratorResponse.Feature.ValueType # 0 + FEATURE_PROTO3_OPTIONAL: CodeGeneratorResponse.Feature.ValueType # 1 - # Represents a single generated file. class File(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Represents a single generated file.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int INSERTION_POINT_FIELD_NUMBER: builtins.int CONTENT_FIELD_NUMBER: builtins.int GENERATED_CODE_INFO_FIELD_NUMBER: builtins.int - # The file name, relative to the output directory. The name must not - # contain "." or ".." components and must be relative, not be absolute (so, - # the file cannot lie outside the output directory). "/" must be used as - # the path separator, not "\". 
- # - # If the name is omitted, the content will be appended to the previous - # file. This allows the generator to break large files into small chunks, - # and allows the generated text to be streamed back to protoc so that large - # files need not reside completely in memory at one time. Note that as of - # this writing protoc does not optimize for this -- it will read the entire - # CodeGeneratorResponse before writing files to disk. - name: typing.Text = ... - # If non-empty, indicates that the named file should already exist, and the - # content here is to be inserted into that file at a defined insertion - # point. This feature allows a code generator to extend the output - # produced by another code generator. The original generator may provide - # insertion points by placing special annotations in the file that look - # like: - # @@protoc_insertion_point(NAME) - # The annotation can have arbitrary text before and after it on the line, - # which allows it to be placed in a comment. NAME should be replaced with - # an identifier naming the point -- this is what other generators will use - # as the insertion_point. Code inserted at this point will be placed - # immediately above the line containing the insertion point (thus multiple - # insertions to the same point will come out in the order they were added). - # The double-@ is intended to make it unlikely that the generated code - # could contain things that look like insertion points by accident. - # - # For example, the C++ code generator places the following line in the - # .pb.h files that it generates: - # // @@protoc_insertion_point(namespace_scope) - # This line appears within the scope of the file's package namespace, but - # outside of any particular class. Another plugin can then specify the - # insertion_point "namespace_scope" to generate additional classes or - # other declarations that should be placed in this scope. 
- # - # Note that if the line containing the insertion point begins with - # whitespace, the same whitespace will be added to every line of the - # inserted text. This is useful for languages like Python, where - # indentation matters. In these languages, the insertion point comment - # should be indented the same amount as any inserted code will need to be - # in order to work correctly in that context. - # - # The code generator that generates the initial file and the one which - # inserts into it must both run as part of a single invocation of protoc. - # Code generators are executed in the order in which they appear on the - # command line. - # - # If |insertion_point| is present, |name| must also be present. - insertion_point: typing.Text = ... - # The file contents. - content: typing.Text = ... - # Information describing the file content being inserted. If an insertion - # point is used, this information will be appropriately offset and inserted - # into the code generation metadata for the generated files. + name: typing.Text + """The file name, relative to the output directory. The name must not + contain "." or ".." components and must be relative, not be absolute (so, + the file cannot lie outside the output directory). "/" must be used as + the path separator, not "\\". + + If the name is omitted, the content will be appended to the previous + file. This allows the generator to break large files into small chunks, + and allows the generated text to be streamed back to protoc so that large + files need not reside completely in memory at one time. Note that as of + this writing protoc does not optimize for this -- it will read the entire + CodeGeneratorResponse before writing files to disk. + """ + + insertion_point: typing.Text + """If non-empty, indicates that the named file should already exist, and the + content here is to be inserted into that file at a defined insertion + point. 
This feature allows a code generator to extend the output + produced by another code generator. The original generator may provide + insertion points by placing special annotations in the file that look + like: + @@protoc_insertion_point(NAME) + The annotation can have arbitrary text before and after it on the line, + which allows it to be placed in a comment. NAME should be replaced with + an identifier naming the point -- this is what other generators will use + as the insertion_point. Code inserted at this point will be placed + immediately above the line containing the insertion point (thus multiple + insertions to the same point will come out in the order they were added). + The double-@ is intended to make it unlikely that the generated code + could contain things that look like insertion points by accident. + + For example, the C++ code generator places the following line in the + .pb.h files that it generates: + // @@protoc_insertion_point(namespace_scope) + This line appears within the scope of the file's package namespace, but + outside of any particular class. Another plugin can then specify the + insertion_point "namespace_scope" to generate additional classes or + other declarations that should be placed in this scope. + + Note that if the line containing the insertion point begins with + whitespace, the same whitespace will be added to every line of the + inserted text. This is useful for languages like Python, where + indentation matters. In these languages, the insertion point comment + should be indented the same amount as any inserted code will need to be + in order to work correctly in that context. + + The code generator that generates the initial file and the one which + inserts into it must both run as part of a single invocation of protoc. + Code generators are executed in the order in which they appear on the + command line. + + If |insertion_point| is present, |name| must also be present. 
+ """ + + content: typing.Text + """The file contents.""" + @property - def generated_code_info(self) -> google.protobuf.descriptor_pb2.GeneratedCodeInfo: ... + def generated_code_info(self) -> google.protobuf.descriptor_pb2.GeneratedCodeInfo: + """Information describing the file content being inserted. If an insertion + point is used, this information will be appropriately offset and inserted + into the code generation metadata for the generated files. + """ + pass def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - insertion_point : typing.Optional[typing.Text] = ..., - content : typing.Optional[typing.Text] = ..., - generated_code_info : typing.Optional[google.protobuf.descriptor_pb2.GeneratedCodeInfo] = ..., + name: typing.Optional[typing.Text] = ..., + insertion_point: typing.Optional[typing.Text] = ..., + content: typing.Optional[typing.Text] = ..., + generated_code_info: typing.Optional[google.protobuf.descriptor_pb2.GeneratedCodeInfo] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"content",b"content",u"generated_code_info",b"generated_code_info",u"insertion_point",b"insertion_point",u"name",b"name"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"content",b"content",u"generated_code_info",b"generated_code_info",u"insertion_point",b"insertion_point",u"name",b"name"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["content",b"content","generated_code_info",b"generated_code_info","insertion_point",b"insertion_point","name",b"name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["content",b"content","generated_code_info",b"generated_code_info","insertion_point",b"insertion_point","name",b"name"]) -> None: ... ERROR_FIELD_NUMBER: builtins.int SUPPORTED_FEATURES_FIELD_NUMBER: builtins.int FILE_FIELD_NUMBER: builtins.int - # Error message. If non-empty, code generation failed. 
The plugin process - # should exit with status code zero even if it reports an error in this way. - # - # This should be used to indicate errors in .proto files which prevent the - # code generator from generating correct code. Errors which indicate a - # problem in protoc itself -- such as the input CodeGeneratorRequest being - # unparseable -- should be reported by writing a message to stderr and - # exiting with a non-zero status code. - error: typing.Text = ... - # A bitmask of supported features that the code generator supports. - # This is a bitwise "or" of values from the Feature enum. - supported_features: builtins.int = ... + error: typing.Text + """Error message. If non-empty, code generation failed. The plugin process + should exit with status code zero even if it reports an error in this way. + + This should be used to indicate errors in .proto files which prevent the + code generator from generating correct code. Errors which indicate a + problem in protoc itself -- such as the input CodeGeneratorRequest being + unparseable -- should be reported by writing a message to stderr and + exiting with a non-zero status code. + """ + + supported_features: builtins.int + """A bitmask of supported features that the code generator supports. + This is a bitwise "or" of values from the Feature enum. + """ + @property def file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CodeGeneratorResponse.File]: ... def __init__(self, *, - error : typing.Optional[typing.Text] = ..., - supported_features : typing.Optional[builtins.int] = ..., - file : typing.Optional[typing.Iterable[global___CodeGeneratorResponse.File]] = ..., + error: typing.Optional[typing.Text] = ..., + supported_features: typing.Optional[builtins.int] = ..., + file: typing.Optional[typing.Iterable[global___CodeGeneratorResponse.File]] = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal[u"error",b"error",u"supported_features",b"supported_features"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"error",b"error",u"file",b"file",u"supported_features",b"supported_features"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["error",b"error","supported_features",b"supported_features"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["error",b"error","file",b"file","supported_features",b"supported_features"]) -> None: ... global___CodeGeneratorResponse = CodeGeneratorResponse diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor.pyi index f79fa775b252..690c312b8868 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor.pyi @@ -42,20 +42,19 @@ class _NestedDescriptorBase(DescriptorBase): serialized_end=..., serialized_options=..., ) -> None: ... - def GetTopLevelContainingType(self): ... def CopyToProto(self, proto): ... class Descriptor(_NestedDescriptorBase): def __new__( cls, - name, - full_name, - filename, - containing_type, - fields, - nested_types, - enum_types, - extensions, + name=..., + full_name=..., + filename=..., + containing_type=..., + fields=..., + nested_types=..., + enum_types=..., + extensions=..., options=..., serialized_options=..., is_extendable=..., @@ -162,9 +161,10 @@ class FieldDescriptor(DescriptorBase): extension_scope, options=..., serialized_options=..., - file=..., has_default_value=..., containing_oneof=..., + json_name=..., + file=..., create_key=..., ): ... 
name: Any @@ -199,9 +199,10 @@ class FieldDescriptor(DescriptorBase): extension_scope, options=..., serialized_options=..., - file=..., has_default_value=..., containing_oneof=..., + json_name=..., + file=..., create_key=..., ) -> None: ... @staticmethod @@ -267,10 +268,10 @@ class OneofDescriptor: class ServiceDescriptor(_NestedDescriptorBase): def __new__( cls, - name, - full_name, - index, - methods, + name=..., + full_name=..., + index=..., + methods=..., options=..., serialized_options=..., file=..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor_pb2.pyi index 2a8a5b11f2b0..a3e816b9e82c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/descriptor_pb2.pyi @@ -10,25 +10,26 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -# The protocol compiler can output a FileDescriptorSet containing the .proto -# files it parses. class FileDescriptorSet(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """The protocol compiler can output a FileDescriptorSet containing the .proto + files it parses. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor FILE_FIELD_NUMBER: builtins.int @property def file(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FileDescriptorProto]: ... def __init__(self, *, - file : typing.Optional[typing.Iterable[global___FileDescriptorProto]] = ..., + file: typing.Optional[typing.Iterable[global___FileDescriptorProto]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"file",b"file"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["file",b"file"]) -> None: ... global___FileDescriptorSet = FileDescriptorSet -# Describes a complete .proto file. class FileDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a complete .proto file.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int PACKAGE_FIELD_NUMBER: builtins.int DEPENDENCY_FIELD_NUMBER: builtins.int @@ -41,23 +42,30 @@ class FileDescriptorProto(google.protobuf.message.Message): OPTIONS_FIELD_NUMBER: builtins.int SOURCE_CODE_INFO_FIELD_NUMBER: builtins.int SYNTAX_FIELD_NUMBER: builtins.int - # file name, relative to root of source tree - name: typing.Text = ... - # e.g. "foo", "foo.bar", etc. - package: typing.Text = ... - # Names of files imported by this file. + name: typing.Text + """file name, relative to root of source tree""" + + package: typing.Text + """e.g. "foo", "foo.bar", etc.""" + @property - def dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... - # Indexes of the public imported files in the dependency list above. + def dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """Names of files imported by this file.""" + pass @property - def public_dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - # Indexes of the weak imported files in the dependency list. - # For Google-internal migration only. Do not use. + def public_dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Indexes of the public imported files in the dependency list above.""" + pass @property - def weak_dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - # All top-level definitions in this file. 
+ def weak_dependency(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Indexes of the weak imported files in the dependency list. + For Google-internal migration only. Do not use. + """ + pass @property - def message_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto]: ... + def message_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto]: + """All top-level definitions in this file.""" + pass @property def enum_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto]: ... @property @@ -66,75 +74,84 @@ class FileDescriptorProto(google.protobuf.message.Message): def extension(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldDescriptorProto]: ... @property def options(self) -> global___FileOptions: ... - # This field contains optional information about the original source code. - # You may safely remove this entire field without harming runtime - # functionality of the descriptors -- the information is needed only by - # development tools. @property - def source_code_info(self) -> global___SourceCodeInfo: ... - # The syntax of the proto file. - # The supported values are "proto2" and "proto3". - syntax: typing.Text = ... + def source_code_info(self) -> global___SourceCodeInfo: + """This field contains optional information about the original source code. + You may safely remove this entire field without harming runtime + functionality of the descriptors -- the information is needed only by + development tools. + """ + pass + syntax: typing.Text + """The syntax of the proto file. + The supported values are "proto2" and "proto3". 
+ """ + def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - package : typing.Optional[typing.Text] = ..., - dependency : typing.Optional[typing.Iterable[typing.Text]] = ..., - public_dependency : typing.Optional[typing.Iterable[builtins.int]] = ..., - weak_dependency : typing.Optional[typing.Iterable[builtins.int]] = ..., - message_type : typing.Optional[typing.Iterable[global___DescriptorProto]] = ..., - enum_type : typing.Optional[typing.Iterable[global___EnumDescriptorProto]] = ..., - service : typing.Optional[typing.Iterable[global___ServiceDescriptorProto]] = ..., - extension : typing.Optional[typing.Iterable[global___FieldDescriptorProto]] = ..., - options : typing.Optional[global___FileOptions] = ..., - source_code_info : typing.Optional[global___SourceCodeInfo] = ..., - syntax : typing.Optional[typing.Text] = ..., + name: typing.Optional[typing.Text] = ..., + package: typing.Optional[typing.Text] = ..., + dependency: typing.Optional[typing.Iterable[typing.Text]] = ..., + public_dependency: typing.Optional[typing.Iterable[builtins.int]] = ..., + weak_dependency: typing.Optional[typing.Iterable[builtins.int]] = ..., + message_type: typing.Optional[typing.Iterable[global___DescriptorProto]] = ..., + enum_type: typing.Optional[typing.Iterable[global___EnumDescriptorProto]] = ..., + service: typing.Optional[typing.Iterable[global___ServiceDescriptorProto]] = ..., + extension: typing.Optional[typing.Iterable[global___FieldDescriptorProto]] = ..., + options: typing.Optional[global___FileOptions] = ..., + source_code_info: typing.Optional[global___SourceCodeInfo] = ..., + syntax: typing.Optional[typing.Text] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options",u"package",b"package",u"source_code_info",b"source_code_info",u"syntax",b"syntax"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal[u"dependency",b"dependency",u"enum_type",b"enum_type",u"extension",b"extension",u"message_type",b"message_type",u"name",b"name",u"options",b"options",u"package",b"package",u"public_dependency",b"public_dependency",u"service",b"service",u"source_code_info",b"source_code_info",u"syntax",b"syntax",u"weak_dependency",b"weak_dependency"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options","package",b"package","source_code_info",b"source_code_info","syntax",b"syntax"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["dependency",b"dependency","enum_type",b"enum_type","extension",b"extension","message_type",b"message_type","name",b"name","options",b"options","package",b"package","public_dependency",b"public_dependency","service",b"service","source_code_info",b"source_code_info","syntax",b"syntax","weak_dependency",b"weak_dependency"]) -> None: ... global___FileDescriptorProto = FileDescriptorProto -# Describes a message type. class DescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a message type.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor class ExtensionRange(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor START_FIELD_NUMBER: builtins.int END_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int - # Inclusive. - start: builtins.int = ... - # Exclusive. - end: builtins.int = ... + start: builtins.int + """Inclusive.""" + + end: builtins.int + """Exclusive.""" + @property def options(self) -> global___ExtensionRangeOptions: ... 
def __init__(self, *, - start : typing.Optional[builtins.int] = ..., - end : typing.Optional[builtins.int] = ..., - options : typing.Optional[global___ExtensionRangeOptions] = ..., + start: typing.Optional[builtins.int] = ..., + end: typing.Optional[builtins.int] = ..., + options: typing.Optional[global___ExtensionRangeOptions] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"end",b"end",u"options",b"options",u"start",b"start"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"end",b"end",u"options",b"options",u"start",b"start"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end",b"end","options",b"options","start",b"start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end",b"end","options",b"options","start",b"start"]) -> None: ... - # Range of reserved tag numbers. Reserved tag numbers may not be used by - # fields or extension ranges in the same message. Reserved ranges may - # not overlap. class ReservedRange(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Range of reserved tag numbers. Reserved tag numbers may not be used by + fields or extension ranges in the same message. Reserved ranges may + not overlap. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor START_FIELD_NUMBER: builtins.int END_FIELD_NUMBER: builtins.int - # Inclusive. - start: builtins.int = ... - # Exclusive. - end: builtins.int = ... + start: builtins.int + """Inclusive.""" + + end: builtins.int + """Exclusive.""" + def __init__(self, *, - start : typing.Optional[builtins.int] = ..., - end : typing.Optional[builtins.int] = ..., + start: typing.Optional[builtins.int] = ..., + end: typing.Optional[builtins.int] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"end",b"end",u"start",b"start"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal[u"end",b"end",u"start",b"start"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end",b"end","start",b"start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end",b"end","start",b"start"]) -> None: ... NAME_FIELD_NUMBER: builtins.int FIELD_FIELD_NUMBER: builtins.int @@ -146,7 +163,7 @@ class DescriptorProto(google.protobuf.message.Message): OPTIONS_FIELD_NUMBER: builtins.int RESERVED_RANGE_FIELD_NUMBER: builtins.int RESERVED_NAME_FIELD_NUMBER: builtins.int - name: typing.Text = ... + name: typing.Text @property def field(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FieldDescriptorProto]: ... @property @@ -163,130 +180,161 @@ class DescriptorProto(google.protobuf.message.Message): def options(self) -> global___MessageOptions: ... @property def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DescriptorProto.ReservedRange]: ... - # Reserved field names, which may not be used by fields in the same message. - # A given name may only be reserved once. @property - def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... + def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """Reserved field names, which may not be used by fields in the same message. + A given name may only be reserved once. 
+ """ + pass def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - field : typing.Optional[typing.Iterable[global___FieldDescriptorProto]] = ..., - extension : typing.Optional[typing.Iterable[global___FieldDescriptorProto]] = ..., - nested_type : typing.Optional[typing.Iterable[global___DescriptorProto]] = ..., - enum_type : typing.Optional[typing.Iterable[global___EnumDescriptorProto]] = ..., - extension_range : typing.Optional[typing.Iterable[global___DescriptorProto.ExtensionRange]] = ..., - oneof_decl : typing.Optional[typing.Iterable[global___OneofDescriptorProto]] = ..., - options : typing.Optional[global___MessageOptions] = ..., - reserved_range : typing.Optional[typing.Iterable[global___DescriptorProto.ReservedRange]] = ..., - reserved_name : typing.Optional[typing.Iterable[typing.Text]] = ..., + name: typing.Optional[typing.Text] = ..., + field: typing.Optional[typing.Iterable[global___FieldDescriptorProto]] = ..., + extension: typing.Optional[typing.Iterable[global___FieldDescriptorProto]] = ..., + nested_type: typing.Optional[typing.Iterable[global___DescriptorProto]] = ..., + enum_type: typing.Optional[typing.Iterable[global___EnumDescriptorProto]] = ..., + extension_range: typing.Optional[typing.Iterable[global___DescriptorProto.ExtensionRange]] = ..., + oneof_decl: typing.Optional[typing.Iterable[global___OneofDescriptorProto]] = ..., + options: typing.Optional[global___MessageOptions] = ..., + reserved_range: typing.Optional[typing.Iterable[global___DescriptorProto.ReservedRange]] = ..., + reserved_name: typing.Optional[typing.Iterable[typing.Text]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options"]) -> builtins.bool: ... 
- def ClearField(self, field_name: typing_extensions.Literal[u"enum_type",b"enum_type",u"extension",b"extension",u"extension_range",b"extension_range",u"field",b"field",u"name",b"name",u"nested_type",b"nested_type",u"oneof_decl",b"oneof_decl",u"options",b"options",u"reserved_name",b"reserved_name",u"reserved_range",b"reserved_range"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["enum_type",b"enum_type","extension",b"extension","extension_range",b"extension_range","field",b"field","name",b"name","nested_type",b"nested_type","oneof_decl",b"oneof_decl","options",b"options","reserved_name",b"reserved_name","reserved_range",b"reserved_range"]) -> None: ... global___DescriptorProto = DescriptorProto class ExtensionRangeOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # The parser stores options it doesn't recognize here. See above. @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + pass def __init__(self, *, - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["uninterpreted_option",b"uninterpreted_option"]) -> None: ... 
global___ExtensionRangeOptions = ExtensionRangeOptions -# Describes a field within a message. class FieldDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a field within a message.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _Type: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Type.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TYPE_DOUBLE: FieldDescriptorProto._Type.ValueType # 1 + """0 is reserved for errors. + Order is weird for historical reasons. + """ + + TYPE_FLOAT: FieldDescriptorProto._Type.ValueType # 2 + TYPE_INT64: FieldDescriptorProto._Type.ValueType # 3 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + negative values are likely. + """ + + TYPE_UINT64: FieldDescriptorProto._Type.ValueType # 4 + TYPE_INT32: FieldDescriptorProto._Type.ValueType # 5 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + negative values are likely. + """ + + TYPE_FIXED64: FieldDescriptorProto._Type.ValueType # 6 + TYPE_FIXED32: FieldDescriptorProto._Type.ValueType # 7 + TYPE_BOOL: FieldDescriptorProto._Type.ValueType # 8 + TYPE_STRING: FieldDescriptorProto._Type.ValueType # 9 + TYPE_GROUP: FieldDescriptorProto._Type.ValueType # 10 + """Tag-delimited aggregate. + Group type is deprecated and not supported in proto3. However, Proto3 + implementations should still be able to parse the group wire format and + treat group fields as unknown fields. 
+ """ + + TYPE_MESSAGE: FieldDescriptorProto._Type.ValueType # 11 + """Length-delimited aggregate.""" + + TYPE_BYTES: FieldDescriptorProto._Type.ValueType # 12 + """New in version 2.""" + + TYPE_UINT32: FieldDescriptorProto._Type.ValueType # 13 + TYPE_ENUM: FieldDescriptorProto._Type.ValueType # 14 + TYPE_SFIXED32: FieldDescriptorProto._Type.ValueType # 15 + TYPE_SFIXED64: FieldDescriptorProto._Type.ValueType # 16 + TYPE_SINT32: FieldDescriptorProto._Type.ValueType # 17 + """Uses ZigZag encoding.""" + + TYPE_SINT64: FieldDescriptorProto._Type.ValueType # 18 + """Uses ZigZag encoding.""" + class Type(_Type, metaclass=_TypeEnumTypeWrapper): pass - class _Type: - V = typing.NewType('V', builtins.int) - class _TypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Type.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # 0 is reserved for errors. - # Order is weird for historical reasons. - TYPE_DOUBLE = FieldDescriptorProto.Type.V(1) - TYPE_FLOAT = FieldDescriptorProto.Type.V(2) - # Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if - # negative values are likely. - TYPE_INT64 = FieldDescriptorProto.Type.V(3) - TYPE_UINT64 = FieldDescriptorProto.Type.V(4) - # Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if - # negative values are likely. - TYPE_INT32 = FieldDescriptorProto.Type.V(5) - TYPE_FIXED64 = FieldDescriptorProto.Type.V(6) - TYPE_FIXED32 = FieldDescriptorProto.Type.V(7) - TYPE_BOOL = FieldDescriptorProto.Type.V(8) - TYPE_STRING = FieldDescriptorProto.Type.V(9) - # Tag-delimited aggregate. - # Group type is deprecated and not supported in proto3. However, Proto3 - # implementations should still be able to parse the group wire format and - # treat group fields as unknown fields. - TYPE_GROUP = FieldDescriptorProto.Type.V(10) - # Length-delimited aggregate. - TYPE_MESSAGE = FieldDescriptorProto.Type.V(11) - # New in version 2. 
- TYPE_BYTES = FieldDescriptorProto.Type.V(12) - TYPE_UINT32 = FieldDescriptorProto.Type.V(13) - TYPE_ENUM = FieldDescriptorProto.Type.V(14) - TYPE_SFIXED32 = FieldDescriptorProto.Type.V(15) - TYPE_SFIXED64 = FieldDescriptorProto.Type.V(16) - # Uses ZigZag encoding. - TYPE_SINT32 = FieldDescriptorProto.Type.V(17) - # Uses ZigZag encoding. - TYPE_SINT64 = FieldDescriptorProto.Type.V(18) - - # 0 is reserved for errors. - # Order is weird for historical reasons. - TYPE_DOUBLE = FieldDescriptorProto.Type.V(1) - TYPE_FLOAT = FieldDescriptorProto.Type.V(2) - # Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if - # negative values are likely. - TYPE_INT64 = FieldDescriptorProto.Type.V(3) - TYPE_UINT64 = FieldDescriptorProto.Type.V(4) - # Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if - # negative values are likely. - TYPE_INT32 = FieldDescriptorProto.Type.V(5) - TYPE_FIXED64 = FieldDescriptorProto.Type.V(6) - TYPE_FIXED32 = FieldDescriptorProto.Type.V(7) - TYPE_BOOL = FieldDescriptorProto.Type.V(8) - TYPE_STRING = FieldDescriptorProto.Type.V(9) - # Tag-delimited aggregate. - # Group type is deprecated and not supported in proto3. However, Proto3 - # implementations should still be able to parse the group wire format and - # treat group fields as unknown fields. - TYPE_GROUP = FieldDescriptorProto.Type.V(10) - # Length-delimited aggregate. - TYPE_MESSAGE = FieldDescriptorProto.Type.V(11) - # New in version 2. - TYPE_BYTES = FieldDescriptorProto.Type.V(12) - TYPE_UINT32 = FieldDescriptorProto.Type.V(13) - TYPE_ENUM = FieldDescriptorProto.Type.V(14) - TYPE_SFIXED32 = FieldDescriptorProto.Type.V(15) - TYPE_SFIXED64 = FieldDescriptorProto.Type.V(16) - # Uses ZigZag encoding. - TYPE_SINT32 = FieldDescriptorProto.Type.V(17) - # Uses ZigZag encoding. - TYPE_SINT64 = FieldDescriptorProto.Type.V(18) + TYPE_DOUBLE: FieldDescriptorProto.Type.ValueType # 1 + """0 is reserved for errors. + Order is weird for historical reasons. 
+ """ + + TYPE_FLOAT: FieldDescriptorProto.Type.ValueType # 2 + TYPE_INT64: FieldDescriptorProto.Type.ValueType # 3 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + negative values are likely. + """ + + TYPE_UINT64: FieldDescriptorProto.Type.ValueType # 4 + TYPE_INT32: FieldDescriptorProto.Type.ValueType # 5 + """Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + negative values are likely. + """ + + TYPE_FIXED64: FieldDescriptorProto.Type.ValueType # 6 + TYPE_FIXED32: FieldDescriptorProto.Type.ValueType # 7 + TYPE_BOOL: FieldDescriptorProto.Type.ValueType # 8 + TYPE_STRING: FieldDescriptorProto.Type.ValueType # 9 + TYPE_GROUP: FieldDescriptorProto.Type.ValueType # 10 + """Tag-delimited aggregate. + Group type is deprecated and not supported in proto3. However, Proto3 + implementations should still be able to parse the group wire format and + treat group fields as unknown fields. + """ + + TYPE_MESSAGE: FieldDescriptorProto.Type.ValueType # 11 + """Length-delimited aggregate.""" + + TYPE_BYTES: FieldDescriptorProto.Type.ValueType # 12 + """New in version 2.""" + + TYPE_UINT32: FieldDescriptorProto.Type.ValueType # 13 + TYPE_ENUM: FieldDescriptorProto.Type.ValueType # 14 + TYPE_SFIXED32: FieldDescriptorProto.Type.ValueType # 15 + TYPE_SFIXED64: FieldDescriptorProto.Type.ValueType # 16 + TYPE_SINT32: FieldDescriptorProto.Type.ValueType # 17 + """Uses ZigZag encoding.""" + + TYPE_SINT64: FieldDescriptorProto.Type.ValueType # 18 + """Uses ZigZag encoding.""" + + + class _Label: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _LabelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldDescriptorProto._Label.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LABEL_OPTIONAL: FieldDescriptorProto._Label.ValueType # 1 + """0 is reserved for errors""" + + LABEL_REQUIRED: 
FieldDescriptorProto._Label.ValueType # 2 + LABEL_REPEATED: FieldDescriptorProto._Label.ValueType # 3 class Label(_Label, metaclass=_LabelEnumTypeWrapper): pass - class _Label: - V = typing.NewType('V', builtins.int) - class _LabelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Label.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # 0 is reserved for errors - LABEL_OPTIONAL = FieldDescriptorProto.Label.V(1) - LABEL_REQUIRED = FieldDescriptorProto.Label.V(2) - LABEL_REPEATED = FieldDescriptorProto.Label.V(3) - - # 0 is reserved for errors - LABEL_OPTIONAL = FieldDescriptorProto.Label.V(1) - LABEL_REQUIRED = FieldDescriptorProto.Label.V(2) - LABEL_REPEATED = FieldDescriptorProto.Label.V(3) + + LABEL_OPTIONAL: FieldDescriptorProto.Label.ValueType # 1 + """0 is reserved for errors""" + + LABEL_REQUIRED: FieldDescriptorProto.Label.ValueType # 2 + LABEL_REPEATED: FieldDescriptorProto.Label.ValueType # 3 NAME_FIELD_NUMBER: builtins.int NUMBER_FIELD_NUMBER: builtins.int @@ -299,280 +347,315 @@ class FieldDescriptorProto(google.protobuf.message.Message): JSON_NAME_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int PROTO3_OPTIONAL_FIELD_NUMBER: builtins.int - name: typing.Text = ... - number: builtins.int = ... - label: global___FieldDescriptorProto.Label.V = ... - # If type_name is set, this need not be set. If both this and type_name - # are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. - type: global___FieldDescriptorProto.Type.V = ... - # For message and enum types, this is the name of the type. If the name - # starts with a '.', it is fully-qualified. Otherwise, C++-like scoping - # rules are used to find the type (i.e. first the nested types within this - # message are searched, then within the parent, on up to the root - # namespace). - type_name: typing.Text = ... - # For extensions, this is the name of the type being extended. 
It is - # resolved in the same manner as type_name. - extendee: typing.Text = ... - # For numeric types, contains the original text representation of the value. - # For booleans, "true" or "false". - # For strings, contains the default text contents (not escaped in any way). - # For bytes, contains the C escaped value. All bytes >= 128 are escaped. - # TODO(kenton): Base-64 encode? - default_value: typing.Text = ... - # If set, gives the index of a oneof in the containing type's oneof_decl - # list. This field is a member of that oneof. - oneof_index: builtins.int = ... - # JSON name of this field. The value is set by protocol compiler. If the - # user has set a "json_name" option on this field, that option's value - # will be used. Otherwise, it's deduced from the field's name by converting - # it to camelCase. - json_name: typing.Text = ... + name: typing.Text + number: builtins.int + label: global___FieldDescriptorProto.Label.ValueType + type: global___FieldDescriptorProto.Type.ValueType + """If type_name is set, this need not be set. If both this and type_name + are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + """ + + type_name: typing.Text + """For message and enum types, this is the name of the type. If the name + starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + rules are used to find the type (i.e. first the nested types within this + message are searched, then within the parent, on up to the root + namespace). + """ + + extendee: typing.Text + """For extensions, this is the name of the type being extended. It is + resolved in the same manner as type_name. + """ + + default_value: typing.Text + """For numeric types, contains the original text representation of the value. + For booleans, "true" or "false". + For strings, contains the default text contents (not escaped in any way). + For bytes, contains the C escaped value. All bytes >= 128 are escaped. + TODO(kenton): Base-64 encode? 
+ """ + + oneof_index: builtins.int + """If set, gives the index of a oneof in the containing type's oneof_decl + list. This field is a member of that oneof. + """ + + json_name: typing.Text + """JSON name of this field. The value is set by protocol compiler. If the + user has set a "json_name" option on this field, that option's value + will be used. Otherwise, it's deduced from the field's name by converting + it to camelCase. + """ + @property def options(self) -> global___FieldOptions: ... - # If true, this is a proto3 "optional". When a proto3 field is optional, it - # tracks presence regardless of field type. - # - # When proto3_optional is true, this field must be belong to a oneof to - # signal to old proto3 clients that presence is tracked for this field. This - # oneof is known as a "synthetic" oneof, and this field must be its sole - # member (each proto3 optional field gets its own synthetic oneof). Synthetic - # oneofs exist in the descriptor only, and do not generate any API. Synthetic - # oneofs must be ordered after all "real" oneofs. - # - # For message fields, proto3_optional doesn't create any semantic change, - # since non-repeated message fields always track presence. However it still - # indicates the semantic detail of whether the user wrote "optional" or not. - # This can be useful for round-tripping the .proto file. For consistency we - # give message fields a synthetic oneof also, even though it is not required - # to track presence. This is especially important because the parser can't - # tell if a field is a message or an enum, so it must always create a - # synthetic oneof. - # - # Proto2 optional fields do not set this flag, because they already indicate - # optional with `LABEL_OPTIONAL`. - proto3_optional: builtins.bool = ... + proto3_optional: builtins.bool + """If true, this is a proto3 "optional". When a proto3 field is optional, it + tracks presence regardless of field type. 
+ + When proto3_optional is true, this field must be belong to a oneof to + signal to old proto3 clients that presence is tracked for this field. This + oneof is known as a "synthetic" oneof, and this field must be its sole + member (each proto3 optional field gets its own synthetic oneof). Synthetic + oneofs exist in the descriptor only, and do not generate any API. Synthetic + oneofs must be ordered after all "real" oneofs. + + For message fields, proto3_optional doesn't create any semantic change, + since non-repeated message fields always track presence. However it still + indicates the semantic detail of whether the user wrote "optional" or not. + This can be useful for round-tripping the .proto file. For consistency we + give message fields a synthetic oneof also, even though it is not required + to track presence. This is especially important because the parser can't + tell if a field is a message or an enum, so it must always create a + synthetic oneof. + + Proto2 optional fields do not set this flag, because they already indicate + optional with `LABEL_OPTIONAL`. 
+ """ + def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - number : typing.Optional[builtins.int] = ..., - label : typing.Optional[global___FieldDescriptorProto.Label.V] = ..., - type : typing.Optional[global___FieldDescriptorProto.Type.V] = ..., - type_name : typing.Optional[typing.Text] = ..., - extendee : typing.Optional[typing.Text] = ..., - default_value : typing.Optional[typing.Text] = ..., - oneof_index : typing.Optional[builtins.int] = ..., - json_name : typing.Optional[typing.Text] = ..., - options : typing.Optional[global___FieldOptions] = ..., - proto3_optional : typing.Optional[builtins.bool] = ..., + name: typing.Optional[typing.Text] = ..., + number: typing.Optional[builtins.int] = ..., + label: typing.Optional[global___FieldDescriptorProto.Label.ValueType] = ..., + type: typing.Optional[global___FieldDescriptorProto.Type.ValueType] = ..., + type_name: typing.Optional[typing.Text] = ..., + extendee: typing.Optional[typing.Text] = ..., + default_value: typing.Optional[typing.Text] = ..., + oneof_index: typing.Optional[builtins.int] = ..., + json_name: typing.Optional[typing.Text] = ..., + options: typing.Optional[global___FieldOptions] = ..., + proto3_optional: typing.Optional[builtins.bool] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"default_value",b"default_value",u"extendee",b"extendee",u"json_name",b"json_name",u"label",b"label",u"name",b"name",u"number",b"number",u"oneof_index",b"oneof_index",u"options",b"options",u"proto3_optional",b"proto3_optional",u"type",b"type",u"type_name",b"type_name"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"default_value",b"default_value",u"extendee",b"extendee",u"json_name",b"json_name",u"label",b"label",u"name",b"name",u"number",b"number",u"oneof_index",b"oneof_index",u"options",b"options",u"proto3_optional",b"proto3_optional",u"type",b"type",u"type_name",b"type_name"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["default_value",b"default_value","extendee",b"extendee","json_name",b"json_name","label",b"label","name",b"name","number",b"number","oneof_index",b"oneof_index","options",b"options","proto3_optional",b"proto3_optional","type",b"type","type_name",b"type_name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["default_value",b"default_value","extendee",b"extendee","json_name",b"json_name","label",b"label","name",b"name","number",b"number","oneof_index",b"oneof_index","options",b"options","proto3_optional",b"proto3_optional","type",b"type","type_name",b"type_name"]) -> None: ... global___FieldDescriptorProto = FieldDescriptorProto -# Describes a oneof. class OneofDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a oneof.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int - name: typing.Text = ... + name: typing.Text @property def options(self) -> global___OneofOptions: ... def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - options : typing.Optional[global___OneofOptions] = ..., + name: typing.Optional[typing.Text] = ..., + options: typing.Optional[global___OneofOptions] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options"]) -> None: ... global___OneofDescriptorProto = OneofDescriptorProto -# Describes an enum type. 
class EnumDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - # Range of reserved numeric values. Reserved values may not be used by - # entries in the same enum. Reserved ranges may not overlap. - # - # Note that this is distinct from DescriptorProto.ReservedRange in that it - # is inclusive such that it can appropriately represent the entire int32 - # domain. + """Describes an enum type.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor class EnumReservedRange(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Range of reserved numeric values. Reserved values may not be used by + entries in the same enum. Reserved ranges may not overlap. + + Note that this is distinct from DescriptorProto.ReservedRange in that it + is inclusive such that it can appropriately represent the entire int32 + domain. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor START_FIELD_NUMBER: builtins.int END_FIELD_NUMBER: builtins.int - # Inclusive. - start: builtins.int = ... - # Inclusive. - end: builtins.int = ... + start: builtins.int + """Inclusive.""" + + end: builtins.int + """Inclusive.""" + def __init__(self, *, - start : typing.Optional[builtins.int] = ..., - end : typing.Optional[builtins.int] = ..., + start: typing.Optional[builtins.int] = ..., + end: typing.Optional[builtins.int] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"end",b"end",u"start",b"start"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"end",b"end",u"start",b"start"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["end",b"end","start",b"start"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["end",b"end","start",b"start"]) -> None: ... 
NAME_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int RESERVED_RANGE_FIELD_NUMBER: builtins.int RESERVED_NAME_FIELD_NUMBER: builtins.int - name: typing.Text = ... + name: typing.Text @property def value(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumValueDescriptorProto]: ... @property def options(self) -> global___EnumOptions: ... - # Range of reserved numeric values. Reserved numeric values may not be used - # by enum values in the same enum declaration. Reserved ranges may not - # overlap. @property - def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto.EnumReservedRange]: ... - # Reserved enum value names, which may not be reused. A given name may only - # be reserved once. + def reserved_range(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumDescriptorProto.EnumReservedRange]: + """Range of reserved numeric values. Reserved numeric values may not be used + by enum values in the same enum declaration. Reserved ranges may not + overlap. + """ + pass @property - def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... + def reserved_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """Reserved enum value names, which may not be reused. A given name may only + be reserved once. 
+ """ + pass def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - value : typing.Optional[typing.Iterable[global___EnumValueDescriptorProto]] = ..., - options : typing.Optional[global___EnumOptions] = ..., - reserved_range : typing.Optional[typing.Iterable[global___EnumDescriptorProto.EnumReservedRange]] = ..., - reserved_name : typing.Optional[typing.Iterable[typing.Text]] = ..., + name: typing.Optional[typing.Text] = ..., + value: typing.Optional[typing.Iterable[global___EnumValueDescriptorProto]] = ..., + options: typing.Optional[global___EnumOptions] = ..., + reserved_range: typing.Optional[typing.Iterable[global___EnumDescriptorProto.EnumReservedRange]] = ..., + reserved_name: typing.Optional[typing.Iterable[typing.Text]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options",u"reserved_name",b"reserved_name",u"reserved_range",b"reserved_range",u"value",b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options","reserved_name",b"reserved_name","reserved_range",b"reserved_range","value",b"value"]) -> None: ... global___EnumDescriptorProto = EnumDescriptorProto -# Describes a value within an enum. class EnumValueDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a value within an enum.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int NUMBER_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int - name: typing.Text = ... - number: builtins.int = ... + name: typing.Text + number: builtins.int @property def options(self) -> global___EnumValueOptions: ... 
def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - number : typing.Optional[builtins.int] = ..., - options : typing.Optional[global___EnumValueOptions] = ..., + name: typing.Optional[typing.Text] = ..., + number: typing.Optional[builtins.int] = ..., + options: typing.Optional[global___EnumValueOptions] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"number",b"number",u"options",b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"number",b"number",u"options",b"options"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name",b"name","number",b"number","options",b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name",b"name","number",b"number","options",b"options"]) -> None: ... global___EnumValueDescriptorProto = EnumValueDescriptorProto -# Describes a service. class ServiceDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a service.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int METHOD_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int - name: typing.Text = ... + name: typing.Text @property def method(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MethodDescriptorProto]: ... @property def options(self) -> global___ServiceOptions: ... def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - method : typing.Optional[typing.Iterable[global___MethodDescriptorProto]] = ..., - options : typing.Optional[global___ServiceOptions] = ..., + name: typing.Optional[typing.Text] = ..., + method: typing.Optional[typing.Iterable[global___MethodDescriptorProto]] = ..., + options: typing.Optional[global___ServiceOptions] = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal[u"name",b"name",u"options",b"options"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"method",b"method",u"name",b"name",u"options",b"options"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["name",b"name","options",b"options"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["method",b"method","name",b"name","options",b"options"]) -> None: ... global___ServiceDescriptorProto = ServiceDescriptorProto -# Describes a method of a service. class MethodDescriptorProto(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes a method of a service.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int INPUT_TYPE_FIELD_NUMBER: builtins.int OUTPUT_TYPE_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int CLIENT_STREAMING_FIELD_NUMBER: builtins.int SERVER_STREAMING_FIELD_NUMBER: builtins.int - name: typing.Text = ... - # Input and output type names. These are resolved in the same way as - # FieldDescriptorProto.type_name, but must refer to a message type. - input_type: typing.Text = ... - output_type: typing.Text = ... + name: typing.Text + input_type: typing.Text + """Input and output type names. These are resolved in the same way as + FieldDescriptorProto.type_name, but must refer to a message type. + """ + + output_type: typing.Text @property def options(self) -> global___MethodOptions: ... - # Identifies if client streams multiple client messages - client_streaming: builtins.bool = ... - # Identifies if server streams multiple server messages - server_streaming: builtins.bool = ... 
+ client_streaming: builtins.bool + """Identifies if client streams multiple client messages""" + + server_streaming: builtins.bool + """Identifies if server streams multiple server messages""" + def __init__(self, *, - name : typing.Optional[typing.Text] = ..., - input_type : typing.Optional[typing.Text] = ..., - output_type : typing.Optional[typing.Text] = ..., - options : typing.Optional[global___MethodOptions] = ..., - client_streaming : typing.Optional[builtins.bool] = ..., - server_streaming : typing.Optional[builtins.bool] = ..., + name: typing.Optional[typing.Text] = ..., + input_type: typing.Optional[typing.Text] = ..., + output_type: typing.Optional[typing.Text] = ..., + options: typing.Optional[global___MethodOptions] = ..., + client_streaming: typing.Optional[builtins.bool] = ..., + server_streaming: typing.Optional[builtins.bool] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"client_streaming",b"client_streaming",u"input_type",b"input_type",u"name",b"name",u"options",b"options",u"output_type",b"output_type",u"server_streaming",b"server_streaming"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"client_streaming",b"client_streaming",u"input_type",b"input_type",u"name",b"name",u"options",b"options",u"output_type",b"output_type",u"server_streaming",b"server_streaming"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["client_streaming",b"client_streaming","input_type",b"input_type","name",b"name","options",b"options","output_type",b"output_type","server_streaming",b"server_streaming"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["client_streaming",b"client_streaming","input_type",b"input_type","name",b"name","options",b"options","output_type",b"output_type","server_streaming",b"server_streaming"]) -> None: ... 
global___MethodDescriptorProto = MethodDescriptorProto -# =================================================================== -# Options - -# Each of the definitions above may have "options" attached. These are -# just annotations which may cause code to be generated slightly differently -# or may contain hints for code that manipulates protocol messages. -# -# Clients may define custom options as extensions of the *Options messages. -# These extensions may not yet be known at parsing time, so the parser cannot -# store the values in them. Instead it stores them in a field in the *Options -# message called uninterpreted_option. This field must have the same name -# across all *Options messages. We then use this field to populate the -# extensions when we build a descriptor, at which point all protos have been -# parsed and so all extensions are known. -# -# Extension numbers for custom options may be chosen as follows: -# * For options which will only be used within a single application or -# organization, or for experimental options, use field numbers 50000 -# through 99999. It is up to you to ensure that you do not use the -# same number for multiple options. -# * For options which will be published and used publicly by multiple -# independent entities, e-mail protobuf-global-extension-registry@google.com -# to reserve extension numbers. Simply provide your project name (e.g. -# Objective-C plugin) and your project website (if available) -- there's no -# need to explain how you intend to use them. Usually you only need one -# extension number. You can declare multiple options with only one extension -# number by putting them in a sub-message. See the Custom Options section of -# the docs for examples: -# https://developers.google.com/protocol-buffers/docs/proto#options -# If this turns out to be popular, a web service will be set up -# to automatically assign option numbers. 
- class FileOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - # Generated classes can be optimized for speed or code size. + """=================================================================== + Options + + Each of the definitions above may have "options" attached. These are + just annotations which may cause code to be generated slightly differently + or may contain hints for code that manipulates protocol messages. + + Clients may define custom options as extensions of the *Options messages. + These extensions may not yet be known at parsing time, so the parser cannot + store the values in them. Instead it stores them in a field in the *Options + message called uninterpreted_option. This field must have the same name + across all *Options messages. We then use this field to populate the + extensions when we build a descriptor, at which point all protos have been + parsed and so all extensions are known. + + Extension numbers for custom options may be chosen as follows: + * For options which will only be used within a single application or + organization, or for experimental options, use field numbers 50000 + through 99999. It is up to you to ensure that you do not use the + same number for multiple options. + * For options which will be published and used publicly by multiple + independent entities, e-mail protobuf-global-extension-registry@google.com + to reserve extension numbers. Simply provide your project name (e.g. + Objective-C plugin) and your project website (if available) -- there's no + need to explain how you intend to use them. Usually you only need one + extension number. You can declare multiple options with only one extension + number by putting them in a sub-message. 
See the Custom Options section of + the docs for examples: + https://developers.google.com/protocol-buffers/docs/proto#options + If this turns out to be popular, a web service will be set up + to automatically assign option numbers. + + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _OptimizeMode: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _OptimizeModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FileOptions._OptimizeMode.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SPEED: FileOptions._OptimizeMode.ValueType # 1 + """Generate complete code for parsing, serialization,""" + + CODE_SIZE: FileOptions._OptimizeMode.ValueType # 2 + """etc. + Use ReflectionOps to implement these methods. + """ + + LITE_RUNTIME: FileOptions._OptimizeMode.ValueType # 3 + """Generate code using MessageLite and the lite runtime.""" + class OptimizeMode(_OptimizeMode, metaclass=_OptimizeModeEnumTypeWrapper): + """Generated classes can be optimized for speed or code size.""" pass - class _OptimizeMode: - V = typing.NewType('V', builtins.int) - class _OptimizeModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_OptimizeMode.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # Generate complete code for parsing, serialization, - SPEED = FileOptions.OptimizeMode.V(1) - # etc. - # Use ReflectionOps to implement these methods. - CODE_SIZE = FileOptions.OptimizeMode.V(2) - # Generate code using MessageLite and the lite runtime. - LITE_RUNTIME = FileOptions.OptimizeMode.V(3) - - # Generate complete code for parsing, serialization, - SPEED = FileOptions.OptimizeMode.V(1) - # etc. - # Use ReflectionOps to implement these methods. - CODE_SIZE = FileOptions.OptimizeMode.V(2) - # Generate code using MessageLite and the lite runtime. 
- LITE_RUNTIME = FileOptions.OptimizeMode.V(3) + + SPEED: FileOptions.OptimizeMode.ValueType # 1 + """Generate complete code for parsing, serialization,""" + + CODE_SIZE: FileOptions.OptimizeMode.ValueType # 2 + """etc. + Use ReflectionOps to implement these methods. + """ + + LITE_RUNTIME: FileOptions.OptimizeMode.ValueType # 3 + """Generate code using MessageLite and the lite runtime.""" + JAVA_PACKAGE_FIELD_NUMBER: builtins.int JAVA_OUTER_CLASSNAME_FIELD_NUMBER: builtins.int @@ -595,228 +678,279 @@ class FileOptions(google.protobuf.message.Message): PHP_METADATA_NAMESPACE_FIELD_NUMBER: builtins.int RUBY_PACKAGE_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # Sets the Java package where classes generated from this .proto will be - # placed. By default, the proto package is used, but this is often - # inappropriate because proto packages do not normally start with backwards - # domain names. - java_package: typing.Text = ... - # Controls the name of the wrapper Java class generated for the .proto file. - # That class will always contain the .proto file's getDescriptor() method as - # well as any top-level extensions defined in the .proto file. - # If java_multiple_files is disabled, then all the other classes from the - # .proto file will be nested inside the single wrapper outer class. - java_outer_classname: typing.Text = ... - # If enabled, then the Java code generator will generate a separate .java - # file for each top-level message, enum, and service defined in the .proto - # file. Thus, these types will *not* be nested inside the wrapper class - # named by java_outer_classname. However, the wrapper class will still be - # generated to contain the file's getDescriptor() method as well as any - # top-level extensions defined in the file. - java_multiple_files: builtins.bool = ... - # This option does nothing. - java_generate_equals_and_hash: builtins.bool = ... 
- # If set true, then the Java2 code generator will generate code that - # throws an exception whenever an attempt is made to assign a non-UTF-8 - # byte sequence to a string field. - # Message reflection will do the same. - # However, an extension field still accepts non-UTF-8 byte sequences. - # This option has no effect on when used with the lite runtime. - java_string_check_utf8: builtins.bool = ... - optimize_for: global___FileOptions.OptimizeMode.V = ... - # Sets the Go package where structs generated from this .proto will be - # placed. If omitted, the Go package will be derived from the following: - # - The basename of the package import path, if provided. - # - Otherwise, the package statement in the .proto file, if present. - # - Otherwise, the basename of the .proto file, without extension. - go_package: typing.Text = ... - # Should generic services be generated in each language? "Generic" services - # are not specific to any particular RPC system. They are generated by the - # main code generators in each language (without additional plugins). - # Generic services were the only kind of service generation supported by - # early versions of google.protobuf. - # - # Generic services are now considered deprecated in favor of using plugins - # that generate code specific to your particular RPC system. Therefore, - # these default to false. Old code which depends on generic services should - # explicitly set them to true. - cc_generic_services: builtins.bool = ... - java_generic_services: builtins.bool = ... - py_generic_services: builtins.bool = ... - php_generic_services: builtins.bool = ... - # Is this file deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for everything in the file, or it will be completely ignored; in the very - # least, this is a formalization for deprecating files. - deprecated: builtins.bool = ... - # Enables the use of arenas for the proto messages in this file. 
This applies - # only to generated classes for C++. - cc_enable_arenas: builtins.bool = ... - # Sets the objective c class prefix which is prepended to all objective c - # generated classes from this .proto. There is no default. - objc_class_prefix: typing.Text = ... - # Namespace for generated classes; defaults to the package. - csharp_namespace: typing.Text = ... - # By default Swift generators will take the proto package and CamelCase it - # replacing '.' with underscore and use that to prefix the types/symbols - # defined. When this options is provided, they will use this value instead - # to prefix the types/symbols defined. - swift_prefix: typing.Text = ... - # Sets the php class prefix which is prepended to all php generated classes - # from this .proto. Default is empty. - php_class_prefix: typing.Text = ... - # Use this option to change the namespace of php generated classes. Default - # is empty. When this option is empty, the package name will be used for - # determining the namespace. - php_namespace: typing.Text = ... - # Use this option to change the namespace of php generated metadata classes. - # Default is empty. When this option is empty, the proto file name will be - # used for determining the namespace. - php_metadata_namespace: typing.Text = ... - # Use this option to change the package of ruby generated classes. Default - # is empty. When this option is not set, the package name will be used for - # determining the ruby package. - ruby_package: typing.Text = ... - # The parser stores options it doesn't recognize here. - # See the documentation for the "Options" section above. + java_package: typing.Text + """Sets the Java package where classes generated from this .proto will be + placed. By default, the proto package is used, but this is often + inappropriate because proto packages do not normally start with backwards + domain names. 
+ """ + + java_outer_classname: typing.Text + """Controls the name of the wrapper Java class generated for the .proto file. + That class will always contain the .proto file's getDescriptor() method as + well as any top-level extensions defined in the .proto file. + If java_multiple_files is disabled, then all the other classes from the + .proto file will be nested inside the single wrapper outer class. + """ + + java_multiple_files: builtins.bool + """If enabled, then the Java code generator will generate a separate .java + file for each top-level message, enum, and service defined in the .proto + file. Thus, these types will *not* be nested inside the wrapper class + named by java_outer_classname. However, the wrapper class will still be + generated to contain the file's getDescriptor() method as well as any + top-level extensions defined in the file. + """ + + java_generate_equals_and_hash: builtins.bool + """This option does nothing.""" + + java_string_check_utf8: builtins.bool + """If set true, then the Java2 code generator will generate code that + throws an exception whenever an attempt is made to assign a non-UTF-8 + byte sequence to a string field. + Message reflection will do the same. + However, an extension field still accepts non-UTF-8 byte sequences. + This option has no effect on when used with the lite runtime. + """ + + optimize_for: global___FileOptions.OptimizeMode.ValueType + go_package: typing.Text + """Sets the Go package where structs generated from this .proto will be + placed. If omitted, the Go package will be derived from the following: + - The basename of the package import path, if provided. + - Otherwise, the package statement in the .proto file, if present. + - Otherwise, the basename of the .proto file, without extension. + """ + + cc_generic_services: builtins.bool + """Should generic services be generated in each language? "Generic" services + are not specific to any particular RPC system. 
They are generated by the + main code generators in each language (without additional plugins). + Generic services were the only kind of service generation supported by + early versions of google.protobuf. + + Generic services are now considered deprecated in favor of using plugins + that generate code specific to your particular RPC system. Therefore, + these default to false. Old code which depends on generic services should + explicitly set them to true. + """ + + java_generic_services: builtins.bool + py_generic_services: builtins.bool + php_generic_services: builtins.bool + deprecated: builtins.bool + """Is this file deprecated? + Depending on the target platform, this can emit Deprecated annotations + for everything in the file, or it will be completely ignored; in the very + least, this is a formalization for deprecating files. + """ + + cc_enable_arenas: builtins.bool + """Enables the use of arenas for the proto messages in this file. This applies + only to generated classes for C++. + """ + + objc_class_prefix: typing.Text + """Sets the objective c class prefix which is prepended to all objective c + generated classes from this .proto. There is no default. + """ + + csharp_namespace: typing.Text + """Namespace for generated classes; defaults to the package.""" + + swift_prefix: typing.Text + """By default Swift generators will take the proto package and CamelCase it + replacing '.' with underscore and use that to prefix the types/symbols + defined. When this options is provided, they will use this value instead + to prefix the types/symbols defined. + """ + + php_class_prefix: typing.Text + """Sets the php class prefix which is prepended to all php generated classes + from this .proto. Default is empty. + """ + + php_namespace: typing.Text + """Use this option to change the namespace of php generated classes. Default + is empty. When this option is empty, the package name will be used for + determining the namespace. 
+ """ + + php_metadata_namespace: typing.Text + """Use this option to change the namespace of php generated metadata classes. + Default is empty. When this option is empty, the proto file name will be + used for determining the namespace. + """ + + ruby_package: typing.Text + """Use this option to change the package of ruby generated classes. Default + is empty. When this option is not set, the package name will be used for + determining the ruby package. + """ + @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. + See the documentation for the "Options" section above. + """ + pass def __init__(self, *, - java_package : typing.Optional[typing.Text] = ..., - java_outer_classname : typing.Optional[typing.Text] = ..., - java_multiple_files : typing.Optional[builtins.bool] = ..., - java_generate_equals_and_hash : typing.Optional[builtins.bool] = ..., - java_string_check_utf8 : typing.Optional[builtins.bool] = ..., - optimize_for : typing.Optional[global___FileOptions.OptimizeMode.V] = ..., - go_package : typing.Optional[typing.Text] = ..., - cc_generic_services : typing.Optional[builtins.bool] = ..., - java_generic_services : typing.Optional[builtins.bool] = ..., - py_generic_services : typing.Optional[builtins.bool] = ..., - php_generic_services : typing.Optional[builtins.bool] = ..., - deprecated : typing.Optional[builtins.bool] = ..., - cc_enable_arenas : typing.Optional[builtins.bool] = ..., - objc_class_prefix : typing.Optional[typing.Text] = ..., - csharp_namespace : typing.Optional[typing.Text] = ..., - swift_prefix : typing.Optional[typing.Text] = ..., - php_class_prefix : typing.Optional[typing.Text] = ..., - php_namespace : typing.Optional[typing.Text] = ..., - 
php_metadata_namespace : typing.Optional[typing.Text] = ..., - ruby_package : typing.Optional[typing.Text] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + java_package: typing.Optional[typing.Text] = ..., + java_outer_classname: typing.Optional[typing.Text] = ..., + java_multiple_files: typing.Optional[builtins.bool] = ..., + java_generate_equals_and_hash: typing.Optional[builtins.bool] = ..., + java_string_check_utf8: typing.Optional[builtins.bool] = ..., + optimize_for: typing.Optional[global___FileOptions.OptimizeMode.ValueType] = ..., + go_package: typing.Optional[typing.Text] = ..., + cc_generic_services: typing.Optional[builtins.bool] = ..., + java_generic_services: typing.Optional[builtins.bool] = ..., + py_generic_services: typing.Optional[builtins.bool] = ..., + php_generic_services: typing.Optional[builtins.bool] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + cc_enable_arenas: typing.Optional[builtins.bool] = ..., + objc_class_prefix: typing.Optional[typing.Text] = ..., + csharp_namespace: typing.Optional[typing.Text] = ..., + swift_prefix: typing.Optional[typing.Text] = ..., + php_class_prefix: typing.Optional[typing.Text] = ..., + php_namespace: typing.Optional[typing.Text] = ..., + php_metadata_namespace: typing.Optional[typing.Text] = ..., + ruby_package: typing.Optional[typing.Text] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal[u"cc_enable_arenas",b"cc_enable_arenas",u"cc_generic_services",b"cc_generic_services",u"csharp_namespace",b"csharp_namespace",u"deprecated",b"deprecated",u"go_package",b"go_package",u"java_generate_equals_and_hash",b"java_generate_equals_and_hash",u"java_generic_services",b"java_generic_services",u"java_multiple_files",b"java_multiple_files",u"java_outer_classname",b"java_outer_classname",u"java_package",b"java_package",u"java_string_check_utf8",b"java_string_check_utf8",u"objc_class_prefix",b"objc_class_prefix",u"optimize_for",b"optimize_for",u"php_class_prefix",b"php_class_prefix",u"php_generic_services",b"php_generic_services",u"php_metadata_namespace",b"php_metadata_namespace",u"php_namespace",b"php_namespace",u"py_generic_services",b"py_generic_services",u"ruby_package",b"ruby_package",u"swift_prefix",b"swift_prefix"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"cc_enable_arenas",b"cc_enable_arenas",u"cc_generic_services",b"cc_generic_services",u"csharp_namespace",b"csharp_namespace",u"deprecated",b"deprecated",u"go_package",b"go_package",u"java_generate_equals_and_hash",b"java_generate_equals_and_hash",u"java_generic_services",b"java_generic_services",u"java_multiple_files",b"java_multiple_files",u"java_outer_classname",b"java_outer_classname",u"java_package",b"java_package",u"java_string_check_utf8",b"java_string_check_utf8",u"objc_class_prefix",b"objc_class_prefix",u"optimize_for",b"optimize_for",u"php_class_prefix",b"php_class_prefix",u"php_generic_services",b"php_generic_services",u"php_metadata_namespace",b"php_metadata_namespace",u"php_namespace",b"php_namespace",u"py_generic_services",b"py_generic_services",u"ruby_package",b"ruby_package",u"swift_prefix",b"swift_prefix",u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["cc_enable_arenas",b"cc_enable_arenas","cc_generic_services",b"cc_generic_services","csharp_namespace",b"csharp_namespace","deprecated",b"deprecated","go_package",b"go_package","java_generate_equals_and_hash",b"java_generate_equals_and_hash","java_generic_services",b"java_generic_services","java_multiple_files",b"java_multiple_files","java_outer_classname",b"java_outer_classname","java_package",b"java_package","java_string_check_utf8",b"java_string_check_utf8","objc_class_prefix",b"objc_class_prefix","optimize_for",b"optimize_for","php_class_prefix",b"php_class_prefix","php_generic_services",b"php_generic_services","php_metadata_namespace",b"php_metadata_namespace","php_namespace",b"php_namespace","py_generic_services",b"py_generic_services","ruby_package",b"ruby_package","swift_prefix",b"swift_prefix"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["cc_enable_arenas",b"cc_enable_arenas","cc_generic_services",b"cc_generic_services","csharp_namespace",b"csharp_namespace","deprecated",b"deprecated","go_package",b"go_package","java_generate_equals_and_hash",b"java_generate_equals_and_hash","java_generic_services",b"java_generic_services","java_multiple_files",b"java_multiple_files","java_outer_classname",b"java_outer_classname","java_package",b"java_package","java_string_check_utf8",b"java_string_check_utf8","objc_class_prefix",b"objc_class_prefix","optimize_for",b"optimize_for","php_class_prefix",b"php_class_prefix","php_generic_services",b"php_generic_services","php_metadata_namespace",b"php_metadata_namespace","php_namespace",b"php_namespace","py_generic_services",b"py_generic_services","ruby_package",b"ruby_package","swift_prefix",b"swift_prefix","uninterpreted_option",b"uninterpreted_option"]) -> None: ... global___FileOptions = FileOptions class MessageOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor MESSAGE_SET_WIRE_FORMAT_FIELD_NUMBER: builtins.int NO_STANDARD_DESCRIPTOR_ACCESSOR_FIELD_NUMBER: builtins.int DEPRECATED_FIELD_NUMBER: builtins.int MAP_ENTRY_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # Set true to use the old proto1 MessageSet wire format for extensions. - # This is provided for backwards-compatibility with the MessageSet wire - # format. You should not use this for any other reason: It's less - # efficient, has fewer features, and is more complicated. - # - # The message must be defined exactly as follows: - # message Foo { - # option message_set_wire_format = true; - # extensions 4 to max; - # } - # Note that the message cannot have any defined fields; MessageSets only - # have extensions. - # - # All extensions of your type must be singular messages; e.g. they cannot - # be int32s, enums, or repeated messages. - # - # Because this is an option, the above two restrictions are not enforced by - # the protocol compiler. - message_set_wire_format: builtins.bool = ... - # Disables the generation of the standard "descriptor()" accessor, which can - # conflict with a field of the same name. This is meant to make migration - # from proto1 easier; new code should avoid fields named "descriptor". - no_standard_descriptor_accessor: builtins.bool = ... - # Is this message deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for the message, or it will be completely ignored; in the very least, - # this is a formalization for deprecating messages. - deprecated: builtins.bool = ... - # Whether the message is an automatically generated map entry type for the - # maps field. 
- # - # For maps fields: - # map map_field = 1; - # The parsed descriptor looks like: - # message MapFieldEntry { - # option map_entry = true; - # optional KeyType key = 1; - # optional ValueType value = 2; - # } - # repeated MapFieldEntry map_field = 1; - # - # Implementations may choose not to generate the map_entry=true message, but - # use a native map in the target language to hold the keys and values. - # The reflection APIs in such implementations still need to work as - # if the field is a repeated message field. - # - # NOTE: Do not set the option in .proto files. Always use the maps syntax - # instead. The option should only be implicitly set by the proto compiler - # parser. - map_entry: builtins.bool = ... - # The parser stores options it doesn't recognize here. See above. + message_set_wire_format: builtins.bool + """Set true to use the old proto1 MessageSet wire format for extensions. + This is provided for backwards-compatibility with the MessageSet wire + format. You should not use this for any other reason: It's less + efficient, has fewer features, and is more complicated. + + The message must be defined exactly as follows: + message Foo { + option message_set_wire_format = true; + extensions 4 to max; + } + Note that the message cannot have any defined fields; MessageSets only + have extensions. + + All extensions of your type must be singular messages; e.g. they cannot + be int32s, enums, or repeated messages. + + Because this is an option, the above two restrictions are not enforced by + the protocol compiler. + """ + + no_standard_descriptor_accessor: builtins.bool + """Disables the generation of the standard "descriptor()" accessor, which can + conflict with a field of the same name. This is meant to make migration + from proto1 easier; new code should avoid fields named "descriptor". + """ + + deprecated: builtins.bool + """Is this message deprecated? 
+ Depending on the target platform, this can emit Deprecated annotations + for the message, or it will be completely ignored; in the very least, + this is a formalization for deprecating messages. + """ + + map_entry: builtins.bool + """Whether the message is an automatically generated map entry type for the + maps field. + + For maps fields: + map map_field = 1; + The parsed descriptor looks like: + message MapFieldEntry { + option map_entry = true; + optional KeyType key = 1; + optional ValueType value = 2; + } + repeated MapFieldEntry map_field = 1; + + Implementations may choose not to generate the map_entry=true message, but + use a native map in the target language to hold the keys and values. + The reflection APIs in such implementations still need to work as + if the field is a repeated message field. + + NOTE: Do not set the option in .proto files. Always use the maps syntax + instead. The option should only be implicitly set by the proto compiler + parser. + """ + @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. 
See above.""" + pass def __init__(self, *, - message_set_wire_format : typing.Optional[builtins.bool] = ..., - no_standard_descriptor_accessor : typing.Optional[builtins.bool] = ..., - deprecated : typing.Optional[builtins.bool] = ..., - map_entry : typing.Optional[builtins.bool] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + message_set_wire_format: typing.Optional[builtins.bool] = ..., + no_standard_descriptor_accessor: typing.Optional[builtins.bool] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + map_entry: typing.Optional[builtins.bool] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated",u"map_entry",b"map_entry",u"message_set_wire_format",b"message_set_wire_format",u"no_standard_descriptor_accessor",b"no_standard_descriptor_accessor"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated",u"map_entry",b"map_entry",u"message_set_wire_format",b"message_set_wire_format",u"no_standard_descriptor_accessor",b"no_standard_descriptor_accessor",u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated","map_entry",b"map_entry","message_set_wire_format",b"message_set_wire_format","no_standard_descriptor_accessor",b"no_standard_descriptor_accessor"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated","map_entry",b"map_entry","message_set_wire_format",b"message_set_wire_format","no_standard_descriptor_accessor",b"no_standard_descriptor_accessor","uninterpreted_option",b"uninterpreted_option"]) -> None: ... 
global___MessageOptions = MessageOptions class FieldOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _CType: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _CTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._CType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + STRING: FieldOptions._CType.ValueType # 0 + """Default mode.""" + + CORD: FieldOptions._CType.ValueType # 1 + STRING_PIECE: FieldOptions._CType.ValueType # 2 class CType(_CType, metaclass=_CTypeEnumTypeWrapper): pass - class _CType: - V = typing.NewType('V', builtins.int) - class _CTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_CType.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # Default mode. - STRING = FieldOptions.CType.V(0) - CORD = FieldOptions.CType.V(1) - STRING_PIECE = FieldOptions.CType.V(2) - - # Default mode. 
- STRING = FieldOptions.CType.V(0) - CORD = FieldOptions.CType.V(1) - STRING_PIECE = FieldOptions.CType.V(2) + + STRING: FieldOptions.CType.ValueType # 0 + """Default mode.""" + + CORD: FieldOptions.CType.ValueType # 1 + STRING_PIECE: FieldOptions.CType.ValueType # 2 + + class _JSType: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _JSTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FieldOptions._JSType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + JS_NORMAL: FieldOptions._JSType.ValueType # 0 + """Use the default type.""" + + JS_STRING: FieldOptions._JSType.ValueType # 1 + """Use JavaScript strings.""" + + JS_NUMBER: FieldOptions._JSType.ValueType # 2 + """Use JavaScript numbers.""" class JSType(_JSType, metaclass=_JSTypeEnumTypeWrapper): pass - class _JSType: - V = typing.NewType('V', builtins.int) - class _JSTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_JSType.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # Use the default type. - JS_NORMAL = FieldOptions.JSType.V(0) - # Use JavaScript strings. - JS_STRING = FieldOptions.JSType.V(1) - # Use JavaScript numbers. - JS_NUMBER = FieldOptions.JSType.V(2) - - # Use the default type. - JS_NORMAL = FieldOptions.JSType.V(0) - # Use JavaScript strings. - JS_STRING = FieldOptions.JSType.V(1) - # Use JavaScript numbers. 
- JS_NUMBER = FieldOptions.JSType.V(2) + + JS_NORMAL: FieldOptions.JSType.ValueType # 0 + """Use the default type.""" + + JS_STRING: FieldOptions.JSType.ValueType # 1 + """Use JavaScript strings.""" + + JS_NUMBER: FieldOptions.JSType.ValueType # 2 + """Use JavaScript numbers.""" + CTYPE_FIELD_NUMBER: builtins.int PACKED_FIELD_NUMBER: builtins.int @@ -825,244 +959,279 @@ class FieldOptions(google.protobuf.message.Message): DEPRECATED_FIELD_NUMBER: builtins.int WEAK_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # The ctype option instructs the C++ code generator to use a different - # representation of the field than it normally would. See the specific - # options below. This option is not yet implemented in the open source - # release -- sorry, we'll try to include it in a future version! - ctype: global___FieldOptions.CType.V = ... - # The packed option can be enabled for repeated primitive fields to enable - # a more efficient representation on the wire. Rather than repeatedly - # writing the tag and type for each element, the entire array is encoded as - # a single length-delimited blob. In proto3, only explicit setting it to - # false will avoid using packed encoding. - packed: builtins.bool = ... - # The jstype option determines the JavaScript type used for values of the - # field. The option is permitted only for 64 bit integral and fixed types - # (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING - # is represented as JavaScript string, which avoids loss of precision that - # can happen when a large value is converted to a floating point JavaScript. - # Specifying JS_NUMBER for the jstype causes the generated JavaScript code to - # use the JavaScript "number" type. The behavior of the default option - # JS_NORMAL is implementation dependent. - # - # This option is an enum to permit additional types to be added, e.g. - # goog.math.Integer. - jstype: global___FieldOptions.JSType.V = ... 
- # Should this field be parsed lazily? Lazy applies only to message-type - # fields. It means that when the outer message is initially parsed, the - # inner message's contents will not be parsed but instead stored in encoded - # form. The inner message will actually be parsed when it is first accessed. - # - # This is only a hint. Implementations are free to choose whether to use - # eager or lazy parsing regardless of the value of this option. However, - # setting this option true suggests that the protocol author believes that - # using lazy parsing on this field is worth the additional bookkeeping - # overhead typically needed to implement it. - # - # This option does not affect the public interface of any generated code; - # all method signatures remain the same. Furthermore, thread-safety of the - # interface is not affected by this option; const methods remain safe to - # call from multiple threads concurrently, while non-const methods continue - # to require exclusive access. - # - # - # Note that implementations may choose not to check required fields within - # a lazy sub-message. That is, calling IsInitialized() on the outer message - # may return true even if the inner message has missing required fields. - # This is necessary because otherwise the inner message would have to be - # parsed in order to perform the check, defeating the purpose of lazy - # parsing. An implementation which chooses not to check required fields - # must be consistent about it. That is, for any particular sub-message, the - # implementation must either *always* check its required fields, or *never* - # check its required fields, regardless of whether or not the message has - # been parsed. - lazy: builtins.bool = ... - # Is this field deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for accessors, or it will be completely ignored; in the very least, this - # is a formalization for deprecating fields. - deprecated: builtins.bool = ... 
- # For Google-internal migration only. Do not use. - weak: builtins.bool = ... - # The parser stores options it doesn't recognize here. See above. + ctype: global___FieldOptions.CType.ValueType + """The ctype option instructs the C++ code generator to use a different + representation of the field than it normally would. See the specific + options below. This option is not yet implemented in the open source + release -- sorry, we'll try to include it in a future version! + """ + + packed: builtins.bool + """The packed option can be enabled for repeated primitive fields to enable + a more efficient representation on the wire. Rather than repeatedly + writing the tag and type for each element, the entire array is encoded as + a single length-delimited blob. In proto3, only explicit setting it to + false will avoid using packed encoding. + """ + + jstype: global___FieldOptions.JSType.ValueType + """The jstype option determines the JavaScript type used for values of the + field. The option is permitted only for 64 bit integral and fixed types + (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + is represented as JavaScript string, which avoids loss of precision that + can happen when a large value is converted to a floating point JavaScript. + Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + use the JavaScript "number" type. The behavior of the default option + JS_NORMAL is implementation dependent. + + This option is an enum to permit additional types to be added, e.g. + goog.math.Integer. + """ + + lazy: builtins.bool + """Should this field be parsed lazily? Lazy applies only to message-type + fields. It means that when the outer message is initially parsed, the + inner message's contents will not be parsed but instead stored in encoded + form. The inner message will actually be parsed when it is first accessed. + + This is only a hint. 
Implementations are free to choose whether to use + eager or lazy parsing regardless of the value of this option. However, + setting this option true suggests that the protocol author believes that + using lazy parsing on this field is worth the additional bookkeeping + overhead typically needed to implement it. + + This option does not affect the public interface of any generated code; + all method signatures remain the same. Furthermore, thread-safety of the + interface is not affected by this option; const methods remain safe to + call from multiple threads concurrently, while non-const methods continue + to require exclusive access. + + + Note that implementations may choose not to check required fields within + a lazy sub-message. That is, calling IsInitialized() on the outer message + may return true even if the inner message has missing required fields. + This is necessary because otherwise the inner message would have to be + parsed in order to perform the check, defeating the purpose of lazy + parsing. An implementation which chooses not to check required fields + must be consistent about it. That is, for any particular sub-message, the + implementation must either *always* check its required fields, or *never* + check its required fields, regardless of whether or not the message has + been parsed. + """ + + deprecated: builtins.bool + """Is this field deprecated? + Depending on the target platform, this can emit Deprecated annotations + for accessors, or it will be completely ignored; in the very least, this + is a formalization for deprecating fields. + """ + + weak: builtins.bool + """For Google-internal migration only. Do not use.""" + @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... 
+ def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + pass def __init__(self, *, - ctype : typing.Optional[global___FieldOptions.CType.V] = ..., - packed : typing.Optional[builtins.bool] = ..., - jstype : typing.Optional[global___FieldOptions.JSType.V] = ..., - lazy : typing.Optional[builtins.bool] = ..., - deprecated : typing.Optional[builtins.bool] = ..., - weak : typing.Optional[builtins.bool] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + ctype: typing.Optional[global___FieldOptions.CType.ValueType] = ..., + packed: typing.Optional[builtins.bool] = ..., + jstype: typing.Optional[global___FieldOptions.JSType.ValueType] = ..., + lazy: typing.Optional[builtins.bool] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + weak: typing.Optional[builtins.bool] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"ctype",b"ctype",u"deprecated",b"deprecated",u"jstype",b"jstype",u"lazy",b"lazy",u"packed",b"packed",u"weak",b"weak"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"ctype",b"ctype",u"deprecated",b"deprecated",u"jstype",b"jstype",u"lazy",b"lazy",u"packed",b"packed",u"uninterpreted_option",b"uninterpreted_option",u"weak",b"weak"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["ctype",b"ctype","deprecated",b"deprecated","jstype",b"jstype","lazy",b"lazy","packed",b"packed","weak",b"weak"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["ctype",b"ctype","deprecated",b"deprecated","jstype",b"jstype","lazy",b"lazy","packed",b"packed","uninterpreted_option",b"uninterpreted_option","weak",b"weak"]) -> None: ... 
global___FieldOptions = FieldOptions class OneofOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # The parser stores options it doesn't recognize here. See above. @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + pass def __init__(self, *, - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["uninterpreted_option",b"uninterpreted_option"]) -> None: ... global___OneofOptions = OneofOptions class EnumOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor ALLOW_ALIAS_FIELD_NUMBER: builtins.int DEPRECATED_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # Set this option to true to allow mapping different tag names to the same - # value. - allow_alias: builtins.bool = ... - # Is this enum deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for the enum, or it will be completely ignored; in the very least, this - # is a formalization for deprecating enums. - deprecated: builtins.bool = ... - # The parser stores options it doesn't recognize here. See above. 
+ allow_alias: builtins.bool + """Set this option to true to allow mapping different tag names to the same + value. + """ + + deprecated: builtins.bool + """Is this enum deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum, or it will be completely ignored; in the very least, this + is a formalization for deprecating enums. + """ + @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + pass def __init__(self, *, - allow_alias : typing.Optional[builtins.bool] = ..., - deprecated : typing.Optional[builtins.bool] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + allow_alias: typing.Optional[builtins.bool] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"allow_alias",b"allow_alias",u"deprecated",b"deprecated"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"allow_alias",b"allow_alias",u"deprecated",b"deprecated",u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["allow_alias",b"allow_alias","deprecated",b"deprecated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["allow_alias",b"allow_alias","deprecated",b"deprecated","uninterpreted_option",b"uninterpreted_option"]) -> None: ... global___EnumOptions = EnumOptions class EnumValueOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor DEPRECATED_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # Is this enum value deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for the enum value, or it will be completely ignored; in the very least, - # this is a formalization for deprecating enum values. - deprecated: builtins.bool = ... - # The parser stores options it doesn't recognize here. See above. + deprecated: builtins.bool + """Is this enum value deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum value, or it will be completely ignored; in the very least, + this is a formalization for deprecating enum values. + """ + @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + pass def __init__(self, *, - deprecated : typing.Optional[builtins.bool] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated",u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated","uninterpreted_option",b"uninterpreted_option"]) -> None: ... 
global___EnumValueOptions = EnumValueOptions class ServiceOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor DEPRECATED_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # Note: Field numbers 1 through 32 are reserved for Google's internal RPC - # framework. We apologize for hoarding these numbers to ourselves, but - # we were already using them long before we decided to release Protocol - # Buffers. - - # Is this service deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for the service, or it will be completely ignored; in the very least, - # this is a formalization for deprecating services. - deprecated: builtins.bool = ... - # The parser stores options it doesn't recognize here. See above. + deprecated: builtins.bool + """Note: Field numbers 1 through 32 are reserved for Google's internal RPC + framework. We apologize for hoarding these numbers to ourselves, but + we were already using them long before we decided to release Protocol + Buffers. + + Is this service deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the service, or it will be completely ignored; in the very least, + this is a formalization for deprecating services. + """ + @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. 
See above.""" + pass def __init__(self, *, - deprecated : typing.Optional[builtins.bool] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated",u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated","uninterpreted_option",b"uninterpreted_option"]) -> None: ... global___ServiceOptions = ServiceOptions class MethodOptions(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - # Is this method side-effect-free (or safe in HTTP parlance), or idempotent, - # or neither? HTTP based RPC implementation may choose GET verb for safe - # methods, and PUT verb for idempotent methods instead of the default POST. 
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _IdempotencyLevel: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _IdempotencyLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[MethodOptions._IdempotencyLevel.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + IDEMPOTENCY_UNKNOWN: MethodOptions._IdempotencyLevel.ValueType # 0 + NO_SIDE_EFFECTS: MethodOptions._IdempotencyLevel.ValueType # 1 + """implies idempotent""" + + IDEMPOTENT: MethodOptions._IdempotencyLevel.ValueType # 2 + """idempotent, but may have side effects""" + class IdempotencyLevel(_IdempotencyLevel, metaclass=_IdempotencyLevelEnumTypeWrapper): + """Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + or neither? HTTP based RPC implementation may choose GET verb for safe + methods, and PUT verb for idempotent methods instead of the default POST. + """ pass - class _IdempotencyLevel: - V = typing.NewType('V', builtins.int) - class _IdempotencyLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_IdempotencyLevel.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... 
- IDEMPOTENCY_UNKNOWN = MethodOptions.IdempotencyLevel.V(0) - # implies idempotent - NO_SIDE_EFFECTS = MethodOptions.IdempotencyLevel.V(1) - # idempotent, but may have side effects - IDEMPOTENT = MethodOptions.IdempotencyLevel.V(2) - - IDEMPOTENCY_UNKNOWN = MethodOptions.IdempotencyLevel.V(0) - # implies idempotent - NO_SIDE_EFFECTS = MethodOptions.IdempotencyLevel.V(1) - # idempotent, but may have side effects - IDEMPOTENT = MethodOptions.IdempotencyLevel.V(2) + + IDEMPOTENCY_UNKNOWN: MethodOptions.IdempotencyLevel.ValueType # 0 + NO_SIDE_EFFECTS: MethodOptions.IdempotencyLevel.ValueType # 1 + """implies idempotent""" + + IDEMPOTENT: MethodOptions.IdempotencyLevel.ValueType # 2 + """idempotent, but may have side effects""" + DEPRECATED_FIELD_NUMBER: builtins.int IDEMPOTENCY_LEVEL_FIELD_NUMBER: builtins.int UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int - # Note: Field numbers 1 through 32 are reserved for Google's internal RPC - # framework. We apologize for hoarding these numbers to ourselves, but - # we were already using them long before we decided to release Protocol - # Buffers. - - # Is this method deprecated? - # Depending on the target platform, this can emit Deprecated annotations - # for the method, or it will be completely ignored; in the very least, - # this is a formalization for deprecating methods. - deprecated: builtins.bool = ... - idempotency_level: global___MethodOptions.IdempotencyLevel.V = ... - # The parser stores options it doesn't recognize here. See above. + deprecated: builtins.bool + """Note: Field numbers 1 through 32 are reserved for Google's internal RPC + framework. We apologize for hoarding these numbers to ourselves, but + we were already using them long before we decided to release Protocol + Buffers. + + Is this method deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the method, or it will be completely ignored; in the very least, + this is a formalization for deprecating methods. 
+ """ + + idempotency_level: global___MethodOptions.IdempotencyLevel.ValueType @property - def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: ... + def uninterpreted_option(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption]: + """The parser stores options it doesn't recognize here. See above.""" + pass def __init__(self, *, - deprecated : typing.Optional[builtins.bool] = ..., - idempotency_level : typing.Optional[global___MethodOptions.IdempotencyLevel.V] = ..., - uninterpreted_option : typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., + deprecated: typing.Optional[builtins.bool] = ..., + idempotency_level: typing.Optional[global___MethodOptions.IdempotencyLevel.ValueType] = ..., + uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated",u"idempotency_level",b"idempotency_level"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"deprecated",b"deprecated",u"idempotency_level",b"idempotency_level",u"uninterpreted_option",b"uninterpreted_option"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated","idempotency_level",b"idempotency_level"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["deprecated",b"deprecated","idempotency_level",b"idempotency_level","uninterpreted_option",b"uninterpreted_option"]) -> None: ... global___MethodOptions = MethodOptions -# A message representing a option the parser does not recognize. This only -# appears in options protos created by the compiler::Parser class. -# DescriptorPool resolves these when building Descriptor objects. Therefore, -# options protos in descriptor objects (e.g. 
returned by Descriptor::options(), -# or produced by Descriptor::CopyTo()) will never have UninterpretedOptions -# in them. class UninterpretedOption(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - # The name of the uninterpreted option. Each string represents a segment in - # a dot-separated name. is_extension is true iff a segment represents an - # extension (denoted with parentheses in options specs in .proto files). - # E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents - # "foo.(bar.baz).qux". + """A message representing a option the parser does not recognize. This only + appears in options protos created by the compiler::Parser class. + DescriptorPool resolves these when building Descriptor objects. Therefore, + options protos in descriptor objects (e.g. returned by Descriptor::options(), + or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + in them. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor class NamePart(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """The name of the uninterpreted option. Each string represents a segment in + a dot-separated name. is_extension is true iff a segment represents an + extension (denoted with parentheses in options specs in .proto files). + E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents + "foo.(bar.baz).qux". + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_PART_FIELD_NUMBER: builtins.int IS_EXTENSION_FIELD_NUMBER: builtins.int - name_part: typing.Text = ... - is_extension: builtins.bool = ... + name_part: typing.Text + is_extension: builtins.bool def __init__(self, *, - name_part : typing.Optional[typing.Text] = ..., - is_extension : typing.Optional[builtins.bool] = ..., + name_part: typing.Optional[typing.Text] = ..., + is_extension: typing.Optional[builtins.bool] = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal[u"is_extension",b"is_extension",u"name_part",b"name_part"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"is_extension",b"is_extension",u"name_part",b"name_part"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["is_extension",b"is_extension","name_part",b"name_part"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["is_extension",b"is_extension","name_part",b"name_part"]) -> None: ... NAME_FIELD_NUMBER: builtins.int IDENTIFIER_VALUE_FIELD_NUMBER: builtins.int @@ -1073,231 +1242,252 @@ class UninterpretedOption(google.protobuf.message.Message): AGGREGATE_VALUE_FIELD_NUMBER: builtins.int @property def name(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___UninterpretedOption.NamePart]: ... - # The value of the uninterpreted option, in whatever type the tokenizer - # identified it as during parsing. Exactly one of these should be set. - identifier_value: typing.Text = ... - positive_int_value: builtins.int = ... - negative_int_value: builtins.int = ... - double_value: builtins.float = ... - string_value: builtins.bytes = ... - aggregate_value: typing.Text = ... + identifier_value: typing.Text + """The value of the uninterpreted option, in whatever type the tokenizer + identified it as during parsing. Exactly one of these should be set. 
+ """ + + positive_int_value: builtins.int + negative_int_value: builtins.int + double_value: builtins.float + string_value: builtins.bytes + aggregate_value: typing.Text def __init__(self, *, - name : typing.Optional[typing.Iterable[global___UninterpretedOption.NamePart]] = ..., - identifier_value : typing.Optional[typing.Text] = ..., - positive_int_value : typing.Optional[builtins.int] = ..., - negative_int_value : typing.Optional[builtins.int] = ..., - double_value : typing.Optional[builtins.float] = ..., - string_value : typing.Optional[builtins.bytes] = ..., - aggregate_value : typing.Optional[typing.Text] = ..., + name: typing.Optional[typing.Iterable[global___UninterpretedOption.NamePart]] = ..., + identifier_value: typing.Optional[typing.Text] = ..., + positive_int_value: typing.Optional[builtins.int] = ..., + negative_int_value: typing.Optional[builtins.int] = ..., + double_value: typing.Optional[builtins.float] = ..., + string_value: typing.Optional[builtins.bytes] = ..., + aggregate_value: typing.Optional[typing.Text] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"aggregate_value",b"aggregate_value",u"double_value",b"double_value",u"identifier_value",b"identifier_value",u"negative_int_value",b"negative_int_value",u"positive_int_value",b"positive_int_value",u"string_value",b"string_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"aggregate_value",b"aggregate_value",u"double_value",b"double_value",u"identifier_value",b"identifier_value",u"name",b"name",u"negative_int_value",b"negative_int_value",u"positive_int_value",b"positive_int_value",u"string_value",b"string_value"]) -> None: ... 
+ def HasField(self, field_name: typing_extensions.Literal["aggregate_value",b"aggregate_value","double_value",b"double_value","identifier_value",b"identifier_value","negative_int_value",b"negative_int_value","positive_int_value",b"positive_int_value","string_value",b"string_value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["aggregate_value",b"aggregate_value","double_value",b"double_value","identifier_value",b"identifier_value","name",b"name","negative_int_value",b"negative_int_value","positive_int_value",b"positive_int_value","string_value",b"string_value"]) -> None: ... global___UninterpretedOption = UninterpretedOption -# =================================================================== -# Optional source code info - -# Encapsulates information about the original source file from which a -# FileDescriptorProto was generated. class SourceCodeInfo(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """=================================================================== + Optional source code info + + Encapsulates information about the original source file from which a + FileDescriptorProto was generated. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor class Location(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor PATH_FIELD_NUMBER: builtins.int SPAN_FIELD_NUMBER: builtins.int LEADING_COMMENTS_FIELD_NUMBER: builtins.int TRAILING_COMMENTS_FIELD_NUMBER: builtins.int LEADING_DETACHED_COMMENTS_FIELD_NUMBER: builtins.int - # Identifies which part of the FileDescriptorProto was defined at this - # location. - # - # Each element is a field number or an index. They form a path from - # the root FileDescriptorProto to the place where the definition. 
For - # example, this path: - # [ 4, 3, 2, 7, 1 ] - # refers to: - # file.message_type(3) // 4, 3 - # .field(7) // 2, 7 - # .name() // 1 - # This is because FileDescriptorProto.message_type has field number 4: - # repeated DescriptorProto message_type = 4; - # and DescriptorProto.field has field number 2: - # repeated FieldDescriptorProto field = 2; - # and FieldDescriptorProto.name has field number 1: - # optional string name = 1; - # - # Thus, the above path gives the location of a field name. If we removed - # the last element: - # [ 4, 3, 2, 7 ] - # this path refers to the whole field declaration (from the beginning - # of the label to the terminating semicolon). @property - def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - # Always has exactly three or four elements: start line, start column, - # end line (optional, otherwise assumed same as start line), end column. - # These are packed into a single field for efficiency. Note that line - # and column numbers are zero-based -- typically you will want to add - # 1 to each before displaying to a user. + def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Identifies which part of the FileDescriptorProto was defined at this + location. + + Each element is a field number or an index. They form a path from + the root FileDescriptorProto to the place where the definition. For + example, this path: + [ 4, 3, 2, 7, 1 ] + refers to: + file.message_type(3) // 4, 3 + .field(7) // 2, 7 + .name() // 1 + This is because FileDescriptorProto.message_type has field number 4: + repeated DescriptorProto message_type = 4; + and DescriptorProto.field has field number 2: + repeated FieldDescriptorProto field = 2; + and FieldDescriptorProto.name has field number 1: + optional string name = 1; + + Thus, the above path gives the location of a field name. 
If we removed + the last element: + [ 4, 3, 2, 7 ] + this path refers to the whole field declaration (from the beginning + of the label to the terminating semicolon). + """ + pass @property - def span(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - # If this SourceCodeInfo represents a complete declaration, these are any - # comments appearing before and after the declaration which appear to be - # attached to the declaration. - # - # A series of line comments appearing on consecutive lines, with no other - # tokens appearing on those lines, will be treated as a single comment. - # - # leading_detached_comments will keep paragraphs of comments that appear - # before (but not connected to) the current element. Each paragraph, - # separated by empty lines, will be one comment element in the repeated - # field. - # - # Only the comment content is provided; comment markers (e.g. //) are - # stripped out. For block comments, leading whitespace and an asterisk - # will be stripped from the beginning of each line other than the first. - # Newlines are included in the output. - # - # Examples: - # - # optional int32 foo = 1; // Comment attached to foo. - # // Comment attached to bar. - # optional int32 bar = 2; - # - # optional string baz = 3; - # // Comment attached to baz. - # // Another line attached to baz. - # - # // Comment attached to qux. - # // - # // Another line attached to qux. - # optional double qux = 4; - # - # // Detached comment for corge. This is not leading or trailing comments - # // to qux or corge because there are blank lines separating it from - # // both. - # - # // Detached comment for corge paragraph 2. - # - # optional string corge = 5; - # /* Block comment attached - # * to corge. Leading asterisks - # * will be removed. */ - # /* Block comment attached to - # * grault. */ - # optional int32 grault = 6; - # - # // ignored detached comments. - leading_comments: typing.Text = ... 
- trailing_comments: typing.Text = ... + def span(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Always has exactly three or four elements: start line, start column, + end line (optional, otherwise assumed same as start line), end column. + These are packed into a single field for efficiency. Note that line + and column numbers are zero-based -- typically you will want to add + 1 to each before displaying to a user. + """ + pass + leading_comments: typing.Text + """If this SourceCodeInfo represents a complete declaration, these are any + comments appearing before and after the declaration which appear to be + attached to the declaration. + + A series of line comments appearing on consecutive lines, with no other + tokens appearing on those lines, will be treated as a single comment. + + leading_detached_comments will keep paragraphs of comments that appear + before (but not connected to) the current element. Each paragraph, + separated by empty lines, will be one comment element in the repeated + field. + + Only the comment content is provided; comment markers (e.g. //) are + stripped out. For block comments, leading whitespace and an asterisk + will be stripped from the beginning of each line other than the first. + Newlines are included in the output. + + Examples: + + optional int32 foo = 1; // Comment attached to foo. + // Comment attached to bar. + optional int32 bar = 2; + + optional string baz = 3; + // Comment attached to baz. + // Another line attached to baz. + + // Comment attached to qux. + // + // Another line attached to qux. + optional double qux = 4; + + // Detached comment for corge. This is not leading or trailing comments + // to qux or corge because there are blank lines separating it from + // both. + + // Detached comment for corge paragraph 2. + + optional string corge = 5; + /* Block comment attached + * to corge. Leading asterisks + * will be removed. 
*/ + /* Block comment attached to + * grault. */ + optional int32 grault = 6; + + // ignored detached comments. + """ + + trailing_comments: typing.Text @property def leading_detached_comments(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... def __init__(self, *, - path : typing.Optional[typing.Iterable[builtins.int]] = ..., - span : typing.Optional[typing.Iterable[builtins.int]] = ..., - leading_comments : typing.Optional[typing.Text] = ..., - trailing_comments : typing.Optional[typing.Text] = ..., - leading_detached_comments : typing.Optional[typing.Iterable[typing.Text]] = ..., + path: typing.Optional[typing.Iterable[builtins.int]] = ..., + span: typing.Optional[typing.Iterable[builtins.int]] = ..., + leading_comments: typing.Optional[typing.Text] = ..., + trailing_comments: typing.Optional[typing.Text] = ..., + leading_detached_comments: typing.Optional[typing.Iterable[typing.Text]] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"leading_comments",b"leading_comments",u"trailing_comments",b"trailing_comments"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"leading_comments",b"leading_comments",u"leading_detached_comments",b"leading_detached_comments",u"path",b"path",u"span",b"span",u"trailing_comments",b"trailing_comments"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["leading_comments",b"leading_comments","trailing_comments",b"trailing_comments"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["leading_comments",b"leading_comments","leading_detached_comments",b"leading_detached_comments","path",b"path","span",b"span","trailing_comments",b"trailing_comments"]) -> None: ... LOCATION_FIELD_NUMBER: builtins.int - # A Location identifies a piece of source code in a .proto file which - # corresponds to a particular definition. 
This information is intended - # to be useful to IDEs, code indexers, documentation generators, and similar - # tools. - # - # For example, say we have a file like: - # message Foo { - # optional string foo = 1; - # } - # Let's look at just the field definition: - # optional string foo = 1; - # ^ ^^ ^^ ^ ^^^ - # a bc de f ghi - # We have the following locations: - # span path represents - # [a,i) [ 4, 0, 2, 0 ] The whole field definition. - # [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). - # [c,d) [ 4, 0, 2, 0, 5 ] The type (string). - # [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). - # [g,h) [ 4, 0, 2, 0, 3 ] The number (1). - # - # Notes: - # - A location may refer to a repeated field itself (i.e. not to any - # particular index within it). This is used whenever a set of elements are - # logically enclosed in a single code segment. For example, an entire - # extend block (possibly containing multiple extension definitions) will - # have an outer location whose path refers to the "extensions" repeated - # field without an index. - # - Multiple locations may have the same path. This happens when a single - # logical declaration is spread out across multiple places. The most - # obvious example is the "extend" block again -- there may be multiple - # extend blocks in the same scope, each of which will have the same path. - # - A location's span is not always a subset of its parent's span. For - # example, the "extendee" of an extension declaration appears at the - # beginning of the "extend" block and is shared by all extensions within - # the block. - # - Just because a location's span is a subset of some other location's span - # does not mean that it is a descendant. For example, a "group" defines - # both a type and a field in a single declaration. Thus, the locations - # corresponding to the type and field and their components will overlap. 
- # - Code which tries to interpret locations should probably be designed to - # ignore those that it doesn't understand, as more types of locations could - # be recorded in the future. @property - def location(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SourceCodeInfo.Location]: ... + def location(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SourceCodeInfo.Location]: + """A Location identifies a piece of source code in a .proto file which + corresponds to a particular definition. This information is intended + to be useful to IDEs, code indexers, documentation generators, and similar + tools. + + For example, say we have a file like: + message Foo { + optional string foo = 1; + } + Let's look at just the field definition: + optional string foo = 1; + ^ ^^ ^^ ^ ^^^ + a bc de f ghi + We have the following locations: + span path represents + [a,i) [ 4, 0, 2, 0 ] The whole field definition. + [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + + Notes: + - A location may refer to a repeated field itself (i.e. not to any + particular index within it). This is used whenever a set of elements are + logically enclosed in a single code segment. For example, an entire + extend block (possibly containing multiple extension definitions) will + have an outer location whose path refers to the "extensions" repeated + field without an index. + - Multiple locations may have the same path. This happens when a single + logical declaration is spread out across multiple places. The most + obvious example is the "extend" block again -- there may be multiple + extend blocks in the same scope, each of which will have the same path. + - A location's span is not always a subset of its parent's span. 
For + example, the "extendee" of an extension declaration appears at the + beginning of the "extend" block and is shared by all extensions within + the block. + - Just because a location's span is a subset of some other location's span + does not mean that it is a descendant. For example, a "group" defines + both a type and a field in a single declaration. Thus, the locations + corresponding to the type and field and their components will overlap. + - Code which tries to interpret locations should probably be designed to + ignore those that it doesn't understand, as more types of locations could + be recorded in the future. + """ + pass def __init__(self, *, - location : typing.Optional[typing.Iterable[global___SourceCodeInfo.Location]] = ..., + location: typing.Optional[typing.Iterable[global___SourceCodeInfo.Location]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"location",b"location"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["location",b"location"]) -> None: ... global___SourceCodeInfo = SourceCodeInfo -# Describes the relationship between generated code and its original source -# file. A GeneratedCodeInfo message is associated with only one generated -# source file, but may contain references to different source .proto files. class GeneratedCodeInfo(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Describes the relationship between generated code and its original source + file. A GeneratedCodeInfo message is associated with only one generated + source file, but may contain references to different source .proto files. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor class Annotation(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ DESCRIPTOR: google.protobuf.descriptor.Descriptor PATH_FIELD_NUMBER: builtins.int SOURCE_FILE_FIELD_NUMBER: builtins.int BEGIN_FIELD_NUMBER: builtins.int END_FIELD_NUMBER: builtins.int - # Identifies the element in the original source .proto file. This field - # is formatted the same as SourceCodeInfo.Location.path. @property - def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ... - # Identifies the filesystem path to the original source .proto. - source_file: typing.Text = ... - # Identifies the starting offset in bytes in the generated code - # that relates to the identified object. - begin: builtins.int = ... - # Identifies the ending offset in bytes in the generated code that - # relates to the identified offset. The end offset should be one past - # the last relevant byte (so the length of the text = end - begin). - end: builtins.int = ... + def path(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: + """Identifies the element in the original source .proto file. This field + is formatted the same as SourceCodeInfo.Location.path. + """ + pass + source_file: typing.Text + """Identifies the filesystem path to the original source .proto.""" + + begin: builtins.int + """Identifies the starting offset in bytes in the generated code + that relates to the identified object. + """ + + end: builtins.int + """Identifies the ending offset in bytes in the generated code that + relates to the identified offset. The end offset should be one past + the last relevant byte (so the length of the text = end - begin). 
+ """ + def __init__(self, *, - path : typing.Optional[typing.Iterable[builtins.int]] = ..., - source_file : typing.Optional[typing.Text] = ..., - begin : typing.Optional[builtins.int] = ..., - end : typing.Optional[builtins.int] = ..., + path: typing.Optional[typing.Iterable[builtins.int]] = ..., + source_file: typing.Optional[typing.Text] = ..., + begin: typing.Optional[builtins.int] = ..., + end: typing.Optional[builtins.int] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"begin",b"begin",u"end",b"end",u"source_file",b"source_file"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"begin",b"begin",u"end",b"end",u"path",b"path",u"source_file",b"source_file"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["begin",b"begin","end",b"end","source_file",b"source_file"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["begin",b"begin","end",b"end","path",b"path","source_file",b"source_file"]) -> None: ... ANNOTATION_FIELD_NUMBER: builtins.int - # An Annotation connects some span of text in generated code to an element - # of its generating .proto file. @property - def annotation(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GeneratedCodeInfo.Annotation]: ... + def annotation(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GeneratedCodeInfo.Annotation]: + """An Annotation connects some span of text in generated code to an element + of its generating .proto file. + """ + pass def __init__(self, *, - annotation : typing.Optional[typing.Iterable[global___GeneratedCodeInfo.Annotation]] = ..., + annotation: typing.Optional[typing.Iterable[global___GeneratedCodeInfo.Annotation]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"annotation",b"annotation"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["annotation",b"annotation"]) -> None: ... global___GeneratedCodeInfo = GeneratedCodeInfo diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/duration_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/duration_pb2.pyi index c5773fdf8bcd..07e905033bf3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/duration_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/duration_pb2.pyi @@ -6,87 +6,93 @@ import builtins import google.protobuf.descriptor import google.protobuf.internal.well_known_types import google.protobuf.message +import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... - -# A Duration represents a signed, fixed-length span of time represented -# as a count of seconds and fractions of seconds at nanosecond -# resolution. It is independent of any calendar and concepts like "day" -# or "month". It is related to Timestamp in that the difference between -# two Timestamp values is a Duration and it can be added or subtracted -# from a Timestamp. Range is approximately +-10,000 years. -# -# # Examples -# -# Example 1: Compute Duration from two Timestamps in pseudo code. -# -# Timestamp start = ...; -# Timestamp end = ...; -# Duration duration = ...; -# -# duration.seconds = end.seconds - start.seconds; -# duration.nanos = end.nanos - start.nanos; -# -# if (duration.seconds < 0 && duration.nanos > 0) { -# duration.seconds += 1; -# duration.nanos -= 1000000000; -# } else if (duration.seconds > 0 && duration.nanos < 0) { -# duration.seconds -= 1; -# duration.nanos += 1000000000; -# } -# -# Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. 
-# -# Timestamp start = ...; -# Duration duration = ...; -# Timestamp end = ...; -# -# end.seconds = start.seconds + duration.seconds; -# end.nanos = start.nanos + duration.nanos; -# -# if (end.nanos < 0) { -# end.seconds -= 1; -# end.nanos += 1000000000; -# } else if (end.nanos >= 1000000000) { -# end.seconds += 1; -# end.nanos -= 1000000000; -# } -# -# Example 3: Compute Duration from datetime.timedelta in Python. -# -# td = datetime.timedelta(days=3, minutes=10) -# duration = Duration() -# duration.FromTimedelta(td) -# -# # JSON Mapping -# -# In JSON format, the Duration type is encoded as a string rather than an -# object, where the string ends in the suffix "s" (indicating seconds) and -# is preceded by the number of seconds, with nanoseconds expressed as -# fractional seconds. For example, 3 seconds with 0 nanoseconds should be -# encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should -# be expressed in JSON format as "3.000000001s", and 3 seconds and 1 -# microsecond should be expressed in JSON format as "3.000001s". +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + class Duration(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Duration): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """A Duration represents a signed, fixed-length span of time represented + as a count of seconds and fractions of seconds at nanosecond + resolution. It is independent of any calendar and concepts like "day" + or "month". It is related to Timestamp in that the difference between + two Timestamp values is a Duration and it can be added or subtracted + from a Timestamp. Range is approximately +-10,000 years. + + # Examples + + Example 1: Compute Duration from two Timestamps in pseudo code. 
+ + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + + Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + + Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + + # JSON Mapping + + In JSON format, the Duration type is encoded as a string rather than an + object, where the string ends in the suffix "s" (indicating seconds) and + is preceded by the number of seconds, with nanoseconds expressed as + fractional seconds. For example, 3 seconds with 0 nanoseconds should be + encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + microsecond should be expressed in JSON format as "3.000001s". + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor SECONDS_FIELD_NUMBER: builtins.int NANOS_FIELD_NUMBER: builtins.int - # Signed seconds of the span of time. Must be from -315,576,000,000 - # to +315,576,000,000 inclusive. Note: these bounds are computed from: - # 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years - seconds: builtins.int = ... - # Signed fractions of a second at nanosecond resolution of the span - # of time. 
Durations less than one second are represented with a 0 - # `seconds` field and a positive or negative `nanos` field. For durations - # of one second or more, a non-zero value for the `nanos` field must be - # of the same sign as the `seconds` field. Must be from -999,999,999 - # to +999,999,999 inclusive. - nanos: builtins.int = ... + seconds: builtins.int + """Signed seconds of the span of time. Must be from -315,576,000,000 + to +315,576,000,000 inclusive. Note: these bounds are computed from: + 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + """ + + nanos: builtins.int + """Signed fractions of a second at nanosecond resolution of the span + of time. Durations less than one second are represented with a 0 + `seconds` field and a positive or negative `nanos` field. For durations + of one second or more, a non-zero value for the `nanos` field must be + of the same sign as the `seconds` field. Must be from -999,999,999 + to +999,999,999 inclusive. + """ + def __init__(self, *, - seconds : builtins.int = ..., - nanos : builtins.int = ..., + seconds: typing.Optional[builtins.int] = ..., + nanos: typing.Optional[builtins.int] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"nanos",b"nanos",u"seconds",b"seconds"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["nanos",b"nanos","seconds",b"seconds"]) -> None: ... 
global___Duration = Duration diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/empty_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/empty_pb2.pyi index 1d916e9f7aeb..6615bb96069b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/empty_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/empty_pb2.pyi @@ -5,19 +5,20 @@ isort:skip_file import google.protobuf.descriptor import google.protobuf.message -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -# A generic empty message that you can re-use to avoid defining duplicated -# empty messages in your APIs. A typical example is to use it as the request -# or the response type of an API method. For instance: -# -# service Foo { -# rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); -# } -# -# The JSON representation for `Empty` is empty JSON object `{}`. class Empty(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to use it as the request + or the response type of an API method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + + The JSON representation for `Empty` is empty JSON object `{}`. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor def __init__(self, ) -> None: ... 
global___Empty = Empty diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/field_mask_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/field_mask_pb2.pyi index efa622a680f8..c14cfe6ab4b0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/field_mask_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/field_mask_pb2.pyi @@ -10,216 +10,218 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... - -# `FieldMask` represents a set of symbolic field paths, for example: -# -# paths: "f.a" -# paths: "f.b.d" -# -# Here `f` represents a field in some root message, `a` and `b` -# fields in the message found in `f`, and `d` a field found in the -# message in `f.b`. -# -# Field masks are used to specify a subset of fields that should be -# returned by a get operation or modified by an update operation. -# Field masks also have a custom JSON encoding (see below). -# -# # Field Masks in Projections -# -# When used in the context of a projection, a response message or -# sub-message is filtered by the API to only contain those fields as -# specified in the mask. For example, if the mask in the previous -# example is applied to a response message as follows: -# -# f { -# a : 22 -# b { -# d : 1 -# x : 2 -# } -# y : 13 -# } -# z: 8 -# -# The result will not contain specific values for fields x,y and z -# (their value will be set to the default, and omitted in proto text -# output): -# -# -# f { -# a : 22 -# b { -# d : 1 -# } -# } -# -# A repeated field is not allowed except at the last position of a -# paths string. -# -# If a FieldMask object is not present in a get operation, the -# operation applies to all fields (as if a FieldMask of all fields -# had been specified). -# -# Note that a field mask does not necessarily apply to the -# top-level response message. 
In case of a REST get operation, the -# field mask applies directly to the response, but in case of a REST -# list operation, the mask instead applies to each individual message -# in the returned resource list. In case of a REST custom method, -# other definitions may be used. Where the mask applies will be -# clearly documented together with its declaration in the API. In -# any case, the effect on the returned resource/resources is required -# behavior for APIs. -# -# # Field Masks in Update Operations -# -# A field mask in update operations specifies which fields of the -# targeted resource are going to be updated. The API is required -# to only change the values of the fields as specified in the mask -# and leave the others untouched. If a resource is passed in to -# describe the updated values, the API ignores the values of all -# fields not covered by the mask. -# -# If a repeated field is specified for an update operation, new values will -# be appended to the existing repeated field in the target resource. Note that -# a repeated field is only allowed in the last position of a `paths` string. -# -# If a sub-message is specified in the last position of the field mask for an -# update operation, then new value will be merged into the existing sub-message -# in the target resource. -# -# For example, given the target message: -# -# f { -# b { -# d: 1 -# x: 2 -# } -# c: [1] -# } -# -# And an update message: -# -# f { -# b { -# d: 10 -# } -# c: [2] -# } -# -# then if the field mask is: -# -# paths: ["f.b", "f.c"] -# -# then the result will be: -# -# f { -# b { -# d: 10 -# x: 2 -# } -# c: [1, 2] -# } -# -# An implementation may provide options to override this default behavior for -# repeated and message fields. -# -# In order to reset a field's value to the default, the field must -# be in the mask and set to the default value in the provided resource. 
-# Hence, in order to reset all fields of a resource, provide a default -# instance of the resource and set all fields in the mask, or do -# not provide a mask as described below. -# -# If a field mask is not present on update, the operation applies to -# all fields (as if a field mask of all fields has been specified). -# Note that in the presence of schema evolution, this may mean that -# fields the client does not know and has therefore not filled into -# the request will be reset to their default. If this is unwanted -# behavior, a specific service may require a client to always specify -# a field mask, producing an error if not. -# -# As with get operations, the location of the resource which -# describes the updated values in the request message depends on the -# operation kind. In any case, the effect of the field mask is -# required to be honored by the API. -# -# ## Considerations for HTTP REST -# -# The HTTP kind of an update operation which uses a field mask must -# be set to PATCH instead of PUT in order to satisfy HTTP semantics -# (PUT must only be used for full updates). -# -# # JSON Encoding of Field Masks -# -# In JSON, a field mask is encoded as a single string where paths are -# separated by a comma. Fields name in each path are converted -# to/from lower-camel naming conventions. -# -# As an example, consider the following message declarations: -# -# message Profile { -# User user = 1; -# Photo photo = 2; -# } -# message User { -# string display_name = 1; -# string address = 2; -# } -# -# In proto a field mask for `Profile` may look as such: -# -# mask { -# paths: "user.display_name" -# paths: "photo" -# } -# -# In JSON, the same mask is represented as below: -# -# { -# mask: "user.displayName,photo" -# } -# -# # Field Masks and Oneof Fields -# -# Field masks treat fields in oneofs just as regular fields. 
Consider the -# following message: -# -# message SampleMessage { -# oneof test_oneof { -# string name = 4; -# SubMessage sub_message = 9; -# } -# } -# -# The field mask can be: -# -# mask { -# paths: "name" -# } -# -# Or: -# -# mask { -# paths: "sub_message" -# } -# -# Note that oneof type names ("test_oneof" in this case) cannot be used in -# paths. -# -# ## Field Mask Verification -# -# The implementation of any API method which has a FieldMask type field in the -# request should verify the included field paths, and return an -# `INVALID_ARGUMENT` error if any path is unmappable. +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + class FieldMask(google.protobuf.message.Message, google.protobuf.internal.well_known_types.FieldMask): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """`FieldMask` represents a set of symbolic field paths, for example: + + paths: "f.a" + paths: "f.b.d" + + Here `f` represents a field in some root message, `a` and `b` + fields in the message found in `f`, and `d` a field found in the + message in `f.b`. + + Field masks are used to specify a subset of fields that should be + returned by a get operation or modified by an update operation. + Field masks also have a custom JSON encoding (see below). + + # Field Masks in Projections + + When used in the context of a projection, a response message or + sub-message is filtered by the API to only contain those fields as + specified in the mask. For example, if the mask in the previous + example is applied to a response message as follows: + + f { + a : 22 + b { + d : 1 + x : 2 + } + y : 13 + } + z: 8 + + The result will not contain specific values for fields x,y and z + (their value will be set to the default, and omitted in proto text + output): + + + f { + a : 22 + b { + d : 1 + } + } + + A repeated field is not allowed except at the last position of a + paths string. 
+ + If a FieldMask object is not present in a get operation, the + operation applies to all fields (as if a FieldMask of all fields + had been specified). + + Note that a field mask does not necessarily apply to the + top-level response message. In case of a REST get operation, the + field mask applies directly to the response, but in case of a REST + list operation, the mask instead applies to each individual message + in the returned resource list. In case of a REST custom method, + other definitions may be used. Where the mask applies will be + clearly documented together with its declaration in the API. In + any case, the effect on the returned resource/resources is required + behavior for APIs. + + # Field Masks in Update Operations + + A field mask in update operations specifies which fields of the + targeted resource are going to be updated. The API is required + to only change the values of the fields as specified in the mask + and leave the others untouched. If a resource is passed in to + describe the updated values, the API ignores the values of all + fields not covered by the mask. + + If a repeated field is specified for an update operation, new values will + be appended to the existing repeated field in the target resource. Note that + a repeated field is only allowed in the last position of a `paths` string. + + If a sub-message is specified in the last position of the field mask for an + update operation, then new value will be merged into the existing sub-message + in the target resource. + + For example, given the target message: + + f { + b { + d: 1 + x: 2 + } + c: [1] + } + + And an update message: + + f { + b { + d: 10 + } + c: [2] + } + + then if the field mask is: + + paths: ["f.b", "f.c"] + + then the result will be: + + f { + b { + d: 10 + x: 2 + } + c: [1, 2] + } + + An implementation may provide options to override this default behavior for + repeated and message fields. 
+ + In order to reset a field's value to the default, the field must + be in the mask and set to the default value in the provided resource. + Hence, in order to reset all fields of a resource, provide a default + instance of the resource and set all fields in the mask, or do + not provide a mask as described below. + + If a field mask is not present on update, the operation applies to + all fields (as if a field mask of all fields has been specified). + Note that in the presence of schema evolution, this may mean that + fields the client does not know and has therefore not filled into + the request will be reset to their default. If this is unwanted + behavior, a specific service may require a client to always specify + a field mask, producing an error if not. + + As with get operations, the location of the resource which + describes the updated values in the request message depends on the + operation kind. In any case, the effect of the field mask is + required to be honored by the API. + + ## Considerations for HTTP REST + + The HTTP kind of an update operation which uses a field mask must + be set to PATCH instead of PUT in order to satisfy HTTP semantics + (PUT must only be used for full updates). + + # JSON Encoding of Field Masks + + In JSON, a field mask is encoded as a single string where paths are + separated by a comma. Fields name in each path are converted + to/from lower-camel naming conventions. + + As an example, consider the following message declarations: + + message Profile { + User user = 1; + Photo photo = 2; + } + message User { + string display_name = 1; + string address = 2; + } + + In proto a field mask for `Profile` may look as such: + + mask { + paths: "user.display_name" + paths: "photo" + } + + In JSON, the same mask is represented as below: + + { + mask: "user.displayName,photo" + } + + # Field Masks and Oneof Fields + + Field masks treat fields in oneofs just as regular fields. 
Consider the + following message: + + message SampleMessage { + oneof test_oneof { + string name = 4; + SubMessage sub_message = 9; + } + } + + The field mask can be: + + mask { + paths: "name" + } + + Or: + + mask { + paths: "sub_message" + } + + Note that oneof type names ("test_oneof" in this case) cannot be used in + paths. + + ## Field Mask Verification + + The implementation of any API method which has a FieldMask type field in the + request should verify the included field paths, and return an + `INVALID_ARGUMENT` error if any path is unmappable. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor PATHS_FIELD_NUMBER: builtins.int - # The set of field mask paths. @property - def paths(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... + def paths(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """The set of field mask paths.""" + pass def __init__(self, *, - paths : typing.Optional[typing.Iterable[typing.Text]] = ..., + paths: typing.Optional[typing.Iterable[typing.Text]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"paths",b"paths"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["paths",b"paths"]) -> None: ... global___FieldMask = FieldMask diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/api_implementation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/api_implementation.pyi new file mode 100644 index 000000000000..4940124ff9e1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/api_implementation.pyi @@ -0,0 +1,3 @@ +def Type() -> str: ... +def Version() -> int: ... +def IsPythonDefaultSerializationDeterministic() -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi index 5e54051b6afa..1a86d45a1393 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/containers.pyi @@ -1,22 +1,11 @@ -from typing import ( - Any, - Callable, - Iterable, - Iterator, - List, - Mapping as Mapping, - MutableMapping as MutableMapping, - Optional, - Sequence, - Text, - TypeVar, - Union, - overload, -) +from collections.abc import MutableMapping +from typing import Any, Callable, Iterable, Iterator, Optional, Sequence, Text, TypeVar, Union, overload +from typing_extensions import SupportsIndex from google.protobuf.descriptor import Descriptor from google.protobuf.internal.message_listener import MessageListener from google.protobuf.internal.python_message import GeneratedProtocolMessageType +from google.protobuf.internal.type_checkers import _ValueChecker from google.protobuf.message import Message _T = TypeVar("_T") @@ -30,15 +19,14 @@ class BaseContainer(Sequence[_T]): def __len__(self) -> int: ... def __ne__(self, other: object) -> bool: ... def __hash__(self) -> int: ... - def __repr__(self) -> str: ... def sort(self, *, key: Optional[Callable[[_T], Any]] = ..., reverse: bool = ...) -> None: ... @overload - def __getitem__(self, key: int) -> _T: ... + def __getitem__(self, key: SupportsIndex) -> _T: ... @overload - def __getitem__(self, key: slice) -> List[_T]: ... + def __getitem__(self, key: slice) -> list[_T]: ... class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): - def __init__(self, message_listener: MessageListener, message_descriptor: Descriptor) -> None: ... + def __init__(self, message_listener: MessageListener, type_checker: _ValueChecker[_ScalarV]) -> None: ... 
def append(self, value: _ScalarV) -> None: ... def insert(self, key: int, value: _ScalarV) -> None: ... def extend(self, elem_seq: Optional[Iterable[_ScalarV]]) -> None: ... @@ -49,14 +37,14 @@ class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]): def __setitem__(self, key: int, value: _ScalarV) -> None: ... @overload def __setitem__(self, key: slice, value: Iterable[_ScalarV]) -> None: ... - def __getslice__(self, start: int, stop: int) -> List[_ScalarV]: ... + def __getslice__(self, start: int, stop: int) -> list[_ScalarV]: ... def __setslice__(self, start: int, stop: int, values: Iterable[_ScalarV]) -> None: ... def __delitem__(self, key: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __eq__(self, other: object) -> bool: ... class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): - def __init__(self, message_listener: MessageListener, type_checker: Any) -> None: ... + def __init__(self, message_listener: MessageListener, message_descriptor: Descriptor) -> None: ... def add(self, **kwargs: Any) -> _MessageV: ... def append(self, value: _MessageV) -> None: ... def insert(self, key: int, value: _MessageV) -> None: ... @@ -64,29 +52,51 @@ class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]): def MergeFrom(self: _M, other: _M) -> None: ... def remove(self, elem: _MessageV) -> None: ... def pop(self, key: int = ...) -> _MessageV: ... - def __getslice__(self, start: int, stop: int) -> List[_MessageV]: ... + def __getslice__(self, start: int, stop: int) -> list[_MessageV]: ... def __delitem__(self, key: Union[int, slice]) -> None: ... def __delslice__(self, start: int, stop: int) -> None: ... def __eq__(self, other: object) -> bool: ... class ScalarMap(MutableMapping[_K, _ScalarV]): + def __init__( + self, + message_listener: MessageListener, + key_checker: _ValueChecker[_K], + value_checker: _ValueChecker[_ScalarV], + entry_descriptor: Descriptor, + ) -> None: ... 
def __setitem__(self, k: _K, v: _ScalarV) -> None: ... def __delitem__(self, v: _K) -> None: ... def __getitem__(self, k: _K) -> _ScalarV: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_K]: ... def __eq__(self, other: object) -> bool: ... + @overload + def get(self, key: _K, default: None = ...) -> _ScalarV: ... + @overload + def get(self, key: _K, default: Union[_ScalarV, _T]) -> Union[_ScalarV, _T]: ... def MergeFrom(self: _M, other: _M): ... def InvalidateIterators(self) -> None: ... def GetEntryClass(self) -> GeneratedProtocolMessageType: ... class MessageMap(MutableMapping[_K, _MessageV]): + def __init__( + self, + message_listener: MessageListener, + message_descriptor: Descriptor, + key_checker: _ValueChecker[_K], + entry_descriptor: Descriptor, + ) -> None: ... def __setitem__(self, k: _K, v: _MessageV) -> None: ... def __delitem__(self, v: _K) -> None: ... def __getitem__(self, k: _K) -> _MessageV: ... def __len__(self) -> int: ... def __iter__(self) -> Iterator[_K]: ... def __eq__(self, other: object) -> bool: ... + @overload + def get(self, key: _K, default: None = ...) -> _MessageV: ... + @overload + def get(self, key: _K, default: Union[_MessageV, _T]) -> Union[_MessageV, _T]: ... def get_or_create(self, key: _K) -> _MessageV: ... def MergeFrom(self: _M, other: _M): ... def InvalidateIterators(self) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/decoder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/decoder.pyi index 24774ee286eb..db08f0a46654 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/decoder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/decoder.pyi @@ -1,30 +1,61 @@ -from typing import Any +from typing import Any, Callable + +from google.protobuf.descriptor import Descriptor, FieldDescriptor +from google.protobuf.message import Message + +_Decoder = Callable[[str, int, int, Message, dict[FieldDescriptor, Any]], int] +_NewDefault = Callable[[Message], Message] def ReadTag(buffer, pos): ... -def EnumDecoder(field_number, is_repeated, is_packed, key, new_default): ... - -Int32Decoder: Any -Int64Decoder: Any -UInt32Decoder: Any -UInt64Decoder: Any -SInt32Decoder: Any -SInt64Decoder: Any -Fixed32Decoder: Any -Fixed64Decoder: Any -SFixed32Decoder: Any -SFixed64Decoder: Any -FloatDecoder: Any -DoubleDecoder: Any -BoolDecoder: Any - -def StringDecoder(field_number, is_repeated, is_packed, key, new_default): ... -def BytesDecoder(field_number, is_repeated, is_packed, key, new_default): ... -def GroupDecoder(field_number, is_repeated, is_packed, key, new_default): ... -def MessageDecoder(field_number, is_repeated, is_packed, key, new_default): ... - -MESSAGE_SET_ITEM_TAG: Any - -def MessageSetItemDecoder(extensions_by_number): ... -def MapDecoder(field_descriptor, new_default, is_message_map): ... 
+ +Int32Decoder: _Decoder +Int64Decoder: _Decoder +UInt32Decoder: _Decoder +UInt64Decoder: _Decoder +SInt32Decoder: _Decoder +SInt64Decoder: _Decoder +Fixed32Decoder: _Decoder +Fixed64Decoder: _Decoder +SFixed32Decoder: _Decoder +SFixed64Decoder: _Decoder +FloatDecoder: _Decoder +DoubleDecoder: _Decoder +BoolDecoder: _Decoder + +def EnumDecoder( + field_number: int, + is_repeated: bool, + is_packed: bool, + key: FieldDescriptor, + new_default: _NewDefault, + clear_if_default: bool = ..., +) -> _Decoder: ... +def StringDecoder( + field_number: int, + is_repeated: bool, + is_packed: bool, + key: FieldDescriptor, + new_default: _NewDefault, + clear_if_default: bool = ..., +) -> _Decoder: ... +def BytesDecoder( + field_number: int, + is_repeated: bool, + is_packed: bool, + key: FieldDescriptor, + new_default: _NewDefault, + clear_if_default: bool = ..., +) -> _Decoder: ... +def GroupDecoder( + field_number: int, is_repeated: bool, is_packed: bool, key: FieldDescriptor, new_default: _NewDefault +) -> _Decoder: ... +def MessageDecoder( + field_number: int, is_repeated: bool, is_packed: bool, key: FieldDescriptor, new_default: _NewDefault +) -> _Decoder: ... + +MESSAGE_SET_ITEM_TAG: bytes + +def MessageSetItemDecoder(descriptor: Descriptor) -> _Decoder: ... +def MapDecoder(field_descriptor, new_default, is_message_map) -> _Decoder: ... 
SkipField: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/encoder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/encoder.pyi index 7a7923fe5d4b..058d2ff9fa35 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/encoder.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/encoder.pyi @@ -1,34 +1,40 @@ -from typing import Any +from typing import Callable -Int32Sizer: Any -UInt32Sizer: Any -SInt32Sizer: Any -Fixed32Sizer: Any -Fixed64Sizer: Any -BoolSizer: Any +from google.protobuf.descriptor import FieldDescriptor -def StringSizer(field_number, is_repeated, is_packed): ... -def BytesSizer(field_number, is_repeated, is_packed): ... -def GroupSizer(field_number, is_repeated, is_packed): ... -def MessageSizer(field_number, is_repeated, is_packed): ... -def MessageSetItemSizer(field_number): ... -def MapSizer(field_descriptor): ... -def TagBytes(field_number, wire_type): ... +_Sizer = Callable[[int, bool, bool], int] -Int32Encoder: Any -UInt32Encoder: Any -SInt32Encoder: Any -Fixed32Encoder: Any -Fixed64Encoder: Any -SFixed32Encoder: Any -SFixed64Encoder: Any -FloatEncoder: Any -DoubleEncoder: Any +Int32Sizer: _Sizer +UInt32Sizer: _Sizer +SInt32Sizer: _Sizer +Fixed32Sizer: _Sizer +Fixed64Sizer: _Sizer +BoolSizer: _Sizer -def BoolEncoder(field_number, is_repeated, is_packed): ... -def StringEncoder(field_number, is_repeated, is_packed): ... -def BytesEncoder(field_number, is_repeated, is_packed): ... -def GroupEncoder(field_number, is_repeated, is_packed): ... -def MessageEncoder(field_number, is_repeated, is_packed): ... -def MessageSetItemEncoder(field_number): ... -def MapEncoder(field_descriptor): ... +def StringSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def BytesSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... 
+def GroupSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def MessageSizer(field_number: int, is_repeated: bool, is_packed: bool) -> _Sizer: ... +def MessageSetItemSizer(field_number: int) -> _Sizer: ... +def MapSizer(field_descriptor: FieldDescriptor, is_message_map: bool) -> _Sizer: ... +def TagBytes(field_number: int, wire_type: int) -> bytes: ... + +_Encoder = Callable[[Callable[[bytes], int], bytes, bool], int] + +Int32Encoder: _Encoder +UInt32Encoder: _Encoder +SInt32Encoder: _Encoder +Fixed32Encoder: _Encoder +Fixed64Encoder: _Encoder +SFixed32Encoder: _Encoder +SFixed64Encoder: _Encoder +FloatEncoder: _Encoder +DoubleEncoder: _Encoder + +def BoolEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def StringEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def BytesEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def GroupEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def MessageEncoder(field_number: int, is_repeated: bool, is_packed: bool) -> _Encoder: ... +def MessageSetItemEncoder(field_number: int) -> _Encoder: ... +def MapEncoder(field_descriptor: FieldDescriptor) -> _Encoder: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi index f85bb54bb817..47f40a972dff 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/enum_type_wrapper.pyi @@ -1,4 +1,4 @@ -from typing import Generic, List, Text, Tuple, TypeVar +from typing import Generic, Text, TypeVar from google.protobuf.descriptor import EnumDescriptor @@ -11,8 +11,8 @@ class _EnumTypeWrapper(Generic[_V]): def __init__(self, enum_type: EnumDescriptor) -> None: ... def Name(self, number: _V) -> str: ... def Value(self, name: Text | bytes) -> _V: ... - def keys(self) -> List[str]: ... - def values(self) -> List[_V]: ... - def items(self) -> List[Tuple[str, _V]]: ... + def keys(self) -> list[str]: ... + def values(self) -> list[_V]: ... + def items(self) -> list[tuple[str, _V]]: ... class EnumTypeWrapper(_EnumTypeWrapper[int]): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/python_message.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/python_message.pyi index 538d70b447ef..0395ff64707c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/python_message.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/python_message.pyi @@ -1 +1,3 @@ -class GeneratedProtocolMessageType(type): ... +class GeneratedProtocolMessageType(type): + def __new__(cls, name, bases, dictionary): ... + def __init__(cls, name, bases, dictionary): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/type_checkers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/type_checkers.pyi new file mode 100644 index 000000000000..fbcc35ebf8b0 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/type_checkers.pyi @@ -0,0 +1,15 @@ +from typing import Generic, Protocol, TypeVar + +_T = TypeVar("_T") + +class _ValueChecker(Protocol[_T]): + def CheckValue(self, proposed_value: _T) -> _T: ... + def DefaultValue(self) -> _T: ... + +class TypeChecker(Generic[_T]): + def __init__(self, *acceptable_types: _T): ... + def CheckValue(self, proposed_value: _T) -> _T: ... + +class TypeCheckerWithDefault(TypeChecker[_T]): + def __init__(self, default_value: _T, *acceptable_types: _T): ... + def DefaultValue(self) -> _T: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi index 6b6b95a197cf..146907ce31bc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/internal/well_known_types.pyi @@ -1,13 +1,10 @@ from datetime import datetime, timedelta -from typing import Any as tAny, Dict, Optional, Type - -class Error(Exception): ... -class ParseError(Error): ... +from typing import Any as tAny, Optional class Any: type_url: tAny = ... value: tAny = ... - def Pack(self, msg: tAny, type_url_prefix: bytes = ..., deterministic: Optional[tAny] = ...) -> None: ... + def Pack(self, msg: tAny, type_url_prefix: str = ..., deterministic: Optional[tAny] = ...) -> None: ... def Unpack(self, msg: tAny): ... def TypeName(self): ... def Is(self, descriptor: tAny): ... 
@@ -91,4 +88,4 @@ class ListValue: def add_struct(self): ... def add_list(self): ... -WKTBASES: Dict[str, Type[tAny]] +WKTBASES: dict[str, type[tAny]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/json_format.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/json_format.pyi index 75fda13431a6..6ecc10c520ca 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/json_format.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/json_format.pyi @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Text, TypeVar, Union +from typing import Any, Optional, Text, TypeVar, Union from google.protobuf.descriptor_pool import DescriptorPool from google.protobuf.message import Message @@ -26,7 +26,7 @@ def MessageToDict( use_integers_for_enums: bool = ..., descriptor_pool: Optional[DescriptorPool] = ..., float_precision: Optional[int] = ..., -) -> Dict[Text, Any]: ... +) -> dict[Text, Any]: ... def Parse( text: Union[bytes, Text], message: _MessageT, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi index 8d63932e1621..340b3ac77518 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message.pyi @@ -1,5 +1,5 @@ -import sys -from typing import Any, ByteString, Sequence, Tuple, Type, TypeVar, Union +from _typeshed import Self +from typing import Any, Sequence, TypeVar from .descriptor import Descriptor, FieldDescriptor from .internal.extension_dict import _ExtensionDict, _ExtensionFieldDescriptor @@ -10,33 +10,30 @@ class EncodeError(Error): ... 
_M = TypeVar("_M", bound=Message) # message type (of self) -if sys.version_info >= (3, 0): - _Serialized = ByteString -else: - _Serialized = Union[bytes, buffer, unicode] - class Message: DESCRIPTOR: Descriptor - def __deepcopy__(self, memo=...): ... + def __deepcopy__(self: Self, memo: Any = ...) -> Self: ... def __eq__(self, other_msg): ... def __ne__(self, other_msg): ... - def MergeFrom(self: _M, other_msg: _M) -> None: ... - def CopyFrom(self: _M, other_msg: _M) -> None: ... + def MergeFrom(self: Self, other_msg: Self) -> None: ... + def CopyFrom(self: Self, other_msg: Self) -> None: ... def Clear(self) -> None: ... def SetInParent(self) -> None: ... def IsInitialized(self) -> bool: ... - def MergeFromString(self, serialized: _Serialized) -> int: ... - def ParseFromString(self, serialized: _Serialized) -> int: ... + def MergeFromString(self, serialized: bytes) -> int: ... + def ParseFromString(self, serialized: bytes) -> int: ... def SerializeToString(self, deterministic: bool = ...) -> bytes: ... def SerializePartialToString(self, deterministic: bool = ...) -> bytes: ... - def ListFields(self) -> Sequence[Tuple[FieldDescriptor, Any]]: ... + def ListFields(self) -> Sequence[tuple[FieldDescriptor, Any]]: ... + # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `HasExtension` & `ClearExtension` def HasExtension(self: _M, extension_handle: _ExtensionFieldDescriptor[_M, Any]) -> bool: ... def ClearExtension(self: _M, extension_handle: _ExtensionFieldDescriptor[_M, Any]) -> None: ... - def ByteSize(self) -> int: ... - @classmethod - def FromString(cls: Type[_M], s: _Serialized) -> _M: ... + # The TypeVar must be bound to `Message` or we get mypy errors, so we cannot use `Self` for `Extensions` @property def Extensions(self: _M) -> _ExtensionDict[_M]: ... + def ByteSize(self) -> int: ... + @classmethod + def FromString(cls: type[Self], s: bytes) -> Self: ... 
# Intentionally left out typing on these three methods, because they are # stringly typed and it is not useful to call them on a Message directly. # We prefer more specific typing on individual subclasses of Message @@ -45,4 +42,4 @@ class Message: def ClearField(self, field_name: Any) -> None: ... def WhichOneof(self, oneof_group: Any) -> Any: ... # TODO: check kwargs - def __init__(self, **kwargs) -> None: ... + def __init__(self, *args, **kwargs) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message_factory.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message_factory.pyi index d8a42d30bc65..ae299d8aeb69 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message_factory.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/message_factory.pyi @@ -1,4 +1,4 @@ -from typing import Any, Dict, Iterable, Optional, Type +from typing import Any, Iterable, Optional from google.protobuf.descriptor import Descriptor from google.protobuf.descriptor_pb2 import FileDescriptorProto @@ -8,7 +8,7 @@ from google.protobuf.message import Message class MessageFactory: pool: Any def __init__(self, pool: Optional[DescriptorPool] = ...) -> None: ... - def GetPrototype(self, descriptor: Descriptor) -> Type[Message]: ... - def GetMessages(self, files: Iterable[str]) -> Dict[str, Type[Message]]: ... + def GetPrototype(self, descriptor: Descriptor) -> type[Message]: ... + def GetMessages(self, files: Iterable[str]) -> dict[str, type[Message]]: ... -def GetMessages(file_protos: Iterable[FileDescriptorProto]) -> Dict[str, Type[Message]]: ... +def GetMessages(file_protos: Iterable[FileDescriptorProto]) -> dict[str, type[Message]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/service.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/service.pyi index 4874d5356ded..7f7a762ac9fa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/service.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/service.pyi @@ -1,5 +1,5 @@ from concurrent.futures import Future -from typing import Callable, Optional, Text, Type +from typing import Callable, Optional, Text from google.protobuf.descriptor import MethodDescriptor, ServiceDescriptor from google.protobuf.message import Message @@ -16,8 +16,8 @@ class Service: request: Message, done: Optional[Callable[[Message], None]], ) -> Optional[Future[Message]]: ... - def GetRequestClass(self, method_descriptor: MethodDescriptor) -> Type[Message]: ... - def GetResponseClass(self, method_descriptor: MethodDescriptor) -> Type[Message]: ... + def GetRequestClass(self, method_descriptor: MethodDescriptor) -> type[Message]: ... + def GetResponseClass(self, method_descriptor: MethodDescriptor) -> type[Message]: ... class RpcController: def Reset(self) -> None: ... @@ -34,6 +34,6 @@ class RpcChannel: method_descriptor: MethodDescriptor, rpc_controller: RpcController, request: Message, - response_class: Type[Message], + response_class: type[Message], done: Optional[Callable[[Message], None]], ) -> Optional[Future[Message]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/source_context_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/source_context_pb2.pyi index 07e1a4cf3936..d2f3e8db9b5c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/source_context_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/source_context_pb2.pyi @@ -8,19 +8,22 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -# `SourceContext` represents information about the source of a -# protobuf element, like the file in which it is defined. class SourceContext(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """`SourceContext` represents information about the source of a + protobuf element, like the file in which it is defined. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor FILE_NAME_FIELD_NUMBER: builtins.int - # The path-qualified name of the .proto file that contained the associated - # protobuf element. For example: `"google/protobuf/source_context.proto"`. - file_name: typing.Text = ... + file_name: typing.Text + """The path-qualified name of the .proto file that contained the associated + protobuf element. For example: `"google/protobuf/source_context.proto"`. + """ + def __init__(self, *, - file_name : typing.Text = ..., + file_name: typing.Optional[typing.Text] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"file_name",b"file_name"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["file_name",b"file_name"]) -> None: ... 
global___SourceContext = SourceContext diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/struct_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/struct_pb2.pyi index d6f93a3d179b..e1377c07493f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/struct_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/struct_pb2.pyi @@ -11,116 +11,131 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _NullValue: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType +class _NullValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_NullValue.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + NULL_VALUE: _NullValue.ValueType # 0 + """Null value.""" -# `NullValue` is a singleton enumeration to represent the null value for the -# `Value` type union. -# -# The JSON representation for `NullValue` is JSON `null`. class NullValue(_NullValue, metaclass=_NullValueEnumTypeWrapper): + """`NullValue` is a singleton enumeration to represent the null value for the + `Value` type union. + + The JSON representation for `NullValue` is JSON `null`. + """ pass -class _NullValue: - V = typing.NewType('V', builtins.int) -class _NullValueEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_NullValue.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # Null value. - NULL_VALUE = NullValue.V(0) - -# Null value. -NULL_VALUE = NullValue.V(0) + +NULL_VALUE: NullValue.ValueType # 0 +"""Null value.""" + global___NullValue = NullValue -# `Struct` represents a structured data value, consisting of fields -# which map to dynamically typed values. 
In some languages, `Struct` -# might be supported by a native representation. For example, in -# scripting languages like JS a struct is represented as an -# object. The details of that representation are described together -# with the proto support for the language. -# -# The JSON representation for `Struct` is JSON object. class Struct(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Struct): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """`Struct` represents a structured data value, consisting of fields + which map to dynamically typed values. In some languages, `Struct` + might be supported by a native representation. For example, in + scripting languages like JS a struct is represented as an + object. The details of that representation are described together + with the proto support for the language. + + The JSON representation for `Struct` is JSON object. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor class FieldsEntry(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + DESCRIPTOR: google.protobuf.descriptor.Descriptor KEY_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int - key: typing.Text = ... + key: typing.Text @property def value(self) -> global___Value: ... def __init__(self, *, - key : typing.Text = ..., - value : typing.Optional[global___Value] = ..., + key: typing.Optional[typing.Text] = ..., + value: typing.Optional[global___Value] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ... FIELDS_FIELD_NUMBER: builtins.int - # Unordered map of dynamically typed values. 
@property - def fields(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, global___Value]: ... + def fields(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, global___Value]: + """Unordered map of dynamically typed values.""" + pass def __init__(self, *, - fields : typing.Optional[typing.Mapping[typing.Text, global___Value]] = ..., + fields: typing.Optional[typing.Mapping[typing.Text, global___Value]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"fields",b"fields"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["fields",b"fields"]) -> None: ... global___Struct = Struct -# `Value` represents a dynamically typed value which can be either -# null, a number, a string, a boolean, a recursive struct value, or a -# list of values. A producer of value is expected to set one of that -# variants, absence of any variant indicates an error. -# -# The JSON representation for `Value` is JSON value. class Value(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """`Value` represents a dynamically typed value which can be either + null, a number, a string, a boolean, a recursive struct value, or a + list of values. A producer of value is expected to set one of these + variants. Absence of any variant indicates an error. + + The JSON representation for `Value` is JSON value. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor NULL_VALUE_FIELD_NUMBER: builtins.int NUMBER_VALUE_FIELD_NUMBER: builtins.int STRING_VALUE_FIELD_NUMBER: builtins.int BOOL_VALUE_FIELD_NUMBER: builtins.int STRUCT_VALUE_FIELD_NUMBER: builtins.int LIST_VALUE_FIELD_NUMBER: builtins.int - # Represents a null value. - null_value: global___NullValue.V = ... - # Represents a double value. - number_value: builtins.float = ... - # Represents a string value. - string_value: typing.Text = ... - # Represents a boolean value. - bool_value: builtins.bool = ... 
- # Represents a structured value. + null_value: global___NullValue.ValueType + """Represents a null value.""" + + number_value: builtins.float + """Represents a double value.""" + + string_value: typing.Text + """Represents a string value.""" + + bool_value: builtins.bool + """Represents a boolean value.""" + @property - def struct_value(self) -> global___Struct: ... - # Represents a repeated `Value`. + def struct_value(self) -> global___Struct: + """Represents a structured value.""" + pass @property - def list_value(self) -> global___ListValue: ... + def list_value(self) -> global___ListValue: + """Represents a repeated `Value`.""" + pass def __init__(self, *, - null_value : global___NullValue.V = ..., - number_value : builtins.float = ..., - string_value : typing.Text = ..., - bool_value : builtins.bool = ..., - struct_value : typing.Optional[global___Struct] = ..., - list_value : typing.Optional[global___ListValue] = ..., + null_value: typing.Optional[global___NullValue.ValueType] = ..., + number_value: typing.Optional[builtins.float] = ..., + string_value: typing.Optional[typing.Text] = ..., + bool_value: typing.Optional[builtins.bool] = ..., + struct_value: typing.Optional[global___Struct] = ..., + list_value: typing.Optional[global___ListValue] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"bool_value",b"bool_value",u"kind",b"kind",u"list_value",b"list_value",u"null_value",b"null_value",u"number_value",b"number_value",u"string_value",b"string_value",u"struct_value",b"struct_value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"bool_value",b"bool_value",u"kind",b"kind",u"list_value",b"list_value",u"null_value",b"null_value",u"number_value",b"number_value",u"string_value",b"string_value",u"struct_value",b"struct_value"]) -> None: ... 
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"kind",b"kind"]) -> typing.Optional[typing_extensions.Literal["null_value","number_value","string_value","bool_value","struct_value","list_value"]]: ... + def HasField(self, field_name: typing_extensions.Literal["bool_value",b"bool_value","kind",b"kind","list_value",b"list_value","null_value",b"null_value","number_value",b"number_value","string_value",b"string_value","struct_value",b"struct_value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["bool_value",b"bool_value","kind",b"kind","list_value",b"list_value","null_value",b"null_value","number_value",b"number_value","string_value",b"string_value","struct_value",b"struct_value"]) -> None: ... + def WhichOneof(self, oneof_group: typing_extensions.Literal["kind",b"kind"]) -> typing.Optional[typing_extensions.Literal["null_value","number_value","string_value","bool_value","struct_value","list_value"]]: ... global___Value = Value -# `ListValue` is a wrapper around a repeated field of values. -# -# The JSON representation for `ListValue` is JSON array. class ListValue(google.protobuf.message.Message, google.protobuf.internal.well_known_types.ListValue): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """`ListValue` is a wrapper around a repeated field of values. + + The JSON representation for `ListValue` is JSON array. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUES_FIELD_NUMBER: builtins.int - # Repeated field of dynamically typed values. @property - def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Value]: ... + def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Value]: + """Repeated field of dynamically typed values.""" + pass def __init__(self, *, - values : typing.Optional[typing.Iterable[global___Value]] = ..., + values: typing.Optional[typing.Iterable[global___Value]] = ..., ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal[u"values",b"values"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ... global___ListValue = ListValue diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/symbol_database.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/symbol_database.pyi index 09e32e9de2f7..0644a9be82f3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/symbol_database.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/symbol_database.pyi @@ -1,16 +1,16 @@ -from typing import Dict, Iterable, Type, Union +from typing import Iterable, Union from google.protobuf.descriptor import Descriptor, EnumDescriptor, FileDescriptor, ServiceDescriptor from google.protobuf.message import Message from google.protobuf.message_factory import MessageFactory class SymbolDatabase(MessageFactory): - def RegisterMessage(self, message: Union[Type[Message], Message]) -> Union[Type[Message], Message]: ... + def RegisterMessage(self, message: Union[type[Message], Message]) -> Union[type[Message], Message]: ... def RegisterMessageDescriptor(self, message_descriptor: Descriptor) -> None: ... def RegisterEnumDescriptor(self, enum_descriptor: EnumDescriptor) -> EnumDescriptor: ... def RegisterServiceDescriptor(self, service_descriptor: ServiceDescriptor) -> None: ... def RegisterFileDescriptor(self, file_descriptor: FileDescriptor) -> None: ... - def GetSymbol(self, symbol: str) -> Type[Message]: ... - def GetMessages(self, files: Iterable[str]) -> Dict[str, Type[Message]]: ... + def GetSymbol(self, symbol: str) -> type[Message]: ... + def GetMessages(self, files: Iterable[str]) -> dict[str, type[Message]]: ... def Default(): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/text_format.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/text_format.pyi index 7fe57b6429f3..f70959d02b47 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/text_format.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/text_format.pyi @@ -1,14 +1,10 @@ -import sys from _typeshed import SupportsWrite -from typing import Any, Callable, Iterable, Optional, Text, Tuple, TypeVar, Union +from typing import Any, Callable, Iterable, Optional, Text, TypeVar, Union from .descriptor import FieldDescriptor from .descriptor_pool import DescriptorPool from .message import Message -if sys.version_info < (3, 0): - long = int - _M = TypeVar("_M", bound=Message) # message type (of self) class Error(Exception): ... @@ -194,13 +190,13 @@ class Tokenizer: def TryConsume(self, token: str) -> bool: ... def Consume(self, token: str) -> None: ... def ConsumeComment(self) -> str: ... - def ConsumeCommentOrTrailingComment(self) -> Tuple[bool, str]: ... + def ConsumeCommentOrTrailingComment(self) -> tuple[bool, str]: ... def TryConsumeIdentifier(self) -> bool: ... def ConsumeIdentifier(self) -> str: ... def TryConsumeIdentifierOrNumber(self) -> bool: ... def ConsumeIdentifierOrNumber(self) -> str: ... def TryConsumeInteger(self) -> bool: ... - def ConsumeInteger(self, is_long: bool = ...) -> int: ... + def ConsumeInteger(self) -> int: ... def TryConsumeFloat(self) -> bool: ... def ConsumeFloat(self) -> float: ... def ConsumeBool(self) -> bool: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/timestamp_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/timestamp_pb2.pyi index 2eb95333fcbb..e3c8d9b5d07f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/timestamp_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/timestamp_pb2.pyi @@ -6,118 +6,124 @@ import builtins import google.protobuf.descriptor import google.protobuf.internal.well_known_types import google.protobuf.message +import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... - -# A Timestamp represents a point in time independent of any time zone or local -# calendar, encoded as a count of seconds and fractions of seconds at -# nanosecond resolution. The count is relative to an epoch at UTC midnight on -# January 1, 1970, in the proleptic Gregorian calendar which extends the -# Gregorian calendar backwards to year one. -# -# All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap -# second table is needed for interpretation, using a [24-hour linear -# smear](https://developers.google.com/time/smear). -# -# The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By -# restricting to that range, we ensure that we can convert to and from [RFC -# 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. -# -# # Examples -# -# Example 1: Compute Timestamp from POSIX `time()`. -# -# Timestamp timestamp; -# timestamp.set_seconds(time(NULL)); -# timestamp.set_nanos(0); -# -# Example 2: Compute Timestamp from POSIX `gettimeofday()`. -# -# struct timeval tv; -# gettimeofday(&tv, NULL); -# -# Timestamp timestamp; -# timestamp.set_seconds(tv.tv_sec); -# timestamp.set_nanos(tv.tv_usec * 1000); -# -# Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. 
-# -# FILETIME ft; -# GetSystemTimeAsFileTime(&ft); -# UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; -# -# // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z -# // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. -# Timestamp timestamp; -# timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); -# timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); -# -# Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. -# -# long millis = System.currentTimeMillis(); -# -# Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) -# .setNanos((int) ((millis % 1000) * 1000000)).build(); -# -# -# Example 5: Compute Timestamp from Java `Instant.now()`. -# -# Instant now = Instant.now(); -# -# Timestamp timestamp = -# Timestamp.newBuilder().setSeconds(now.getEpochSecond()) -# .setNanos(now.getNano()).build(); -# -# -# Example 6: Compute Timestamp from current time in Python. -# -# timestamp = Timestamp() -# timestamp.GetCurrentTime() -# -# # JSON Mapping -# -# In JSON format, the Timestamp type is encoded as a string in the -# [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the -# format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" -# where {year} is always expressed using four digits while {month}, {day}, -# {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional -# seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), -# are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone -# is required. A proto3 JSON serializer should always use UTC (as indicated by -# "Z") when printing the Timestamp type and a proto3 JSON parser should be -# able to accept both UTC and other timezones (as indicated by an offset). -# -# For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past -# 01:30 UTC on January 15, 2017. 
-# -# In JavaScript, one can convert a Date object to this format using the -# standard -# [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) -# method. In Python, a standard `datetime.datetime` object can be converted -# to this format using -# [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with -# the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use -# the Joda Time's [`ISODateTimeFormat.dateTime()`]( -# http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D -# ) to obtain a formatter capable of generating timestamps in this format. +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + class Timestamp(google.protobuf.message.Message, google.protobuf.internal.well_known_types.Timestamp): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """A Timestamp represents a point in time independent of any time zone or local + calendar, encoded as a count of seconds and fractions of seconds at + nanosecond resolution. The count is relative to an epoch at UTC midnight on + January 1, 1970, in the proleptic Gregorian calendar which extends the + Gregorian calendar backwards to year one. + + All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + second table is needed for interpretation, using a [24-hour linear + smear](https://developers.google.com/time/smear). + + The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + restricting to that range, we ensure that we can convert to and from [RFC + 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + + # Examples + + Example 1: Compute Timestamp from POSIX `time()`. + + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + + Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
+ + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + + Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + + Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + + + Example 5: Compute Timestamp from Java `Instant.now()`. + + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + + + Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + + # JSON Mapping + + In JSON format, the Timestamp type is encoded as a string in the + [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + where {year} is always expressed using four digits while {month}, {day}, + {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + is required. A proto3 JSON serializer should always use UTC (as indicated by + "Z") when printing the Timestamp type and a proto3 JSON parser should be + able to accept both UTC and other timezones (as indicated by an offset). 
+ + For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + 01:30 UTC on January 15, 2017. + + In JavaScript, one can convert a Date object to this format using the + standard + [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + method. In Python, a standard `datetime.datetime` object can be converted + to this format using + [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + the Joda Time's [`ISODateTimeFormat.dateTime()`]( + http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D + ) to obtain a formatter capable of generating timestamps in this format. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor SECONDS_FIELD_NUMBER: builtins.int NANOS_FIELD_NUMBER: builtins.int - # Represents seconds of UTC time since Unix epoch - # 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to - # 9999-12-31T23:59:59Z inclusive. - seconds: builtins.int = ... - # Non-negative fractions of a second at nanosecond resolution. Negative - # second values with fractions must still have non-negative nanos values - # that count forward in time. Must be from 0 to 999,999,999 - # inclusive. - nanos: builtins.int = ... + seconds: builtins.int + """Represents seconds of UTC time since Unix epoch + 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + 9999-12-31T23:59:59Z inclusive. + """ + + nanos: builtins.int + """Non-negative fractions of a second at nanosecond resolution. Negative + second values with fractions must still have non-negative nanos values + that count forward in time. Must be from 0 to 999,999,999 + inclusive. + """ + def __init__(self, *, - seconds : builtins.int = ..., - nanos : builtins.int = ..., + seconds: typing.Optional[builtins.int] = ..., + nanos: typing.Optional[builtins.int] = ..., ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal[u"nanos",b"nanos",u"seconds",b"seconds"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["nanos",b"nanos","seconds",b"seconds"]) -> None: ... global___Timestamp = Timestamp diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/type_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/type_pb2.pyi index 399cb4b0ab1a..7cc9f8c2d3a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/type_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/type_pb2.pyi @@ -12,177 +12,236 @@ import google.protobuf.source_context_pb2 import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _Syntax: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType +class _SyntaxEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Syntax.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + SYNTAX_PROTO2: _Syntax.ValueType # 0 + """Syntax `proto2`.""" + + SYNTAX_PROTO3: _Syntax.ValueType # 1 + """Syntax `proto3`.""" -# The syntax in which a protocol buffer element is defined. class Syntax(_Syntax, metaclass=_SyntaxEnumTypeWrapper): + """The syntax in which a protocol buffer element is defined.""" pass -class _Syntax: - V = typing.NewType('V', builtins.int) -class _SyntaxEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Syntax.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # Syntax `proto2`. - SYNTAX_PROTO2 = Syntax.V(0) - # Syntax `proto3`. - SYNTAX_PROTO3 = Syntax.V(1) - -# Syntax `proto2`. -SYNTAX_PROTO2 = Syntax.V(0) -# Syntax `proto3`. 
-SYNTAX_PROTO3 = Syntax.V(1) + +SYNTAX_PROTO2: Syntax.ValueType # 0 +"""Syntax `proto2`.""" + +SYNTAX_PROTO3: Syntax.ValueType # 1 +"""Syntax `proto3`.""" + global___Syntax = Syntax -# A protocol buffer message type. class Type(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """A protocol buffer message type.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int FIELDS_FIELD_NUMBER: builtins.int ONEOFS_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int SOURCE_CONTEXT_FIELD_NUMBER: builtins.int SYNTAX_FIELD_NUMBER: builtins.int - # The fully qualified message name. - name: typing.Text = ... - # The list of fields. + name: typing.Text + """The fully qualified message name.""" + @property - def fields(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Field]: ... - # The list of types appearing in `oneof` definitions in this type. + def fields(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Field]: + """The list of fields.""" + pass @property - def oneofs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ... - # The protocol buffer options. + def oneofs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: + """The list of types appearing in `oneof` definitions in this type.""" + pass @property - def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: ... - # The source context. + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """The protocol buffer options.""" + pass @property - def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: ... - # The source syntax. - syntax: global___Syntax.V = ... 
+ def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: + """The source context.""" + pass + syntax: global___Syntax.ValueType + """The source syntax.""" + def __init__(self, *, - name : typing.Text = ..., - fields : typing.Optional[typing.Iterable[global___Field]] = ..., - oneofs : typing.Optional[typing.Iterable[typing.Text]] = ..., - options : typing.Optional[typing.Iterable[global___Option]] = ..., - source_context : typing.Optional[google.protobuf.source_context_pb2.SourceContext] = ..., - syntax : global___Syntax.V = ..., + name: typing.Optional[typing.Text] = ..., + fields: typing.Optional[typing.Iterable[global___Field]] = ..., + oneofs: typing.Optional[typing.Iterable[typing.Text]] = ..., + options: typing.Optional[typing.Iterable[global___Option]] = ..., + source_context: typing.Optional[google.protobuf.source_context_pb2.SourceContext] = ..., + syntax: typing.Optional[global___Syntax.ValueType] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"source_context",b"source_context"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"fields",b"fields",u"name",b"name",u"oneofs",b"oneofs",u"options",b"options",u"source_context",b"source_context",u"syntax",b"syntax"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["source_context",b"source_context"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["fields",b"fields","name",b"name","oneofs",b"oneofs","options",b"options","source_context",b"source_context","syntax",b"syntax"]) -> None: ... global___Type = Type -# A single field of a message type. class Field(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... - # Basic field types. 
+ """A single field of a message type.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor + class _Kind: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _KindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Field._Kind.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + TYPE_UNKNOWN: Field._Kind.ValueType # 0 + """Field type unknown.""" + + TYPE_DOUBLE: Field._Kind.ValueType # 1 + """Field type double.""" + + TYPE_FLOAT: Field._Kind.ValueType # 2 + """Field type float.""" + + TYPE_INT64: Field._Kind.ValueType # 3 + """Field type int64.""" + + TYPE_UINT64: Field._Kind.ValueType # 4 + """Field type uint64.""" + + TYPE_INT32: Field._Kind.ValueType # 5 + """Field type int32.""" + + TYPE_FIXED64: Field._Kind.ValueType # 6 + """Field type fixed64.""" + + TYPE_FIXED32: Field._Kind.ValueType # 7 + """Field type fixed32.""" + + TYPE_BOOL: Field._Kind.ValueType # 8 + """Field type bool.""" + + TYPE_STRING: Field._Kind.ValueType # 9 + """Field type string.""" + + TYPE_GROUP: Field._Kind.ValueType # 10 + """Field type group. 
Proto2 syntax only, and deprecated.""" + + TYPE_MESSAGE: Field._Kind.ValueType # 11 + """Field type message.""" + + TYPE_BYTES: Field._Kind.ValueType # 12 + """Field type bytes.""" + + TYPE_UINT32: Field._Kind.ValueType # 13 + """Field type uint32.""" + + TYPE_ENUM: Field._Kind.ValueType # 14 + """Field type enum.""" + + TYPE_SFIXED32: Field._Kind.ValueType # 15 + """Field type sfixed32.""" + + TYPE_SFIXED64: Field._Kind.ValueType # 16 + """Field type sfixed64.""" + + TYPE_SINT32: Field._Kind.ValueType # 17 + """Field type sint32.""" + + TYPE_SINT64: Field._Kind.ValueType # 18 + """Field type sint64.""" + class Kind(_Kind, metaclass=_KindEnumTypeWrapper): + """Basic field types.""" pass - class _Kind: - V = typing.NewType('V', builtins.int) - class _KindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Kind.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # Field type unknown. - TYPE_UNKNOWN = Field.Kind.V(0) - # Field type double. - TYPE_DOUBLE = Field.Kind.V(1) - # Field type float. - TYPE_FLOAT = Field.Kind.V(2) - # Field type int64. - TYPE_INT64 = Field.Kind.V(3) - # Field type uint64. - TYPE_UINT64 = Field.Kind.V(4) - # Field type int32. - TYPE_INT32 = Field.Kind.V(5) - # Field type fixed64. - TYPE_FIXED64 = Field.Kind.V(6) - # Field type fixed32. - TYPE_FIXED32 = Field.Kind.V(7) - # Field type bool. - TYPE_BOOL = Field.Kind.V(8) - # Field type string. - TYPE_STRING = Field.Kind.V(9) - # Field type group. Proto2 syntax only, and deprecated. - TYPE_GROUP = Field.Kind.V(10) - # Field type message. - TYPE_MESSAGE = Field.Kind.V(11) - # Field type bytes. - TYPE_BYTES = Field.Kind.V(12) - # Field type uint32. - TYPE_UINT32 = Field.Kind.V(13) - # Field type enum. - TYPE_ENUM = Field.Kind.V(14) - # Field type sfixed32. - TYPE_SFIXED32 = Field.Kind.V(15) - # Field type sfixed64. - TYPE_SFIXED64 = Field.Kind.V(16) - # Field type sint32. - TYPE_SINT32 = Field.Kind.V(17) - # Field type sint64. 
- TYPE_SINT64 = Field.Kind.V(18) - - # Field type unknown. - TYPE_UNKNOWN = Field.Kind.V(0) - # Field type double. - TYPE_DOUBLE = Field.Kind.V(1) - # Field type float. - TYPE_FLOAT = Field.Kind.V(2) - # Field type int64. - TYPE_INT64 = Field.Kind.V(3) - # Field type uint64. - TYPE_UINT64 = Field.Kind.V(4) - # Field type int32. - TYPE_INT32 = Field.Kind.V(5) - # Field type fixed64. - TYPE_FIXED64 = Field.Kind.V(6) - # Field type fixed32. - TYPE_FIXED32 = Field.Kind.V(7) - # Field type bool. - TYPE_BOOL = Field.Kind.V(8) - # Field type string. - TYPE_STRING = Field.Kind.V(9) - # Field type group. Proto2 syntax only, and deprecated. - TYPE_GROUP = Field.Kind.V(10) - # Field type message. - TYPE_MESSAGE = Field.Kind.V(11) - # Field type bytes. - TYPE_BYTES = Field.Kind.V(12) - # Field type uint32. - TYPE_UINT32 = Field.Kind.V(13) - # Field type enum. - TYPE_ENUM = Field.Kind.V(14) - # Field type sfixed32. - TYPE_SFIXED32 = Field.Kind.V(15) - # Field type sfixed64. - TYPE_SFIXED64 = Field.Kind.V(16) - # Field type sint32. - TYPE_SINT32 = Field.Kind.V(17) - # Field type sint64. - TYPE_SINT64 = Field.Kind.V(18) - - # Whether a field is optional, required, or repeated. + + TYPE_UNKNOWN: Field.Kind.ValueType # 0 + """Field type unknown.""" + + TYPE_DOUBLE: Field.Kind.ValueType # 1 + """Field type double.""" + + TYPE_FLOAT: Field.Kind.ValueType # 2 + """Field type float.""" + + TYPE_INT64: Field.Kind.ValueType # 3 + """Field type int64.""" + + TYPE_UINT64: Field.Kind.ValueType # 4 + """Field type uint64.""" + + TYPE_INT32: Field.Kind.ValueType # 5 + """Field type int32.""" + + TYPE_FIXED64: Field.Kind.ValueType # 6 + """Field type fixed64.""" + + TYPE_FIXED32: Field.Kind.ValueType # 7 + """Field type fixed32.""" + + TYPE_BOOL: Field.Kind.ValueType # 8 + """Field type bool.""" + + TYPE_STRING: Field.Kind.ValueType # 9 + """Field type string.""" + + TYPE_GROUP: Field.Kind.ValueType # 10 + """Field type group. 
Proto2 syntax only, and deprecated.""" + + TYPE_MESSAGE: Field.Kind.ValueType # 11 + """Field type message.""" + + TYPE_BYTES: Field.Kind.ValueType # 12 + """Field type bytes.""" + + TYPE_UINT32: Field.Kind.ValueType # 13 + """Field type uint32.""" + + TYPE_ENUM: Field.Kind.ValueType # 14 + """Field type enum.""" + + TYPE_SFIXED32: Field.Kind.ValueType # 15 + """Field type sfixed32.""" + + TYPE_SFIXED64: Field.Kind.ValueType # 16 + """Field type sfixed64.""" + + TYPE_SINT32: Field.Kind.ValueType # 17 + """Field type sint32.""" + + TYPE_SINT64: Field.Kind.ValueType # 18 + """Field type sint64.""" + + + class _Cardinality: + ValueType = typing.NewType('ValueType', builtins.int) + V: typing_extensions.TypeAlias = ValueType + class _CardinalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Field._Cardinality.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + CARDINALITY_UNKNOWN: Field._Cardinality.ValueType # 0 + """For fields with unknown cardinality.""" + + CARDINALITY_OPTIONAL: Field._Cardinality.ValueType # 1 + """For optional fields.""" + + CARDINALITY_REQUIRED: Field._Cardinality.ValueType # 2 + """For required fields. Proto2 syntax only.""" + + CARDINALITY_REPEATED: Field._Cardinality.ValueType # 3 + """For repeated fields.""" + class Cardinality(_Cardinality, metaclass=_CardinalityEnumTypeWrapper): + """Whether a field is optional, required, or repeated.""" pass - class _Cardinality: - V = typing.NewType('V', builtins.int) - class _CardinalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Cardinality.V], builtins.type): - DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ... - # For fields with unknown cardinality. - CARDINALITY_UNKNOWN = Field.Cardinality.V(0) - # For optional fields. - CARDINALITY_OPTIONAL = Field.Cardinality.V(1) - # For required fields. Proto2 syntax only. - CARDINALITY_REQUIRED = Field.Cardinality.V(2) - # For repeated fields. 
- CARDINALITY_REPEATED = Field.Cardinality.V(3) - - # For fields with unknown cardinality. - CARDINALITY_UNKNOWN = Field.Cardinality.V(0) - # For optional fields. - CARDINALITY_OPTIONAL = Field.Cardinality.V(1) - # For required fields. Proto2 syntax only. - CARDINALITY_REQUIRED = Field.Cardinality.V(2) - # For repeated fields. - CARDINALITY_REPEATED = Field.Cardinality.V(3) + + CARDINALITY_UNKNOWN: Field.Cardinality.ValueType # 0 + """For fields with unknown cardinality.""" + + CARDINALITY_OPTIONAL: Field.Cardinality.ValueType # 1 + """For optional fields.""" + + CARDINALITY_REQUIRED: Field.Cardinality.ValueType # 2 + """For required fields. Proto2 syntax only.""" + + CARDINALITY_REPEATED: Field.Cardinality.ValueType # 3 + """For repeated fields.""" + KIND_FIELD_NUMBER: builtins.int CARDINALITY_FIELD_NUMBER: builtins.int @@ -194,122 +253,147 @@ class Field(google.protobuf.message.Message): OPTIONS_FIELD_NUMBER: builtins.int JSON_NAME_FIELD_NUMBER: builtins.int DEFAULT_VALUE_FIELD_NUMBER: builtins.int - # The field type. - kind: global___Field.Kind.V = ... - # The field cardinality. - cardinality: global___Field.Cardinality.V = ... - # The field number. - number: builtins.int = ... - # The field name. - name: typing.Text = ... - # The field type URL, without the scheme, for message or enumeration - # types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. - type_url: typing.Text = ... - # The index of the field type in `Type.oneofs`, for message or enumeration - # types. The first type has index 1; zero means the type is not in the list. - oneof_index: builtins.int = ... - # Whether to use alternative packed wire representation. - packed: builtins.bool = ... - # The protocol buffer options. 
+ kind: global___Field.Kind.ValueType + """The field type.""" + + cardinality: global___Field.Cardinality.ValueType + """The field cardinality.""" + + number: builtins.int + """The field number.""" + + name: typing.Text + """The field name.""" + + type_url: typing.Text + """The field type URL, without the scheme, for message or enumeration + types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + """ + + oneof_index: builtins.int + """The index of the field type in `Type.oneofs`, for message or enumeration + types. The first type has index 1; zero means the type is not in the list. + """ + + packed: builtins.bool + """Whether to use alternative packed wire representation.""" + @property - def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: ... - # The field JSON name. - json_name: typing.Text = ... - # The string value of the default value of this field. Proto2 syntax only. - default_value: typing.Text = ... + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """The protocol buffer options.""" + pass + json_name: typing.Text + """The field JSON name.""" + + default_value: typing.Text + """The string value of the default value of this field. 
Proto2 syntax only.""" + def __init__(self, *, - kind : global___Field.Kind.V = ..., - cardinality : global___Field.Cardinality.V = ..., - number : builtins.int = ..., - name : typing.Text = ..., - type_url : typing.Text = ..., - oneof_index : builtins.int = ..., - packed : builtins.bool = ..., - options : typing.Optional[typing.Iterable[global___Option]] = ..., - json_name : typing.Text = ..., - default_value : typing.Text = ..., + kind: typing.Optional[global___Field.Kind.ValueType] = ..., + cardinality: typing.Optional[global___Field.Cardinality.ValueType] = ..., + number: typing.Optional[builtins.int] = ..., + name: typing.Optional[typing.Text] = ..., + type_url: typing.Optional[typing.Text] = ..., + oneof_index: typing.Optional[builtins.int] = ..., + packed: typing.Optional[builtins.bool] = ..., + options: typing.Optional[typing.Iterable[global___Option]] = ..., + json_name: typing.Optional[typing.Text] = ..., + default_value: typing.Optional[typing.Text] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"cardinality",b"cardinality",u"default_value",b"default_value",u"json_name",b"json_name",u"kind",b"kind",u"name",b"name",u"number",b"number",u"oneof_index",b"oneof_index",u"options",b"options",u"packed",b"packed",u"type_url",b"type_url"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["cardinality",b"cardinality","default_value",b"default_value","json_name",b"json_name","kind",b"kind","name",b"name","number",b"number","oneof_index",b"oneof_index","options",b"options","packed",b"packed","type_url",b"type_url"]) -> None: ... global___Field = Field -# Enum type definition. class Enum(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... 
+ """Enum type definition.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int ENUMVALUE_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int SOURCE_CONTEXT_FIELD_NUMBER: builtins.int SYNTAX_FIELD_NUMBER: builtins.int - # Enum type name. - name: typing.Text = ... - # Enum value definitions. + name: typing.Text + """Enum type name.""" + @property - def enumvalue(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumValue]: ... - # Protocol buffer options. + def enumvalue(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EnumValue]: + """Enum value definitions.""" + pass @property - def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: ... - # The source context. + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """Protocol buffer options.""" + pass @property - def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: ... - # The source syntax. - syntax: global___Syntax.V = ... + def source_context(self) -> google.protobuf.source_context_pb2.SourceContext: + """The source context.""" + pass + syntax: global___Syntax.ValueType + """The source syntax.""" + def __init__(self, *, - name : typing.Text = ..., - enumvalue : typing.Optional[typing.Iterable[global___EnumValue]] = ..., - options : typing.Optional[typing.Iterable[global___Option]] = ..., - source_context : typing.Optional[google.protobuf.source_context_pb2.SourceContext] = ..., - syntax : global___Syntax.V = ..., + name: typing.Optional[typing.Text] = ..., + enumvalue: typing.Optional[typing.Iterable[global___EnumValue]] = ..., + options: typing.Optional[typing.Iterable[global___Option]] = ..., + source_context: typing.Optional[google.protobuf.source_context_pb2.SourceContext] = ..., + syntax: typing.Optional[global___Syntax.ValueType] = ..., ) -> None: ... 
- def HasField(self, field_name: typing_extensions.Literal[u"source_context",b"source_context"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"enumvalue",b"enumvalue",u"name",b"name",u"options",b"options",u"source_context",b"source_context",u"syntax",b"syntax"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["source_context",b"source_context"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["enumvalue",b"enumvalue","name",b"name","options",b"options","source_context",b"source_context","syntax",b"syntax"]) -> None: ... global___Enum = Enum -# Enum value definition. class EnumValue(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Enum value definition.""" + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int NUMBER_FIELD_NUMBER: builtins.int OPTIONS_FIELD_NUMBER: builtins.int - # Enum value name. - name: typing.Text = ... - # Enum value number. - number: builtins.int = ... - # Protocol buffer options. + name: typing.Text + """Enum value name.""" + + number: builtins.int + """Enum value number.""" + @property - def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: ... + def options(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Option]: + """Protocol buffer options.""" + pass def __init__(self, *, - name : typing.Text = ..., - number : builtins.int = ..., - options : typing.Optional[typing.Iterable[global___Option]] = ..., + name: typing.Optional[typing.Text] = ..., + number: typing.Optional[builtins.int] = ..., + options: typing.Optional[typing.Iterable[global___Option]] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"number",b"number",u"options",b"options"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["name",b"name","number",b"number","options",b"options"]) -> None: ... global___EnumValue = EnumValue -# A protocol buffer option, which can be attached to a message, field, -# enumeration, etc. class Option(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """A protocol buffer option, which can be attached to a message, field, + enumeration, etc. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor NAME_FIELD_NUMBER: builtins.int VALUE_FIELD_NUMBER: builtins.int - # The option's name. For protobuf built-in options (options defined in - # descriptor.proto), this is the short name. For example, `"map_entry"`. - # For custom options, it should be the fully-qualified name. For example, - # `"google.api.http"`. - name: typing.Text = ... - # The option's value packed in an Any message. If the value is a primitive, - # the corresponding wrapper type defined in google/protobuf/wrappers.proto - # should be used. If the value is an enum, it should be stored as an int32 - # value using the google.protobuf.Int32Value type. + name: typing.Text + """The option's name. For protobuf built-in options (options defined in + descriptor.proto), this is the short name. For example, `"map_entry"`. + For custom options, it should be the fully-qualified name. For example, + `"google.api.http"`. + """ + @property - def value(self) -> google.protobuf.any_pb2.Any: ... + def value(self) -> google.protobuf.any_pb2.Any: + """The option's value packed in an Any message. If the value is a primitive, + the corresponding wrapper type defined in google/protobuf/wrappers.proto + should be used. If the value is an enum, it should be stored as an int32 + value using the google.protobuf.Int32Value type. 
+ """ + pass def __init__(self, *, - name : typing.Text = ..., - value : typing.Optional[google.protobuf.any_pb2.Any] = ..., + name: typing.Optional[typing.Text] = ..., + value: typing.Optional[google.protobuf.any_pb2.Any] = ..., ) -> None: ... - def HasField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"value",b"value"]) -> None: ... + def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ... + def ClearField(self, field_name: typing_extensions.Literal["name",b"name","value",b"value"]) -> None: ... global___Option = Option diff --git a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/wrappers_pb2.pyi b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/wrappers_pb2.pyi index 52e4f5080c56..5bb133eae069 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/wrappers_pb2.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/protobuf/google/protobuf/wrappers_pb2.pyi @@ -8,139 +8,157 @@ import google.protobuf.message import typing import typing_extensions -DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ... +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor -# Wrapper message for `double`. -# -# The JSON representation for `DoubleValue` is JSON number. class DoubleValue(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `double`. + + The JSON representation for `DoubleValue` is JSON number. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The double value. - value: builtins.float = ... + value: builtins.float + """The double value.""" + def __init__(self, *, - value : builtins.float = ..., + value: typing.Optional[builtins.float] = ..., ) -> None: ... 
- def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___DoubleValue = DoubleValue -# Wrapper message for `float`. -# -# The JSON representation for `FloatValue` is JSON number. class FloatValue(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `float`. + + The JSON representation for `FloatValue` is JSON number. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The float value. - value: builtins.float = ... + value: builtins.float + """The float value.""" + def __init__(self, *, - value : builtins.float = ..., + value: typing.Optional[builtins.float] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___FloatValue = FloatValue -# Wrapper message for `int64`. -# -# The JSON representation for `Int64Value` is JSON string. class Int64Value(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `int64`. + + The JSON representation for `Int64Value` is JSON string. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The int64 value. - value: builtins.int = ... + value: builtins.int + """The int64 value.""" + def __init__(self, *, - value : builtins.int = ..., + value: typing.Optional[builtins.int] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___Int64Value = Int64Value -# Wrapper message for `uint64`. -# -# The JSON representation for `UInt64Value` is JSON string. 
class UInt64Value(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `uint64`. + + The JSON representation for `UInt64Value` is JSON string. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The uint64 value. - value: builtins.int = ... + value: builtins.int + """The uint64 value.""" + def __init__(self, *, - value : builtins.int = ..., + value: typing.Optional[builtins.int] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___UInt64Value = UInt64Value -# Wrapper message for `int32`. -# -# The JSON representation for `Int32Value` is JSON number. class Int32Value(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `int32`. + + The JSON representation for `Int32Value` is JSON number. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The int32 value. - value: builtins.int = ... + value: builtins.int + """The int32 value.""" + def __init__(self, *, - value : builtins.int = ..., + value: typing.Optional[builtins.int] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___Int32Value = Int32Value -# Wrapper message for `uint32`. -# -# The JSON representation for `UInt32Value` is JSON number. class UInt32Value(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `uint32`. + + The JSON representation for `UInt32Value` is JSON number. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The uint32 value. - value: builtins.int = ... 
+ value: builtins.int + """The uint32 value.""" + def __init__(self, *, - value : builtins.int = ..., + value: typing.Optional[builtins.int] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___UInt32Value = UInt32Value -# Wrapper message for `bool`. -# -# The JSON representation for `BoolValue` is JSON `true` and `false`. class BoolValue(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `bool`. + + The JSON representation for `BoolValue` is JSON `true` and `false`. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The bool value. - value: builtins.bool = ... + value: builtins.bool + """The bool value.""" + def __init__(self, *, - value : builtins.bool = ..., + value: typing.Optional[builtins.bool] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___BoolValue = BoolValue -# Wrapper message for `string`. -# -# The JSON representation for `StringValue` is JSON string. class StringValue(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `string`. + + The JSON representation for `StringValue` is JSON string. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The string value. - value: typing.Text = ... + value: typing.Text + """The string value.""" + def __init__(self, *, - value : typing.Text = ..., + value: typing.Optional[typing.Text] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... 
+ def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___StringValue = StringValue -# Wrapper message for `bytes`. -# -# The JSON representation for `BytesValue` is JSON string. class BytesValue(google.protobuf.message.Message): - DESCRIPTOR: google.protobuf.descriptor.Descriptor = ... + """Wrapper message for `bytes`. + + The JSON representation for `BytesValue` is JSON string. + """ + DESCRIPTOR: google.protobuf.descriptor.Descriptor VALUE_FIELD_NUMBER: builtins.int - # The bytes value. - value: builtins.bytes = ... + value: builtins.bytes + """The bytes value.""" + def __init__(self, *, - value : builtins.bytes = ..., + value: typing.Optional[builtins.bytes] = ..., ) -> None: ... - def ClearField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> None: ... + def ClearField(self, field_name: typing_extensions.Literal["value",b"value"]) -> None: ... global___BytesValue = BytesValue diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml index 0948fc580d0e..295659a1622c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/METADATA.toml @@ -1 +1 @@ -version = "5.8" +version = "5.8.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi index 9f9b404fc567..74a8554dde0c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/__init__.pyi @@ -1,5 +1,7 @@ import sys -from typing import Any, ContextManager +from _typeshed import Self +from contextlib import AbstractContextManager +from typing import Any, Callable, Iterable, Iterator from ._common import ( AIX as AIX, @@ -47,13 +49,29 @@ from ._common 
import ( NoSuchProcess as NoSuchProcess, TimeoutExpired as TimeoutExpired, ZombieProcess as ZombieProcess, + _Status, pconn, pcputimes, pctxsw, pgids, + pionice, popenfile, pthread, puids, + sbattery, + sconn, + scpufreq, + scpustats, + sdiskio, + sdiskpart, + sdiskusage, + sfan, + shwtemp, + snetio, + snicaddr, + snicstats, + sswap, + suser, ) if sys.platform == "linux": @@ -78,32 +96,46 @@ if sys.platform == "win32": IOPRIO_LOW as IOPRIO_LOW, IOPRIO_NORMAL as IOPRIO_NORMAL, IOPRIO_VERYLOW as IOPRIO_VERYLOW, + win_service_get as win_service_get, win_service_iter as win_service_iter, ) +if sys.platform == "linux": + from ._pslinux import pfullmem, pmem +elif sys.platform == "darwin": + from ._psosx import pfullmem, pmem +elif sys.platform == "win32": + from ._pswindows import pfullmem, pmem +else: + pmem = Any + pfullmem = Any + if sys.platform == "linux": PROCFS_PATH: str -AF_LINK: Any +AF_LINK: int version_info: tuple[int, int, int] __version__: str +__author__: str class Process: def __init__(self, pid: int | None = ...) -> None: ... - def __eq__(self, other) -> bool: ... - def __ne__(self, other) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... def __hash__(self) -> int: ... @property def pid(self) -> int: ... - def oneshot(self) -> ContextManager[None]: ... - def as_dict(self, attrs: Any | None = ..., ad_value: Any | None = ...): ... + def oneshot(self) -> AbstractContextManager[None]: ... + def as_dict( + self, attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = ..., ad_value: Any | None = ... + ) -> dict[str, Any]: ... def parent(self) -> Process: ... def parents(self) -> list[Process]: ... def is_running(self) -> bool: ... def ppid(self) -> int: ... def name(self) -> str: ... def exe(self) -> str: ... - def cmdline(self): ... - def status(self) -> int: ... + def cmdline(self) -> list[str]: ... + def status(self) -> _Status: ... def username(self) -> str: ... 
def create_time(self) -> float: ... def cwd(self) -> str: ... @@ -115,31 +147,34 @@ class Process: def num_fds(self) -> int: ... if sys.platform != "darwin": def io_counters(self): ... - def ionice(self, ioclass: int | None = ..., value: int | None = ...) -> int: ... + def ionice(self, ioclass: int | None = ..., value: int | None = ...) -> pionice: ... if sys.platform == "linux": - def rlimit(self, resource: int, limits: tuple[int, int] | None = ...): ... + def rlimit(self, resource: int, limits: tuple[int, int] | None = ...) -> tuple[int, int]: ... if sys.platform != "darwin": def cpu_affinity(self, cpus: list[int] | None = ...) -> list[int] | None: ... if sys.platform == "linux": def cpu_num(self) -> int: ... - def environ(self): ... + + def environ(self) -> dict[str, str]: ... if sys.platform == "win32": def num_handles(self) -> int: ... + def num_ctx_switches(self) -> pctxsw: ... def num_threads(self) -> int: ... def threads(self) -> list[pthread]: ... def children(self, recursive: bool = ...) -> list[Process]: ... def cpu_percent(self, interval: float | None = ...) -> float: ... def cpu_times(self) -> pcputimes: ... - def memory_info(self): ... - def memory_info_ex(self): ... - def memory_full_info(self): ... + def memory_info(self) -> pmem: ... + def memory_info_ex(self) -> pmem: ... + def memory_full_info(self) -> pfullmem: ... def memory_percent(self, memtype: str = ...) -> float: ... if sys.platform != "darwin": def memory_maps(self, grouped: bool = ...): ... + def open_files(self) -> list[popenfile]: ... def connections(self, kind: str = ...) -> list[pconn]: ... - def send_signal(self, sig) -> None: ... + def send_signal(self, sig: int) -> None: ... def suspend(self) -> None: ... def resume(self) -> None: ... def terminate(self) -> None: ... @@ -148,42 +183,44 @@ class Process: class Popen(Process): def __init__(self, *args, **kwargs) -> None: ... - def __dir__(self): ... - def __enter__(self): ... - def __exit__(self, *args, **kwargs): ... 
- def __getattribute__(self, name): ... - def wait(self, timeout: Any | None = ...): ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, *args, **kwargs) -> None: ... + def __getattribute__(self, name: str) -> Any: ... -def pids(): ... -def pid_exists(pid): ... -def process_iter(attrs: Any | None = ..., ad_value: Any | None = ...): ... -def wait_procs(procs, timeout: Any | None = ..., callback: Any | None = ...): ... -def cpu_count(logical: bool = ...): ... +def pids() -> list[int]: ... +def pid_exists(pid: int) -> bool: ... +def process_iter( + attrs: list[str] | tuple[str, ...] | set[str] | frozenset[str] | None = ..., ad_value: Any | None = ... +) -> Iterator[Process]: ... +def wait_procs( + procs: Iterable[Process], timeout: float | None = ..., callback: Callable[[Process], Any] | None = ... +) -> tuple[list[Process], list[Process]]: ... +def cpu_count(logical: bool = ...) -> int: ... def cpu_times(percpu: bool = ...): ... -def cpu_percent(interval: Any | None = ..., percpu: bool = ...): ... -def cpu_times_percent(interval: Any | None = ..., percpu: bool = ...): ... -def cpu_stats(): ... -def cpu_freq(percpu: bool = ...): ... -def getloadavg(): ... +def cpu_percent(interval: float | None = ..., percpu: bool = ...) -> float: ... +def cpu_times_percent(interval: float | None = ..., percpu: bool = ...): ... +def cpu_stats() -> scpustats: ... +def cpu_freq(percpu: bool = ...) -> scpufreq: ... +def getloadavg() -> tuple[float, float, float]: ... def virtual_memory(): ... -def swap_memory(): ... -def disk_usage(path): ... -def disk_partitions(all: bool = ...): ... -def disk_io_counters(perdisk: bool = ..., nowrap: bool = ...): ... -def net_io_counters(pernic: bool = ..., nowrap: bool = ...): ... -def net_connections(kind: str = ...): ... -def net_if_addrs(): ... -def net_if_stats(): ... +def swap_memory() -> sswap: ... +def disk_usage(path: str) -> sdiskusage: ... +def disk_partitions(all: bool = ...) -> list[sdiskpart]: ... 
+def disk_io_counters(perdisk: bool = ..., nowrap: bool = ...) -> sdiskio: ... +def net_io_counters(pernic: bool = ..., nowrap: bool = ...) -> snetio: ... +def net_connections(kind: str = ...) -> list[sconn]: ... +def net_if_addrs() -> dict[str, list[snicaddr]]: ... +def net_if_stats() -> dict[str, snicstats]: ... if sys.platform == "linux": - def sensors_temperatures(fahrenheit: bool = ...): ... - def sensors_fans(): ... + def sensors_temperatures(fahrenheit: bool = ...) -> dict[str, list[shwtemp]]: ... + def sensors_fans() -> dict[str, list[sfan]]: ... if sys.platform != "win32": - def sensors_battery(): ... + def sensors_battery() -> sbattery | None: ... -def boot_time(): ... -def users(): ... +def boot_time() -> float: ... +def users() -> list[suser]: ... if sys.platform == "linux": RLIMIT_AS: int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi index 18aca12b41e7..9fa49d105581 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_common.pyi @@ -1,5 +1,8 @@ import enum -from typing import Any, Callable, NamedTuple, TypeVar +from _typeshed import StrOrBytesPath, SupportsWrite +from socket import AddressFamily, SocketKind +from typing import Any, Callable, NamedTuple, TypeVar, overload +from typing_extensions import Literal POSIX: bool WINDOWS: bool @@ -12,20 +15,39 @@ NETBSD: bool BSD: bool SUNOS: bool AIX: bool -STATUS_RUNNING: str -STATUS_SLEEPING: str -STATUS_DISK_SLEEP: str -STATUS_STOPPED: str -STATUS_TRACING_STOP: str -STATUS_ZOMBIE: str -STATUS_DEAD: str -STATUS_WAKE_KILL: str -STATUS_WAKING: str -STATUS_IDLE: str -STATUS_LOCKED: str -STATUS_WAITING: str -STATUS_SUSPENDED: str -STATUS_PARKED: str + +STATUS_RUNNING: Literal["running"] +STATUS_SLEEPING: Literal["sleeping"] +STATUS_DISK_SLEEP: Literal["disk-sleep"] +STATUS_STOPPED: 
Literal["stopped"] +STATUS_TRACING_STOP: Literal["tracing-stop"] +STATUS_ZOMBIE: Literal["zombie"] +STATUS_DEAD: Literal["dead"] +STATUS_WAKE_KILL: Literal["wake-kill"] +STATUS_WAKING: Literal["waking"] +STATUS_IDLE: Literal["idle"] +STATUS_LOCKED: Literal["locked"] +STATUS_WAITING: Literal["waiting"] +STATUS_SUSPENDED: Literal["suspended"] +STATUS_PARKED: Literal["parked"] + +_Status = Literal[ + "running", + "sleeping", + "disk-sleep", + "stopped", + "tracing-stop", + "zombie", + "dead", + "wake-kill", + "waking", + "idle", + "locked", + "waiting", + "suspended", + "parked", +] + CONN_ESTABLISHED: str CONN_SYN_SENT: str CONN_SYN_RECV: str @@ -106,15 +128,15 @@ class suser(NamedTuple): class sconn(NamedTuple): fd: int - family: Any - type: Any - laddr: str - raddr: str + family: AddressFamily + type: SocketKind + laddr: addr | tuple[()] + raddr: addr | tuple[()] status: str pid: int class snicaddr(NamedTuple): - family: Any + family: AddressFamily address: str netmask: str | None broadcast: str | None @@ -122,7 +144,7 @@ class snicaddr(NamedTuple): class snicstats(NamedTuple): isup: bool - duplex: Any + duplex: int speed: int mtu: int @@ -184,7 +206,7 @@ class pio(NamedTuple): write_bytes: int class pionice(NamedTuple): - ioclass: Any + ioclass: int value: int class pctxsw(NamedTuple): @@ -193,8 +215,8 @@ class pctxsw(NamedTuple): class pconn(NamedTuple): fd: int - family: Any - type: Any + family: AddressFamily + type: SocketKind laddr: addr raddr: addr status: str @@ -203,7 +225,7 @@ class addr(NamedTuple): ip: str port: int -conn_tmap: Any +conn_tmap: dict[str, tuple[list[AddressFamily], list[SocketKind]]] class Error(Exception): __module__: str @@ -244,13 +266,16 @@ _Func = TypeVar("_Func", bound=Callable[..., Any]) def usage_percent(used, total, round_: int | None = ...) -> float: ... def memoize(fun: _Func) -> _Func: ... def memoize_when_activated(fun: _Func) -> _Func: ... -def isfile_strict(path) -> bool: ... -def path_exists_strict(path) -> bool: ... 
+def isfile_strict(path: StrOrBytesPath) -> bool: ... +def path_exists_strict(path: StrOrBytesPath) -> bool: ... def supports_ipv6() -> bool: ... def parse_environ_block(data): ... -def sockfam_to_enum(num): ... -def socktype_to_enum(num): ... -def conn_to_ntuple(fd, fam, type_, laddr, raddr, status, status_map, pid: Any | None = ...): ... +def sockfam_to_enum(num: int) -> AddressFamily: ... +def socktype_to_enum(num: int) -> SocketKind: ... +@overload +def conn_to_ntuple(fd: int, fam: int, type_: int, laddr, raddr, status: str, status_map, pid: int) -> sconn: ... +@overload +def conn_to_ntuple(fd: int, fam: int, type_: int, laddr, raddr, status: str, status_map, pid: None = ...) -> pconn: ... def deprecated_method(replacement: str) -> Callable[[_Func], _Func]: ... class _WrapNumbers: @@ -263,10 +288,10 @@ class _WrapNumbers: def cache_clear(self, name: Any | None = ...) -> None: ... def cache_info(self): ... -def wrap_numbers(input_dict, name): ... +def wrap_numbers(input_dict, name: str): ... def bytes2human(n: int, format: str = ...) -> str: ... -def get_procfs_path(): ... -def term_supports_colors(file=...) -> bool: ... +def get_procfs_path() -> str: ... +def term_supports_colors(file: SupportsWrite[str] = ...) -> bool: ... def hilite(s: str, color: str | None = ..., bold: bool = ...) -> str: ... -def print_color(s: str, color: str | None = ..., bold: bool = ..., file=...) -> None: ... +def print_color(s: str, color: str | None = ..., bold: bool = ..., file: SupportsWrite[str] = ...) -> None: ... def debug(msg) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_compat.pyi index b4efc9d9831c..4031e23fd72d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_compat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_compat.pyi @@ -1,6 +1,8 @@ -FileNotFoundError = FileNotFoundError -PermissionError = PermissionError -ProcessLookupError = ProcessLookupError -InterruptedError = InterruptedError -ChildProcessError = ChildProcessError -FileExistsError = FileExistsError +from builtins import ( + ChildProcessError as ChildProcessError, + FileExistsError as FileExistsError, + FileNotFoundError as FileNotFoundError, + InterruptedError as InterruptedError, + PermissionError as PermissionError, + ProcessLookupError as ProcessLookupError, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi index de3d5f2c9073..6a4df8e9afc9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psbsd.pyi @@ -1,4 +1,5 @@ -from typing import Any, ContextManager, NamedTuple +from contextlib import AbstractContextManager +from typing import Any, NamedTuple from ._common import ( FREEBSD as FREEBSD, @@ -109,7 +110,7 @@ def pids(): ... def pid_exists(pid): ... def is_zombie(pid): ... def wrap_exceptions(fun): ... -def wrap_exceptions_procfs(inst) -> ContextManager[None]: ... +def wrap_exceptions_procfs(inst) -> AbstractContextManager[None]: ... class Process: pid: Any @@ -140,12 +141,14 @@ class Process: def status(self): ... def io_counters(self): ... def cwd(self): ... 
+ class nt_mmap_grouped(NamedTuple): path: Any rss: Any private: Any ref_count: Any shadow_count: Any + class nt_mmap_ext(NamedTuple): addr: Any perms: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi index 7b09c463a47e..e62d9de09cd8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psutil/psutil/_psosx.pyi @@ -1,4 +1,5 @@ -from typing import Any, ContextManager, NamedTuple +from contextlib import AbstractContextManager +from typing import Any, NamedTuple from ._common import ( AccessDenied as AccessDenied, @@ -71,7 +72,7 @@ pid_exists: Any def is_zombie(pid): ... def wrap_exceptions(fun): ... -def catch_zombie(proc) -> ContextManager[None]: ... +def catch_zombie(proc) -> AbstractContextManager[None]: ... class Process: pid: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/METADATA.toml index d8484dc609d9..0009f38f042f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/METADATA.toml @@ -1 +1 @@ -version = "2.9" \ No newline at end of file +version = "2.9.*" \ No newline at end of file diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/_psycopg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/_psycopg.pyi index 79c74d472530..13aad3e7f8c0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/_psycopg.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/_psycopg.pyi @@ -1,4 +1,4 @@ -from typing import Any, Tuple, overload +from typing import Any, Callable, TypeVar, overload import psycopg2 import psycopg2.extensions @@ -92,15 +92,15 @@ class Column: 
table_oid: Any type_code: Any def __init__(self, *args, **kwargs) -> None: ... - def __eq__(self, other): ... - def __ge__(self, other): ... - def __getitem__(self, index): ... + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __getitem__(self, __index): ... def __getstate__(self): ... - def __gt__(self, other): ... - def __le__(self, other): ... + def __gt__(self, __other): ... + def __le__(self, __other): ... def __len__(self): ... - def __lt__(self, other): ... - def __ne__(self, other): ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... def __setstate__(self, state): ... class ConnectionInfo: @@ -202,15 +202,15 @@ class Notify: payload: Any pid: Any def __init__(self, *args, **kwargs) -> None: ... - def __eq__(self, other): ... - def __ge__(self, other): ... - def __getitem__(self, index): ... - def __gt__(self, other): ... + def __eq__(self, __other): ... + def __ge__(self, __other): ... + def __getitem__(self, __index): ... + def __gt__(self, __other): ... def __hash__(self): ... - def __le__(self, other): ... + def __le__(self, __other): ... def __len__(self): ... - def __lt__(self, other): ... - def __ne__(self, other): ... + def __lt__(self, __other): ... + def __ne__(self, __other): ... class OperationalError(psycopg2.DatabaseError): ... class ProgrammingError(psycopg2.DatabaseError): ... @@ -265,10 +265,11 @@ class Xid: prepared: Any def __init__(self, *args, **kwargs) -> None: ... def from_string(self, *args, **kwargs): ... - def __getitem__(self, index): ... + def __getitem__(self, __index): ... def __len__(self): ... _cursor = cursor +_T_cur = TypeVar("_T_cur", bound=_cursor) class connection: DataError: Any @@ -285,7 +286,7 @@ class connection: autocommit: Any binary_types: Any closed: Any - cursor_factory: Any + cursor_factory: Callable[..., _cursor] deferrable: Any dsn: Any encoding: Any @@ -304,9 +305,9 @@ class connection: def close(self, *args, **kwargs): ... def commit(self, *args, **kwargs): ... 
@overload - def cursor(self) -> _cursor: ... + def cursor(self, name=..., *, scrollable=..., withhold=...) -> _cursor: ... @overload - def cursor(self, name=..., cursor_factory: Any = ..., withhold=...) -> Any: ... + def cursor(self, name=..., cursor_factory: Callable[..., _T_cur] = ..., scrollable=..., withhold=...) -> _T_cur: ... def fileno(self, *args, **kwargs): ... def get_backend_pid(self, *args, **kwargs): ... def get_dsn_parameters(self, *args, **kwargs): ... @@ -359,9 +360,9 @@ class cursor: def copy_to(self, file, table, sep=..., null=..., columns=...): ... def execute(self, query, vars=...): ... def executemany(self, query, vars_list): ... - def fetchall(self) -> list[Tuple[Any, ...]]: ... - def fetchmany(self, size=...) -> list[Tuple[Any, ...]]: ... - def fetchone(self) -> Tuple[Any, ...] | Any: ... + def fetchall(self) -> list[tuple[Any, ...]]: ... + def fetchmany(self, size=...) -> list[tuple[Any, ...]]: ... + def fetchone(self) -> tuple[Any, ...] | Any: ... def mogrify(self, *args, **kwargs): ... def nextset(self): ... def scroll(self, value, mode=...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/errors.pyi index d3793b8b43c5..bf7d357959bd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/errors.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/errors.pyi @@ -1,17 +1,4 @@ -from typing import Any - -class Error: - def __init__(self, *args, **kwargs): ... - args: Any - cursor: Any - diag: Any - pgcode: Any - pgerror: Any - with_traceback: Any - -class Warning: - args: Any - with_traceback: Any +from psycopg2._psycopg import Error as Error, Warning as Warning class DatabaseError(Error): ... class InterfaceError(Error): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/extras.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/extras.pyi index 364de1a2f85a..fdd7cf429b34 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/extras.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/extras.pyi @@ -1,5 +1,5 @@ from collections import OrderedDict -from typing import Any, List +from typing import Any from psycopg2._ipaddress import register_ipaddress as register_ipaddress from psycopg2._json import ( @@ -45,7 +45,7 @@ class DictCursor(DictCursorBase): def execute(self, query, vars: Any | None = ...): ... def callproc(self, procname, vars: Any | None = ...): ... -class DictRow(List[Any]): +class DictRow(list[Any]): def __init__(self, cursor) -> None: ... def __getitem__(self, x): ... def __setitem__(self, x, v) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/pool.pyi b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/pool.pyi index b3aee45a681b..6bcf1b6d9fe5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/pool.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/psycopg2/psycopg2/pool.pyi @@ -9,14 +9,15 @@ class AbstractConnectionPool: maxconn: Any closed: bool def __init__(self, minconn, maxconn, *args, **kwargs) -> None: ... + # getconn, putconn and closeall are officially documented as methods of the + # abstract base class, but in reality, they only exist on the children classes + def getconn(self, key: Any | None = ...): ... + def putconn(self, conn: Any, key: Any | None = ..., close: bool = ...) -> None: ... + def closeall(self) -> None: ... -class SimpleConnectionPool(AbstractConnectionPool): - getconn: Any - putconn: Any - closeall: Any +class SimpleConnectionPool(AbstractConnectionPool): ... 
class ThreadedConnectionPool(AbstractConnectionPool): - def __init__(self, minconn, maxconn, *args, **kwargs) -> None: ... - def getconn(self, key: Any | None = ...): ... + # This subclass has a default value for conn which doesn't exist + # in the SimpleConnectionPool class, nor in the documentation def putconn(self, conn: Any | None = ..., key: Any | None = ..., close: bool = ...) -> None: ... - def closeall(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/METADATA.toml index 098d93d36d4a..8e97d6cc0e9f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/METADATA.toml @@ -1,3 +1,2 @@ -version = "20.0" -python2 = true +version = "22.0.*" requires = ["types-cryptography"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/SSL.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/SSL.pyi index 13b618ad3524..46e723918858 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/SSL.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/SSL.pyi @@ -20,17 +20,15 @@ TLSv1_METHOD: int TLSv1_1_METHOD: int TLSv1_2_METHOD: int -# To be added in pyOpenSSL 21 +TLS_METHOD: int +TLS_SERVER_METHOD: int +TLS_CLIENT_METHOD: int -# TLS_METHOD: int -# TLS_SERVER_METHOD: int -# TLS_CLIENT_METHOD: int - -# SSL3_VERSION: int -# TLS1_VERSION: int -# TLS1_1_VERSION: int -# TLS1_2_VERSION: int -# TLS1_3_VERSION: int +SSL3_VERSION: int +TLS1_VERSION: int +TLS1_1_VERSION: int +TLS1_2_VERSION: int +TLS1_3_VERSION: int OP_NO_SSLv2: int OP_NO_SSLv3: int @@ -113,9 +111,9 @@ class SysCallError(Error): ... def SSLeay_version(type: int) -> str: ... -class Session(object): ... +class Session: ... -class Connection(object): +class Connection: def __getattr__(self, name: str) -> Any: ... 
# incomplete def __init__(self, context: Context, socket: socket.socket | None = ...) -> None: ... def connect(self, addr: str | bytes | Sequence[str | int]) -> None: ... @@ -136,7 +134,7 @@ class Connection(object): def get_app_data(self) -> Any: ... def set_app_data(self, data: Any) -> None: ... -class Context(object): +class Context: def __getattr__(self, name: str) -> Any: ... # incomplete def __init__(self, method: int) -> None: ... def load_verify_locations(self, cafile: str | None, capath: str | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/crypto.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/crypto.pyi index ed0cda54eb26..9ffaa7412d4a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/crypto.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyOpenSSL/OpenSSL/crypto.pyi @@ -1,5 +1,6 @@ +from _typeshed import StrOrBytesPath from datetime import datetime -from typing import Any, Callable, Iterable, Sequence, Set, Text, Tuple, Union +from typing import Any, Callable, Iterable, Sequence, Union from cryptography.hazmat.primitives.asymmetric.dsa import DSAPrivateKey, DSAPublicKey from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey @@ -30,28 +31,28 @@ class PKey: def type(self) -> int: ... class X509Name: - countryName: Text - C: Text - stateOrProvinceName: Text - ST: Text - localityName: Text - L: Text - organizationName: Text - O: Text - organizationalUnitName: Text - OU: Text - commonName: Text - CN: Text - emailAddress: Text + countryName: str + C: str + stateOrProvinceName: str + ST: str + localityName: str + L: str + organizationName: str + O: str + organizationalUnitName: str + OU: str + commonName: str + CN: str + emailAddress: str def __init__(self, name: X509Name) -> None: ... def der(self) -> bytes: ... - def get_components(self) -> list[Tuple[bytes, bytes]]: ... 
+ def get_components(self) -> list[tuple[bytes, bytes]]: ... def hash(self) -> int: ... class X509: def __init__(self) -> None: ... def add_extensions(self, extensions: Iterable[X509Extension]) -> None: ... - def digest(self, digest_name: bytes) -> bytes: ... + def digest(self, digest_name: str) -> bytes: ... @classmethod def from_cryptography(cls, crypto_cert: Certificate) -> X509: ... def get_extension(self, index: int) -> X509Extension: ... @@ -74,7 +75,7 @@ class X509: def set_serial_number(self, serial: int) -> None: ... def set_subject(self, subject: X509Name) -> None: ... def set_version(self, version: int) -> None: ... - def sign(self, pkey: PKey, digest: Text | bytes) -> None: ... + def sign(self, pkey: PKey, digest: str) -> None: ... def subject_name_hash(self) -> bytes: ... def to_cryptography(self) -> Certificate: ... @@ -89,7 +90,7 @@ class X509Req: def get_version(self) -> int: ... def set_pubkey(self, pkey: PKey) -> None: ... def set_version(self, version: int) -> None: ... - def sign(self, pkey: PKey, digest: Text | bytes) -> None: ... + def sign(self, pkey: PKey, digest: str) -> None: ... def to_cryptography(self) -> CertificateSigningRequest: ... def verify(self, pkey: PKey) -> bool: ... @@ -118,7 +119,7 @@ class CRL: @classmethod def from_cryptography(cls, crypto_crl: CertificateRevocationList) -> CRL: ... def get_issuer(self) -> X509Name: ... - def get_revoked(self) -> Tuple[Revoked, ...]: ... + def get_revoked(self) -> tuple[Revoked, ...]: ... def set_lastUpdate(self, when: bytes) -> None: ... def set_nextUpdate(self, when: bytes) -> None: ... def set_version(self, version: int) -> None: ... @@ -129,7 +130,7 @@ class X509Store: def __init__(self) -> None: ... def add_cert(self, cert: X509) -> None: ... def add_crl(self, crl: CRL) -> None: ... - def load_locations(self, cafile: Text | bytes, capath: Text | bytes | None = ...) -> None: ... + def load_locations(self, cafile: StrOrBytesPath, capath: StrOrBytesPath | None = ...) -> None: ... 
def set_flags(self, flags: int) -> None: ... def set_time(self, vfy_time: datetime) -> None: ... @@ -141,7 +142,7 @@ class X509StoreContext: class X509StoreContextError(Exception): certificate: X509 - def __init__(self, message: Text | bytes, certificate: X509) -> None: ... + def __init__(self, message: str | bytes, certificate: X509) -> None: ... class X509StoreFlags: CRL_CHECK: int @@ -157,7 +158,7 @@ class X509StoreFlags: CB_ISSUER_CHECK: int class PKCS7: - def get_type_name(self) -> Text: ... + def get_type_name(self) -> str: ... def type_is_data(self) -> bool: ... def type_is_enveloped(self) -> bool: ... def type_is_signed(self) -> bool: ... @@ -166,7 +167,7 @@ class PKCS7: class PKCS12: def __init__(self) -> None: ... def export(self, passphrase: bytes | None = ..., iter: int = ..., maciter: int = ...) -> bytes: ... - def get_ca_certificates(self) -> Tuple[X509, ...]: ... + def get_ca_certificates(self) -> tuple[X509, ...]: ... def get_certificate(self) -> X509: ... def get_friendlyname(self) -> bytes | None: ... def get_privatekey(self) -> PKey: ... @@ -180,24 +181,24 @@ class NetscapeSPKI: def b64_encode(self) -> bytes: ... def get_pubkey(self) -> PKey: ... def set_pubkey(self, pkey: PKey) -> None: ... - def sign(self, pkey: PKey, digest: bytes) -> None: ... + def sign(self, pkey: PKey, digest: str) -> None: ... def verify(self, key: PKey) -> bool: ... -def get_elliptic_curves() -> Set[_EllipticCurve]: ... -def get_elliptic_curve(name: Text) -> _EllipticCurve: ... +def get_elliptic_curves() -> set[_EllipticCurve]: ... +def get_elliptic_curve(name: str) -> _EllipticCurve: ... def dump_certificate(type: int, cert: X509) -> bytes: ... def load_certificate(type: int, buffer: bytes) -> X509: ... def dump_certificate_request(type: int, req: X509Req) -> bytes: ... def load_certificate_request(type: int, buffer: bytes) -> X509Req: ... def dump_privatekey( - type: int, pkey: PKey, cipher: bytes | None = ..., passphrase: bytes | Callable[[], bytes] | None = ... 
+ type: int, pkey: PKey, cipher: str | None = ..., passphrase: bytes | Callable[[], bytes] | None = ... ) -> bytes: ... -def load_privatekey(type: int, buffer: Text | bytes, passphrase: bytes | Callable[[], bytes] | None = ...) -> PKey: ... +def load_privatekey(type: int, buffer: str | bytes, passphrase: bytes | Callable[[], bytes] | None = ...) -> PKey: ... def dump_publickey(type: int, pkey: PKey) -> bytes: ... -def load_publickey(type: int, buffer: Text | bytes) -> PKey: ... +def load_publickey(type: int, buffer: str | bytes) -> PKey: ... def dump_crl(type: int, crl: CRL) -> bytes: ... -def load_crl(type: int, buffer: Text | bytes) -> CRL: ... -def load_pkcs7_data(type: int, buffer: Text | bytes) -> PKCS7: ... -def load_pkcs12(buffer: Text | bytes, passphrase: bytes | None = ...) -> PKCS12: ... -def sign(pkey: PKey, data: Text | bytes, digest: Text | bytes) -> bytes: ... -def verify(cert: X509, signature: bytes, data: Text | bytes, digest: Text | bytes) -> None: ... +def load_crl(type: int, buffer: str | bytes) -> CRL: ... +def load_pkcs7_data(type: int, buffer: str | bytes) -> PKCS7: ... +def load_pkcs12(buffer: str | bytes, passphrase: bytes | None = ...) -> PKCS12: ... +def sign(pkey: PKey, data: str | bytes, digest: str) -> bytes: ... +def verify(cert: X509, signature: bytes, data: str | bytes, digest: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyRFC3339/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyRFC3339/METADATA.toml index 5f1541084942..bad265e4fe3f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyRFC3339/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyRFC3339/METADATA.toml @@ -1 +1 @@ -version = "0.1" +version = "1.1" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/METADATA.toml index 0291bda1316a..c31d59608950 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/METADATA.toml @@ -1,2 +1,3 @@ -version = "0.2" +version = "0.2.*" python2 = true +stubtest_apt_dependencies = ["portaudio19-dev"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/pyaudio.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/pyaudio.pyi index 6849056b1d47..6bce54a36e5f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/pyaudio.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyaudio/pyaudio.pyi @@ -1,68 +1,68 @@ -from typing import Callable, Mapping, Optional, Sequence, Tuple, Union +from typing import Callable, Mapping, Optional, Sequence, Union from typing_extensions import Final -paFloat32: Final[int] = ... -paInt32: Final[int] = ... -paInt24: Final[int] = ... -paInt16: Final[int] = ... -paInt8: Final[int] = ... -paUInt8: Final[int] = ... -paCustomFormat: Final[int] = ... +paFloat32: Final[int] +paInt32: Final[int] +paInt24: Final[int] +paInt16: Final[int] +paInt8: Final[int] +paUInt8: Final[int] +paCustomFormat: Final[int] -paInDevelopment: Final[int] = ... -paDirectSound: Final[int] = ... -paMME: Final[int] = ... -paASIO: Final[int] = ... -paSoundManager: Final[int] = ... -paCoreAudio: Final[int] = ... -paOSS: Final[int] = ... 
-paALSA: Final[int] = ... -paAL: Final[int] = ... -paBeOS: Final[int] = ... -paWDMKS: Final[int] = ... -paJACK: Final[int] = ... -paWASAPI: Final[int] = ... -paNoDevice: Final[int] = ... +paInDevelopment: Final[int] +paDirectSound: Final[int] +paMME: Final[int] +paASIO: Final[int] +paSoundManager: Final[int] +paCoreAudio: Final[int] +paOSS: Final[int] +paALSA: Final[int] +paAL: Final[int] +paBeOS: Final[int] +paWDMKS: Final[int] +paJACK: Final[int] +paWASAPI: Final[int] +paNoDevice: Final[int] -paNoError: Final[int] = ... -paNotInitialized: Final[int] = ... -paUnanticipatedHostError: Final[int] = ... -paInvalidChannelCount: Final[int] = ... -paInvalidSampleRate: Final[int] = ... -paInvalidDevice: Final[int] = ... -paInvalidFlag: Final[int] = ... -paSampleFormatNotSupported: Final[int] = ... -paBadIODeviceCombination: Final[int] = ... -paInsufficientMemory: Final[int] = ... -paBufferTooBig: Final[int] = ... -paBufferTooSmall: Final[int] = ... -paNullCallback: Final[int] = ... -paBadStreamPtr: Final[int] = ... -paTimedOut: Final[int] = ... -paInternalError: Final[int] = ... -paDeviceUnavailable: Final[int] = ... -paIncompatibleHostApiSpecificStreamInfo: Final[int] = ... -paStreamIsStopped: Final[int] = ... -paStreamIsNotStopped: Final[int] = ... -paInputOverflowed: Final[int] = ... -paOutputUnderflowed: Final[int] = ... -paHostApiNotFound: Final[int] = ... -paInvalidHostApi: Final[int] = ... -paCanNotReadFromACallbackStream: Final[int] = ... -paCanNotWriteToACallbackStream: Final[int] = ... -paCanNotReadFromAnOutputOnlyStream: Final[int] = ... -paCanNotWriteToAnInputOnlyStream: Final[int] = ... -paIncompatibleStreamHostApi: Final[int] = ... 
+paNoError: Final[int] +paNotInitialized: Final[int] +paUnanticipatedHostError: Final[int] +paInvalidChannelCount: Final[int] +paInvalidSampleRate: Final[int] +paInvalidDevice: Final[int] +paInvalidFlag: Final[int] +paSampleFormatNotSupported: Final[int] +paBadIODeviceCombination: Final[int] +paInsufficientMemory: Final[int] +paBufferTooBig: Final[int] +paBufferTooSmall: Final[int] +paNullCallback: Final[int] +paBadStreamPtr: Final[int] +paTimedOut: Final[int] +paInternalError: Final[int] +paDeviceUnavailable: Final[int] +paIncompatibleHostApiSpecificStreamInfo: Final[int] +paStreamIsStopped: Final[int] +paStreamIsNotStopped: Final[int] +paInputOverflowed: Final[int] +paOutputUnderflowed: Final[int] +paHostApiNotFound: Final[int] +paInvalidHostApi: Final[int] +paCanNotReadFromACallbackStream: Final[int] +paCanNotWriteToACallbackStream: Final[int] +paCanNotReadFromAnOutputOnlyStream: Final[int] +paCanNotWriteToAnInputOnlyStream: Final[int] +paIncompatibleStreamHostApi: Final[int] -paContinue: Final[int] = ... -paComplete: Final[int] = ... -paAbort: Final[int] = ... +paContinue: Final[int] +paComplete: Final[int] +paAbort: Final[int] -paInputUnderflow: Final[int] = ... -paInputOverflow: Final[int] = ... -paOutputUnderflow: Final[int] = ... -paOutputOverflow: Final[int] = ... -paPrimingOutput: Final[int] = ... +paInputUnderflow: Final[int] +paInputOverflow: Final[int] +paOutputUnderflow: Final[int] +paOutputOverflow: Final[int] +paPrimingOutput: Final[int] paMacCoreStreamInfo: PaMacCoreStreamInfo @@ -70,7 +70,7 @@ paMacCoreStreamInfo: PaMacCoreStreamInfo _ChannelMap = Sequence[int] _PaHostApiInfo = Mapping[str, Union[str, int]] _PaDeviceInfo = Mapping[str, Union[str, int, float]] -_StreamCallback = Callable[[Optional[bytes], int, Mapping[str, float], int], Tuple[Optional[bytes], int]] +_StreamCallback = Callable[[Optional[bytes], int, Mapping[str, float], int], tuple[Optional[bytes], int]] def get_format_from_width(width: int, unsigned: bool = ...) -> int: ... 
def get_portaudio_version() -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pycurl/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pycurl/METADATA.toml index 31f638bf33df..354f2a0fc08c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pycurl/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pycurl/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.1" -python2 = true +version = "7.44.*" +stubtest_apt_dependencies = ["libcurl4-openssl-dev"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi index dc17860278cb..c1f0bdcfc940 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pycurl/pycurl.pyi @@ -1,6 +1,5 @@ -# TODO(MichalPokorny): more precise types - -from typing import Any, Text, Tuple +from typing import Any, Text +from typing_extensions import final GLOBAL_ACK_EINTR: int GLOBAL_ALL: int @@ -14,11 +13,12 @@ def global_cleanup() -> None: ... version: str -def version_info() -> Tuple[int, str, int, str, int, str, int, str, Tuple[str, ...], Any, int, Any]: ... +def version_info() -> tuple[int, str, int, str, int, str, int, str, tuple[str, ...], Any, int, Any]: ... class error(Exception): ... -class Curl(object): +@final +class Curl: def close(self) -> None: ... def setopt(self, option: int, value: Any) -> None: ... def setopt_string(self, option: int, value: str) -> None: ... @@ -34,17 +34,19 @@ class Curl(object): # TODO(MichalPokorny): wat? USERPWD: int -class CurlMulti(object): +@final +class CurlMulti: def close(self) -> None: ... def add_handle(self, obj: Curl) -> None: ... def remove_handle(self, obj: Curl) -> None: ... - def perform(self) -> Tuple[Any, int]: ... - def fdset(self) -> Tuple[list[Any], list[Any], list[Any]]: ... + def perform(self) -> tuple[Any, int]: ... 
+ def fdset(self) -> tuple[list[Any], list[Any], list[Any]]: ... def select(self, timeout: float = ...) -> int: ... - def info_read(self, max_objects: int = ...) -> Tuple[int, list[Any], list[Any]]: ... - def socket_action(self, sockfd: int, ev_bitmask: int) -> Tuple[int, int]: ... + def info_read(self, max_objects: int = ...) -> tuple[int, list[Any], list[Any]]: ... + def socket_action(self, sockfd: int, ev_bitmask: int) -> tuple[int, int]: ... -class CurlShare(object): +@final +class CurlShare: def close(self) -> None: ... def setopt(self, option: int, value: Any) -> Any: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyfarmhash/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyfarmhash/METADATA.toml index 7c969026c3f0..a42da251bed5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyfarmhash/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyfarmhash/METADATA.toml @@ -1 +1 @@ -version = "0.2" +version = "0.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/METADATA.toml new file mode 100644 index 000000000000..bcb78f17fb41 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/METADATA.toml @@ -0,0 +1 @@ +version = "2.4.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/__init__.pyi new file mode 100644 index 000000000000..bda5b5a7f4cc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/__init__.pyi @@ -0,0 +1 @@ +__version__: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/api.pyi new file mode 100644 index 000000000000..dcc9f7e6ac74 --- /dev/null +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/api.pyi @@ -0,0 +1,15 @@ +from collections.abc import Iterable, Iterator, Sequence +from typing import Any, Pattern + +from pyflakes.reporter import Reporter + +__all__ = ["check", "checkPath", "checkRecursive", "iterSourceCode", "main"] + +PYTHON_SHEBANG_REGEX: Pattern[bytes] + +def check(codeString: str, filename: str, reporter: Reporter | None = ...) -> int: ... +def checkPath(filename, reporter: Reporter | None = ...) -> int: ... +def isPythonFile(filename) -> bool: ... +def iterSourceCode(paths: Iterable[Any]) -> Iterator[Any]: ... +def checkRecursive(paths: Iterable[Any], reporter: Reporter) -> int: ... +def main(prog: str | None = ..., args: Sequence[Any] | None = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/checker.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/checker.pyi new file mode 100644 index 000000000000..833cdb9dcc8d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/checker.pyi @@ -0,0 +1,361 @@ +import ast +import sys +from collections.abc import Callable, Iterable, Iterator +from tokenize import TokenInfo +from typing import Any, ClassVar, Pattern, TypeVar, overload +from typing_extensions import Literal, ParamSpec + +from pyflakes.messages import Message + +_AnyFunction = Callable[..., Any] +_F = TypeVar("_F", bound=_AnyFunction) +_P = ParamSpec("_P") +_T = TypeVar("_T") + +PY2: bool +PY35_PLUS: bool +PY36_PLUS: bool +PY38_PLUS: bool +PYPY: bool + +def getNodeType(node_class: type[ast.AST]) -> str: ... +def get_raise_argument(node: ast.Raise) -> ast.expr | None: ... +def getAlternatives(n: ast.If | ast.Try) -> list[ast.AST]: ... 
+ +FOR_TYPES: tuple[type[ast.For], type[ast.AsyncFor]] +LOOP_TYPES: tuple[type[ast.While], type[ast.For], type[ast.AsyncFor]] +FUNCTION_TYPES: tuple[type[ast.FunctionDef], type[ast.AsyncFunctionDef]] +ANNASSIGN_TYPES: tuple[type[ast.AnnAssign]] +TYPE_COMMENT_RE: Pattern[str] +ASCII_NON_ALNUM: str +TYPE_IGNORE_RE: Pattern[str] +TYPE_FUNC_RE: Pattern[str] +MAPPING_KEY_RE: Pattern[str] +CONVERSION_FLAG_RE: Pattern[str] +WIDTH_RE: Pattern[str] +PRECISION_RE: Pattern[str] +LENGTH_RE: Pattern[str] +VALID_CONVERSIONS: frozenset[str] + +_FormatType = tuple[str | None, str | None, str | None, str | None, str] +_PercentFormat = tuple[str, _FormatType | None] + +def parse_percent_format(s: str) -> tuple[_PercentFormat, ...]: ... + +class _FieldsOrder(dict[type[ast.AST], tuple[str, ...]]): + def __missing__(self, node_class: type[ast.AST]) -> tuple[str, ...]: ... + +def counter(items: Iterable[_T]) -> dict[_T, int]: ... + +_OmitType = str | tuple[str, ...] | None + +def iter_child_nodes(node: ast.AST, omit: _OmitType = ..., _fields_order: _FieldsOrder = ...) -> Iterator[ast.AST]: ... +@overload +def convert_to_value(item: ast.Str) -> str: ... # type: ignore[misc] +@overload +def convert_to_value(item: ast.Bytes) -> bytes: ... # type: ignore[misc] +@overload +def convert_to_value(item: ast.Tuple) -> tuple[Any, ...]: ... # type: ignore[misc] +@overload +def convert_to_value(item: ast.Name | ast.NameConstant) -> Any: ... +@overload +def convert_to_value(item: ast.AST) -> UnhandledKeyType: ... +def is_notimplemented_name_node(node: object) -> bool: ... + +class Binding: + name: str + source: ast.AST | None + used: Literal[False] | tuple[Any, ast.AST] + def __init__(self, name: str, source: ast.AST | None) -> None: ... + def redefines(self, other: Binding) -> bool: ... + +class Definition(Binding): ... + +class Builtin(Definition): + def __init__(self, name: str) -> None: ... + +class UnhandledKeyType: ... 
+ +class VariableKey: + name: str + def __init__(self, item: ast.Name) -> None: ... + def __eq__(self, compare: object) -> bool: ... + def __hash__(self) -> int: ... + +class Importation(Definition): + fullName: str + redefined: list[Any] + def __init__(self, name: str, source: ast.AST | None, full_name: str | None = ...) -> None: ... + @property + def source_statement(self) -> str: ... + +class SubmoduleImportation(Importation): + def __init__(self, name: str, source: ast.Import | None) -> None: ... + +class ImportationFrom(Importation): + module: str + real_name: str + def __init__(self, name: str, source: ast.AST, module: str, real_name: str | None = ...) -> None: ... + +class StarImportation(Importation): + def __init__(self, name: str, source: ast.AST) -> None: ... + +class FutureImportation(ImportationFrom): + used: tuple[Any, ast.AST] + def __init__(self, name: str, source: ast.AST, scope) -> None: ... + +class Argument(Binding): ... +class Assignment(Binding): ... + +class Annotation(Binding): + def redefines(self, other: Binding) -> Literal[False]: ... + +class FunctionDefinition(Definition): ... +class ClassDefinition(Definition): ... + +class ExportBinding(Binding): + names: list[str] + def __init__(self, name: str, source: ast.AST, scope: Scope) -> None: ... + +class Scope(dict[str, Binding]): + importStarred: bool + +class ClassScope(Scope): ... + +class FunctionScope(Scope): + usesLocals: bool + alwaysUsed: ClassVar[set[str]] + globals: set[str] + returnValue: Any + isGenerator: bool + def __init__(self) -> None: ... + def unusedAssignments(self) -> Iterator[tuple[str, Binding]]: ... + +class GeneratorScope(Scope): ... +class ModuleScope(Scope): ... +class DoctestScope(ModuleScope): ... + +class DummyNode: + lineno: int + col_offset: int + def __init__(self, lineno: int, col_offset: int) -> None: ... + +class DetectClassScopedMagic: + names: list[str] + +def getNodeName(node: ast.AST) -> str: ... 
+ +TYPING_MODULES: frozenset[Literal["typing", "typing_extensions"]] + +def is_typing_overload(value: Binding, scope_stack) -> bool: ... + +class AnnotationState: + NONE: ClassVar[Literal[0]] + STRING: ClassVar[Literal[1]] + BARE: ClassVar[Literal[2]] + +def in_annotation(func: _F) -> _F: ... +def in_string_annotation(func: _F) -> _F: ... +def make_tokens(code: str | bytes) -> tuple[TokenInfo, ...]: ... + +if sys.version_info >= (3, 8): + _NamedExpr = ast.NamedExpr +else: + _NamedExpr = Any + +if sys.version_info >= (3, 10): + _Match = ast.Match + _MatchCase = ast.match_case + _MatchValue = ast.MatchValue + _MatchSingleton = ast.MatchSingleton + _MatchSequence = ast.MatchSequence + _MatchStar = ast.MatchStar + _MatchMapping = ast.MatchMapping + _MatchClass = ast.MatchClass + _MatchAs = ast.MatchAs + _MatchOr = ast.MatchOr +else: + _Match = Any + _MatchCase = Any + _MatchValue = Any + _MatchSingleton = Any + _MatchSequence = Any + _MatchStar = Any + _MatchMapping = Any + _MatchClass = Any + _MatchAs = Any + _MatchOr = Any + +class Checker: + nodeDepth: int + offset: tuple[int, int] | None + builtIns: set[str] + deadScopes: list[Any] + messages: list[Any] + filename: str + withDoctest: bool + scopeStack: list[Scope] + exceptHandlers: list[Any] + root: ast.AST + def __init__( + self, + tree: ast.AST, + filename: str = ..., + builtins: Iterable[str] | None = ..., + withDoctest: bool = ..., + file_tokens: tuple[Any, ...] = ..., + ) -> None: ... + def deferFunction(self, callable: _AnyFunction) -> None: ... + def deferAssignment(self, callable: _AnyFunction) -> None: ... + def runDeferred(self, deferred: _AnyFunction) -> None: ... + @property + def futuresAllowed(self) -> bool: ... + @futuresAllowed.setter + def futuresAllowed(self, value: Literal[False]) -> None: ... + @property + def annotationsFutureEnabled(self) -> bool: ... + @annotationsFutureEnabled.setter + def annotationsFutureEnabled(self, value: Literal[True]) -> None: ... 
+ @property + def scope(self) -> Scope: ... + def popScope(self) -> None: ... + def checkDeadScopes(self) -> None: ... + def pushScope(self, scopeClass: type[Scope] = ...) -> None: ... + def report(self, messageClass: Callable[_P, Message], *args: _P.args, **kwargs: _P.kwargs) -> None: ... + def getParent(self, node: ast.AST) -> ast.AST: ... + def getCommonAncestor(self, lnode: ast.AST, rnode: ast.AST, stop: ast.AST) -> ast.AST: ... + def descendantOf(self, node: ast.AST, ancestors: ast.AST, stop: ast.AST) -> bool: ... + def getScopeNode(self, node: ast.AST) -> ast.AST | None: ... + def differentForks(self, lnode: ast.AST, rnode: ast.AST) -> bool: ... + def addBinding(self, node: ast.AST, value: Binding) -> None: ... + def getNodeHandler(self, node_class: type[ast.AST]): ... + def handleNodeLoad(self, node: ast.AST) -> None: ... + def handleNodeStore(self, node: ast.AST) -> None: ... + def handleNodeDelete(self, node: ast.AST) -> None: ... + def handleChildren(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def isLiteralTupleUnpacking(self, node: ast.AST) -> bool | None: ... + def isDocstring(self, node: ast.AST) -> bool: ... + def getDocstring(self, node: ast.AST) -> tuple[str, int] | tuple[None, None]: ... + def handleNode(self, node: ast.AST | None, parent) -> None: ... + def handleDoctests(self, node: ast.AST) -> None: ... + def handleStringAnnotation(self, s: str, node: ast.AST, ref_lineno: int, ref_col_offset: int, err: type[Message]) -> None: ... + def handleAnnotation(self, annotation: ast.AST, node: ast.AST) -> None: ... + def ignore(self, node: ast.AST) -> None: ... + def DELETE(self, tree: ast.Delete, omit: _OmitType = ...) -> None: ... + def PRINT(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def FOR(self, tree: ast.For, omit: _OmitType = ...) -> None: ... + def ASYNCFOR(self, tree: ast.AsyncFor, omit: _OmitType = ...) -> None: ... + def WHILE(self, tree: ast.While, omit: _OmitType = ...) -> None: ... 
+ def WITH(self, tree: ast.With, omit: _OmitType = ...) -> None: ... + def WITHITEM(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def ASYNCWITH(self, tree: ast.AsyncWith, omit: _OmitType = ...) -> None: ... + def ASYNCWITHITEM(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def TRYFINALLY(self, tree: ast.Try, omit: _OmitType = ...) -> None: ... + def EXEC(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def EXPR(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def ASSIGN(self, tree: ast.Assign, omit: _OmitType = ...) -> None: ... + def PASS(self, node: ast.AST) -> None: ... + def BOOLOP(self, tree: ast.BoolOp, omit: _OmitType = ...) -> None: ... + def UNARYOP(self, tree: ast.UnaryOp, omit: _OmitType = ...) -> None: ... + def SET(self, tree: ast.Set, omit: _OmitType = ...) -> None: ... + def REPR(self, tree: ast.AST, omit: _OmitType = ...) -> None: ... + def ATTRIBUTE(self, tree: ast.Attribute, omit: _OmitType = ...) -> None: ... + def STARRED(self, tree: ast.Starred, omit: _OmitType = ...) -> None: ... + def NAMECONSTANT(self, tree: ast.NameConstant, omit: _OmitType = ...) -> None: ... + def NAMEDEXPR(self, tree: _NamedExpr, omit: _OmitType = ...) -> None: ... + def SUBSCRIPT(self, node: ast.Subscript) -> None: ... + def CALL(self, node: ast.Call) -> None: ... + def BINOP(self, node: ast.BinOp) -> None: ... + def CONSTANT(self, node: ast.Constant) -> None: ... + if sys.version_info < (3, 8): + def NUM(self, node: ast.Num) -> None: ... + def BYTES(self, node: ast.Bytes) -> None: ... + def ELLIPSIS(self, node: ast.Ellipsis) -> None: ... + + def STR(self, node: ast.Str) -> None: ... + def SLICE(self, tree: ast.Slice, omit: _OmitType = ...) -> None: ... + def EXTSLICE(self, tree: ast.ExtSlice, omit: _OmitType = ...) -> None: ... + def INDEX(self, tree: ast.Index, omit: _OmitType = ...) -> None: ... + def LOAD(self, node: ast.Load) -> None: ... + def STORE(self, node: ast.Store) -> None: ... 
+ def DEL(self, node: ast.Del) -> None: ... + def AUGLOAD(self, node: ast.AugLoad) -> None: ... + def AUGSTORE(self, node: ast.AugStore) -> None: ... + def PARAM(self, node: ast.Param) -> None: ... + def AND(self, node: ast.And) -> None: ... + def OR(self, node: ast.Or) -> None: ... + def ADD(self, node: ast.Add) -> None: ... + def SUB(self, node: ast.Sub) -> None: ... + def MULT(self, node: ast.Mult) -> None: ... + def DIV(self, node: ast.Div) -> None: ... + def MOD(self, node: ast.Mod) -> None: ... + def POW(self, node: ast.Pow) -> None: ... + def LSHIFT(self, node: ast.LShift) -> None: ... + def RSHIFT(self, node: ast.RShift) -> None: ... + def BITOR(self, node: ast.BitOr) -> None: ... + def BITXOR(self, node: ast.BitXor) -> None: ... + def BITAND(self, node: ast.BitAnd) -> None: ... + def FLOORDIV(self, node: ast.FloorDiv) -> None: ... + def INVERT(self, node: ast.Invert) -> None: ... + def NOT(self, node: ast.Not) -> None: ... + def UADD(self, node: ast.UAdd) -> None: ... + def USUB(self, node: ast.USub) -> None: ... + def EQ(self, node: ast.Eq) -> None: ... + def NOTEQ(self, node: ast.NotEq) -> None: ... + def LT(self, node: ast.Lt) -> None: ... + def LTE(self, node: ast.LtE) -> None: ... + def GT(self, node: ast.Gt) -> None: ... + def GTE(self, node: ast.GtE) -> None: ... + def IS(self, node: ast.Is) -> None: ... + def ISNOT(self, node: ast.IsNot) -> None: ... + def IN(self, node: ast.In) -> None: ... + def NOTIN(self, node: ast.NotIn) -> None: ... + def MATMULT(self, node: ast.MatMult) -> None: ... + def RAISE(self, node: ast.Raise) -> None: ... + def COMPREHENSION(self, tree: ast.comprehension, omit: _OmitType = ...) -> None: ... + def KEYWORD(self, tree: ast.keyword, omit: _OmitType = ...) -> None: ... + def FORMATTEDVALUE(self, tree: ast.FormattedValue, omit: _OmitType = ...) -> None: ... + def JOINEDSTR(self, node: ast.AST) -> None: ... + def DICT(self, node: ast.Dict) -> None: ... + def IF(self, node: ast.If) -> None: ... 
+ def IFEXP(self, node: ast.If) -> None: ... + def ASSERT(self, node: ast.Assert) -> None: ... + def GLOBAL(self, node: ast.Global) -> None: ... + def NONLOCAL(self, node: ast.Nonlocal) -> None: ... + def GENERATOREXP(self, node: ast.GeneratorExp) -> None: ... + def LISTCOMP(self, node: ast.ListComp) -> None: ... + def DICTCOMP(self, node: ast.DictComp) -> None: ... + def SETCOMP(self, node: ast.SetComp) -> None: ... + def NAME(self, node: ast.Name) -> None: ... + def CONTINUE(self, node: ast.Continue) -> None: ... + def BREAK(self, node: ast.Break) -> None: ... + def RETURN(self, node: ast.Return) -> None: ... + def YIELD(self, node: ast.Yield) -> None: ... + def AWAIT(self, node: ast.Await) -> None: ... + def YIELDFROM(self, node: ast.YieldFrom) -> None: ... + def FUNCTIONDEF(self, node: ast.FunctionDef) -> None: ... + def ASYNCFUNCTIONDEF(self, node: ast.AsyncFunctionDef) -> None: ... + def LAMBDA(self, node: ast.Lambda) -> None: ... + def ARGUMENTS(self, node: ast.arguments) -> None: ... + def ARG(self, node: ast.arg) -> None: ... + def CLASSDEF(self, node: ast.ClassDef): ... + def AUGASSIGN(self, node: ast.AugAssign) -> None: ... + def TUPLE(self, node: ast.Tuple) -> None: ... + def LIST(self, node: ast.List) -> None: ... + def IMPORT(self, node: ast.Import) -> None: ... + def IMPORTFROM(self, node: ast.ImportFrom) -> None: ... + def TRY(self, node: ast.Try) -> None: ... + def TRYEXCEPT(self, node: ast.Try) -> None: ... + def EXCEPTHANDLER(self, node: ast.ExceptHandler) -> None: ... + def ANNASSIGN(self, node: ast.AnnAssign) -> None: ... + def COMPARE(self, node: ast.Compare) -> None: ... + def MATCH(self, tree: _Match, omit: _OmitType = ...) -> None: ... + def MATCH_CASE(self, tree: _MatchCase, omit: _OmitType = ...) -> None: ... + def MATCHCLASS(self, tree: _MatchClass, omit: _OmitType = ...) -> None: ... + def MATCHOR(self, tree: _MatchOr, omit: _OmitType = ...) -> None: ... + def MATCHSEQUENCE(self, tree: _MatchSequence, omit: _OmitType = ...) -> None: ... 
+ def MATCHSINGLETON(self, tree: _MatchSingleton, omit: _OmitType = ...) -> None: ... + def MATCHVALUE(self, tree: _MatchValue, omit: _OmitType = ...) -> None: ... + def MATCHAS(self, node: _MatchAs) -> None: ... + def MATCHMAPPING(self, node: _MatchMapping) -> None: ... + def MATCHSTAR(self, node: _MatchStar) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/messages.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/messages.pyi new file mode 100644 index 000000000000..b641ee765817 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/messages.pyi @@ -0,0 +1,149 @@ +import ast +from typing import Any, ClassVar + +class Message: + message: ClassVar[str] + message_args: tuple[Any, ...] + filename: Any + lineno: int + col: int + def __init__(self, filename, loc: ast.AST) -> None: ... + +class UnusedImport(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class RedefinedWhileUnused(Message): + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + +class RedefinedInListComp(Message): + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + +class ImportShadowedByLoopVar(Message): + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + +class ImportStarNotPermitted(Message): + message_args: Any + def __init__(self, filename, loc, modname) -> None: ... + +class ImportStarUsed(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, modname) -> None: ... + +class ImportStarUsage(Message): + message_args: tuple[Any, Any] + def __init__(self, filename, loc: ast.AST, name, from_list) -> None: ... + +class UndefinedName(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... 
+ +class DoctestSyntaxError(Message): + message_args: tuple[()] + def __init__(self, filename, loc: ast.AST, position: tuple[int, int] | None = ...) -> None: ... + +class UndefinedExport(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class UndefinedLocal(Message): + default: ClassVar[str] + builtin: ClassVar[str] + message_args: tuple[Any, int] + def __init__(self, filename, loc: ast.AST, name, orig_loc: ast.AST) -> None: ... + +class DuplicateArgument(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class MultiValueRepeatedKeyLiteral(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, key) -> None: ... + +class MultiValueRepeatedKeyVariable(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, key) -> None: ... + +class LateFutureImport(Message): + message_args: tuple[()] + def __init__(self, filename, loc: ast.AST, names) -> None: ... + +class FutureFeatureNotDefined(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, name) -> None: ... + +class UnusedVariable(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, names) -> None: ... + +class ReturnWithArgsInsideGenerator(Message): ... +class ReturnOutsideFunction(Message): ... +class YieldOutsideFunction(Message): ... +class ContinueOutsideLoop(Message): ... +class BreakOutsideLoop(Message): ... +class ContinueInFinally(Message): ... +class DefaultExceptNotLast(Message): ... +class TwoStarredExpressions(Message): ... +class TooManyExpressionsInStarredAssignment(Message): ... +class IfTuple(Message): ... +class AssertTuple(Message): ... + +class ForwardAnnotationSyntaxError(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, annotation) -> None: ... 
+ +class CommentAnnotationSyntaxError(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, annotation) -> None: ... + +class RaiseNotImplemented(Message): ... +class InvalidPrintSyntax(Message): ... +class IsLiteral(Message): ... +class FStringMissingPlaceholders(Message): ... + +class StringDotFormatExtraPositionalArguments(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, extra_positions) -> None: ... + +class StringDotFormatExtraNamedArguments(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, extra_keywords) -> None: ... + +class StringDotFormatMissingArgument(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, missing_arguments) -> None: ... + +class StringDotFormatMixingAutomatic(Message): ... + +class StringDotFormatInvalidFormat(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, error) -> None: ... + +class PercentFormatInvalidFormat(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, error) -> None: ... + +class PercentFormatMixedPositionalAndNamed(Message): ... + +class PercentFormatUnsupportedFormatCharacter(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, c) -> None: ... + +class PercentFormatPositionalCountMismatch(Message): + message_args: tuple[int, int] + def __init__(self, filename, loc: ast.AST, n_placeholders: int, n_substitutions: int) -> None: ... + +class PercentFormatExtraNamedArguments(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, extra_keywords) -> None: ... + +class PercentFormatMissingArgument(Message): + message_args: tuple[Any] + def __init__(self, filename, loc: ast.AST, missing_arguments) -> None: ... + +class PercentFormatExpectedMapping(Message): ... +class PercentFormatExpectedSequence(Message): ... +class PercentFormatStarRequiresSequence(Message): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/reporter.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/reporter.pyi new file mode 100644 index 000000000000..5b15ee962578 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyflakes/pyflakes/reporter.pyi @@ -0,0 +1,5 @@ +class Reporter: + def __init__(self, warningStream, errorStream) -> None: ... + def unexpectedError(self, filename, msg) -> None: ... + def syntaxError(self, filename, msg, lineno, offset, text) -> None: ... + def flake(self, message) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pynput/METADATA.toml new file mode 100644 index 000000000000..0e0a8a353b36 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.7.*" +stubtest = false # A display server (e.g. X11) is required to import pynput diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/__init__.pyi new file mode 100644 index 000000000000..1b92738f9891 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/__init__.pyi @@ -0,0 +1 @@ +from . 
import keyboard as keyboard, mouse as mouse diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/_info.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/_info.pyi new file mode 100644 index 000000000000..e6655bdd2e3d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/_info.pyi @@ -0,0 +1,2 @@ +__author__: str +__version__: tuple[int, int, int] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/_util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/_util.pyi new file mode 100644 index 000000000000..417d00ac3e9d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/_util.pyi @@ -0,0 +1,70 @@ +import sys +import threading +from _typeshed import Self +from collections.abc import Callable +from queue import Queue +from types import ModuleType, TracebackType +from typing import Any, ClassVar, Generic, TypeVar +from typing_extensions import ParamSpec, TypedDict + +_T = TypeVar("_T") +_AbstractListener_T = TypeVar("_AbstractListener_T", bound=AbstractListener) +_P = ParamSpec("_P") + +class _RESOLUTIONS(TypedDict): + darwin: str + uinput: str + xorg: str + +RESOLUTIONS: _RESOLUTIONS + +def backend(package: str) -> ModuleType: ... +def prefix(base: type | tuple[type | tuple[Any, ...], ...], cls: type) -> str | None: ... + +class AbstractListener(threading.Thread): + class StopException(Exception): ... + _HANDLED_EXCEPTIONS: ClassVar[tuple[type | tuple[Any, ...], ...]] # undocumented + _suppress: bool # undocumented + _running: bool # undocumented + _thread: threading.Thread # undocumented + _condition: threading.Condition # undocumented + _ready: bool # undocumented + _queue: Queue[sys._OptExcInfo | None] # undocumented + daemon: bool + def __init__(self, suppress: bool = ..., **kwargs: Callable[..., bool | None] | None) -> None: ... + @property + def suppress(self) -> bool: ... + @property + def running(self) -> bool: ... 
+ def stop(self) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def wait(self) -> None: ... + def run(self) -> None: ... + @classmethod + def _emitter(cls, f: Callable[_P, _T]) -> Callable[_P, _T]: ... # undocumented + def _mark_ready(self) -> None: ... # undocumented + def _run(self) -> None: ... # undocumented + def _stop_platform(self) -> None: ... # undocumented + def join(self, *args: Any) -> None: ... + +class Events(Generic[_T, _AbstractListener_T]): + _Listener: type[_AbstractListener_T] | None # undocumented + + class Event: + def __eq__(self, other: object) -> bool: ... + _event_queue: Queue[_T] # undocumented + _sentinel: object # undocumented + _listener: _AbstractListener_T # undocumented + start: Callable[[], None] + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, *args: Any) -> None: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> _T: ... + def get(self, timeout: float | None = ...) -> _T | None: ... + def _event_mapper(self, event: Callable[_P, None]) -> Callable[_P, None]: ... + +class NotifierMixin: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/__init__.pyi new file mode 100644 index 000000000000..e470b5883909 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/__init__.pyi @@ -0,0 +1,30 @@ +from _typeshed import SupportsItems +from collections.abc import Callable +from typing import Any + +from pynput import _util + +from ._base import Controller as Controller, Key as Key, KeyCode as KeyCode, Listener as Listener + +class Events(_util.Events[Any, Listener]): + class Press(_util.Events.Event): + key: Key | KeyCode | None + def __init__(self, key: Key | KeyCode | None) -> None: ... + + class Release(_util.Events.Event): + key: Key | KeyCode | None + def __init__(self, key: Key | KeyCode | None) -> None: ... + + def __init__(self) -> None: ... + def __next__(self) -> Press | Release: ... + def get(self, timeout: float | None = ...) -> Press | Release | None: ... + +class HotKey: + def __init__(self, keys: list[KeyCode], on_activate: Callable[[], None]) -> None: ... + @staticmethod + def parse(keys: str) -> list[KeyCode]: ... + def press(self, key: Key | KeyCode) -> None: ... + def release(self, key: Key | KeyCode) -> None: ... + +class GlobalHotKeys(Listener): + def __init__(self, hotkeys: SupportsItems[str, Callable[[], None]], *args: Any, **kwargs: Any) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/_base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/_base.pyi new file mode 100644 index 000000000000..6c10ba3ceb33 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/_base.pyi @@ -0,0 +1,122 @@ +import contextlib +import enum +from _typeshed import Self +from collections.abc import Callable, Iterable, Iterator +from typing import Any, ClassVar + +from pynput._util import AbstractListener + +class KeyCode: + _PLATFORM_EXTENSIONS: ClassVar[Iterable[str]] # undocumented + vk: int | None + char: str | None + is_dead: bool | None + combining: str | None + def __init__(self, vk: str | None = ..., char: str | None = ..., is_dead: bool = ..., **kwargs: str) -> None: ... + def __eq__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... + def join(self: Self, key: Self) -> Self: ... + @classmethod + def from_vk(cls: type[Self], vk: int, **kwargs: Any) -> Self: ... + @classmethod + def from_char(cls: type[Self], char: str, **kwargs: Any) -> Self: ... + @classmethod + def from_dead(cls: type[Self], char: str, **kwargs: Any) -> Self: ... 
+ +class Key(enum.Enum): + alt: int + alt_l: int + alt_r: int + alt_gr: int + backspace: int + caps_lock: int + cmd: int + cmd_l: int + cmd_r: int + ctrl: int + ctrl_l: int + ctrl_r: int + delete: int + down: int + end: int + enter: int + esc: int + f1: int + f2: int + f3: int + f4: int + f5: int + f6: int + f7: int + f8: int + f9: int + f10: int + f11: int + f12: int + f13: int + f14: int + f15: int + f16: int + f17: int + f18: int + f19: int + f20: int + home: int + left: int + page_down: int + page_up: int + right: int + shift: int + shift_l: int + shift_r: int + space: int + tab: int + up: int + media_play_pause: int + media_volume_mute: int + media_volume_down: int + media_volume_up: int + media_previous: int + media_next: int + insert: int + menu: int + num_lock: int + pause: int + print_screen: int + scroll_lock: int + +class Controller: + _KeyCode: ClassVar[KeyCode] # undocumented + _Key: ClassVar[Key] # undocumented + + class InvalidKeyException(Exception): ... + class InvalidCharacterException(Exception): ... + + def __init__(self) -> None: ... + def press(self, key: str | Key | KeyCode) -> None: ... + def release(self, key: str | Key | KeyCode) -> None: ... + def tap(self, key: str | Key | KeyCode) -> None: ... + def touch(self, key: str | Key | KeyCode, is_press: bool) -> None: ... + @contextlib.contextmanager + def pressed(self, *args: str | Key | KeyCode) -> Iterator[None]: ... + def type(self, string: str) -> None: ... + @property + def modifiers(self) -> contextlib.AbstractContextManager[Iterator[set[Key]]]: ... + @property + def alt_pressed(self) -> bool: ... + @property + def alt_gr_pressed(self) -> bool: ... + @property + def ctrl_pressed(self) -> bool: ... + @property + def shift_pressed(self) -> bool: ... 
+ +class Listener(AbstractListener): + def __init__( + self, + on_press: Callable[[Key | KeyCode | None], None] | None = ..., + on_release: Callable[[Key | KeyCode | None], None] | None = ..., + suppress: bool = ..., + **kwargs: Any, + ) -> None: ... + def canonical(self, key: Key | KeyCode) -> Key | KeyCode: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/_dummy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/_dummy.pyi new file mode 100644 index 000000000000..f49ca47776ea --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/keyboard/_dummy.pyi @@ -0,0 +1 @@ +from ._base import Controller as Controller, Key as Key, KeyCode as KeyCode, Listener as Listener diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/__init__.pyi new file mode 100644 index 000000000000..ad9546c844d7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/__init__.pyi @@ -0,0 +1,29 @@ +from typing import Any + +from pynput import _util + +from ._base import Button as Button, Controller as Controller, Listener as Listener + +class Events(_util.Events[Any, Listener]): + class Move(_util.Events.Event): + x: int + y: int + def __init__(self, x: int, y: int) -> None: ... + + class Click(_util.Events.Event): + x: int + y: int + button: Button + pressed: bool + def __init__(self, x: int, y: int, button: Button, pressed: bool) -> None: ... + + class Scroll(_util.Events.Event): + x: int + y: int + dx: int + dy: int + def __init__(self, x: int, y: int, dx: int, dy: int) -> None: ... + + def __init__(self) -> None: ... + def __next__(self) -> Move | Click | Scroll: ... + def get(self, timeout: float | None = ...) -> Move | Click | Scroll | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/_base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/_base.pyi new file mode 100644 index 000000000000..181dea5a0ca5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/_base.pyi @@ -0,0 +1,39 @@ +import enum +from _typeshed import Self +from collections.abc import Callable +from types import TracebackType +from typing import Any + +from pynput._util import AbstractListener + +class Button(enum.Enum): + unknown: int + left: int + middle: int + right: int + +class Controller: + def __init__(self) -> None: ... + @property + def position(self) -> tuple[int, int]: ... + @position.setter + def position(self, position: tuple[int, int]) -> None: ... + def scroll(self, dx: int, dy: int) -> None: ... + def press(self, button: Button) -> None: ... + def release(self, button: Button) -> None: ... + def move(self, dx: int, dy: int) -> None: ... + def click(self, button: Button, count: int = ...) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +class Listener(AbstractListener): + def __init__( + self, + on_move: Callable[[int, int], bool | None] | None = ..., + on_click: Callable[[int, int, Button, bool], bool | None] | None = ..., + on_scroll: Callable[[int, int, int, int], bool | None] | None = ..., + suppress: bool = ..., + **kwargs: Any, + ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/_dummy.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/_dummy.pyi new file mode 100644 index 000000000000..c799f582b620 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pynput/pynput/mouse/_dummy.pyi @@ -0,0 +1 @@ +from ._base import Button as Button, Controller as Controller, Listener as Listener diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pysftp/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pysftp/METADATA.toml index c9b2e2d3b78b..e3748bd7b73d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pysftp/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pysftp/METADATA.toml @@ -1,3 +1,3 @@ -version = "0.2" +version = "0.2.*" python2 = true requires = ["types-paramiko"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pysftp/pysftp/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pysftp/pysftp/__init__.pyi index d706213e9a6e..25adb3f2cf85 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pysftp/pysftp/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pysftp/pysftp/__init__.pyi @@ -1,6 +1,7 @@ +from _typeshed import Self from stat import S_IMODE as S_IMODE from types import TracebackType -from typing import IO, Any, Callable, ContextManager, Sequence, Text, Tuple, Type, Union +from typing import IO, Any, Callable, ContextManager, Sequence, Text, Union from typing_extensions import Literal import paramiko @@ -106,9 +107,9 @@ class Connection: @property def sftp_client(self) -> paramiko.SFTPClient: ... @property - def active_ciphers(self) -> Tuple[str, str]: ... + def active_ciphers(self) -> tuple[str, str]: ... @property - def active_compression(self) -> Tuple[str, str]: ... + def active_compression(self) -> tuple[str, str]: ... @property def security_options(self) -> paramiko.SecurityOptions: ... 
@property @@ -120,7 +121,7 @@ class Connection: @property def remote_server_key(self) -> paramiko.PKey: ... def __del__(self) -> None: ... - def __enter__(self) -> "Connection": ... + def __enter__(self: Self) -> Self: ... def __exit__( - self, etype: Type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + self, etype: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None ) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pytest-lazy-fixture/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pytest-lazy-fixture/METADATA.toml index 22cbe4eb9ca3..03031f1e9559 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pytest-lazy-fixture/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pytest-lazy-fixture/METADATA.toml @@ -1 +1 @@ -version = "0.6" +version = "0.6.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/METADATA.toml index 1985499d63ae..c00bfdcaeadc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/METADATA.toml @@ -1,3 +1,3 @@ -version = "2.8" +version = "2.8.*" python2 = true requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/_common.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/_common.pyi index a437f85035a3..878a7af468ae 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/_common.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/_common.pyi @@ -1,12 +1,9 @@ -from typing import TypeVar - -_T = TypeVar("_T") +from _typeshed import Self class weekday(object): def __init__(self, weekday: int, n: int | None = ...) -> None: ... 
- def __call__(self: _T, n: int) -> _T: ... + def __call__(self: Self, n: int) -> Self: ... def __eq__(self, other: object) -> bool: ... - def __repr__(self) -> str: ... def __hash__(self) -> int: ... weekday: int n: int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/__init__.pyi index af88c8bee0d9..7782cc0074b4 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/parser/__init__.pyi @@ -1,5 +1,5 @@ from datetime import datetime, tzinfo -from typing import IO, Any, Mapping, Text, Tuple, Union +from typing import IO, Any, Mapping, Text, Union from .isoparser import isoparse as isoparse, isoparser as isoparser @@ -7,10 +7,10 @@ _FileOrStr = Union[bytes, Text, IO[str], IO[Any]] class parserinfo(object): JUMP: list[str] - WEEKDAYS: list[Tuple[str, str]] - MONTHS: list[Tuple[str, str]] - HMS: list[Tuple[str, str, str]] - AMPM: list[Tuple[str, str]] + WEEKDAYS: list[tuple[str, ...]] + MONTHS: list[tuple[str, ...]] + HMS: list[tuple[str, str, str]] + AMPM: list[tuple[str, str]] UTCZONE: list[str] PERTAIN: list[str] TZOFFSET: dict[str, int] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/relativedelta.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/relativedelta.pyi index ec2adea537fa..c19edd0bfdda 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/relativedelta.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/relativedelta.pyi @@ -1,8 +1,10 @@ +from _typeshed import Self from datetime import date, datetime, timedelta from typing import SupportsFloat, TypeVar, overload from ._common import weekday +# We need the extra "Self" TypeVar to avoid overlapping 
__add__/__radd__ complaints from mypy _SelfT = TypeVar("_SelfT", bound=relativedelta) _DateT = TypeVar("_DateT", date, datetime) # Work around attribute and type having the same name. @@ -61,37 +63,36 @@ class relativedelta(object): def weeks(self) -> int: ... @weeks.setter def weeks(self, value: int) -> None: ... - def normalized(self: _SelfT) -> _SelfT: ... + def normalized(self: Self) -> Self: ... # TODO: use Union when mypy will handle it properly in overloaded operator # methods (#2129, #1442, #1264 in mypy) @overload - def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ... + def __add__(self: _SelfT, other: relativedelta) -> _SelfT: ... # noqa: Y019 @overload - def __add__(self: _SelfT, other: timedelta) -> _SelfT: ... + def __add__(self: _SelfT, other: timedelta) -> _SelfT: ... # noqa: Y019 @overload def __add__(self, other: _DateT) -> _DateT: ... @overload - def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ... + def __radd__(self: _SelfT, other: relativedelta) -> _SelfT: ... # noqa: Y019 @overload - def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ... + def __radd__(self: _SelfT, other: timedelta) -> _SelfT: ... # noqa: Y019 @overload def __radd__(self, other: _DateT) -> _DateT: ... @overload - def __rsub__(self: _SelfT, other: relativedelta) -> _SelfT: ... + def __rsub__(self: Self, other: relativedelta) -> Self: ... @overload - def __rsub__(self: _SelfT, other: timedelta) -> _SelfT: ... + def __rsub__(self: Self, other: timedelta) -> Self: ... @overload def __rsub__(self, other: _DateT) -> _DateT: ... - def __sub__(self: _SelfT, other: relativedelta) -> _SelfT: ... - def __neg__(self: _SelfT) -> _SelfT: ... + def __sub__(self: Self, other: relativedelta) -> Self: ... + def __neg__(self: Self) -> Self: ... def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... - def __mul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... - def __rmul__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... 
+ def __mul__(self: Self, other: SupportsFloat) -> Self: ... + def __rmul__(self: Self, other: SupportsFloat) -> Self: ... def __eq__(self, other: object) -> bool: ... def __ne__(self, other: object) -> bool: ... - def __div__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... - def __truediv__(self: _SelfT, other: SupportsFloat) -> _SelfT: ... - def __repr__(self) -> str: ... - def __abs__(self: _SelfT) -> _SelfT: ... + def __div__(self: Self, other: SupportsFloat) -> Self: ... + def __truediv__(self: Self, other: SupportsFloat) -> Self: ... + def __abs__(self: Self) -> Self: ... def __hash__(self) -> int: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi index d928c8d779bd..44a61b5c1e0d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/rrule.pyi @@ -81,6 +81,8 @@ class _iterinfo: def mtimeset(self, hour, minute, second): ... def stimeset(self, hour, minute, second): ... +_rrule = rrule + class rruleset(rrulebase): class _genitem: dt: Any = ... @@ -93,8 +95,9 @@ class rruleset(rrulebase): def __gt__(self, other): ... def __eq__(self, other): ... def __ne__(self, other): ... + def __init__(self, cache: bool = ...) -> None: ... - def rrule(self, rrule): ... + def rrule(self, rrule: _rrule): ... def rdate(self, rdate): ... def exrule(self, exrule): ... def exdate(self, exdate): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi index 6e264420cf81..dad7fa15a0b3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-dateutil/dateutil/tz/tz.pyi @@ -1,10 +1,11 @@ import datetime -from typing import IO, Any, Text, Tuple, Union +from typing import IO, Any, Text, TypeVar, Union from ..relativedelta import relativedelta from ._common import _tzinfo as _tzinfo, enfold as enfold, tzname_in_python2 as tzname_in_python2, tzrangebase as tzrangebase _FileObj = Union[str, Text, IO[str], IO[Text]] +_DT = TypeVar("_DT", bound=datetime.datetime) ZERO: datetime.timedelta EPOCH: datetime.datetime @@ -15,6 +16,7 @@ class tzutc(datetime.tzinfo): def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... def tzname(self, dt: datetime.datetime | None) -> str: ... def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ... + def fromutc(self, dt: _DT) -> _DT: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... @@ -26,6 +28,7 @@ class tzoffset(datetime.tzinfo): def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... def is_ambiguous(self, dt: datetime.datetime | None) -> bool: ... def tzname(self, dt: datetime.datetime | None) -> str: ... + def fromutc(self, dt: _DT) -> _DT: ... def __eq__(self, other): ... __hash__: Any def __ne__(self, other): ... @@ -73,7 +76,7 @@ class tzrange(tzrangebase): start: relativedelta | None = ..., end: relativedelta | None = ..., ) -> None: ... - def transitions(self, year: int) -> Tuple[datetime.datetime, datetime.datetime]: ... + def transitions(self, year: int) -> tuple[datetime.datetime, datetime.datetime]: ... def __eq__(self, other): ... 
class tzstr(tzrange): diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/METADATA.toml index 5bba18d25e4e..ac2fd418911a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.1" +version = "3.1.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/gflags.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/gflags.pyi index bc674935cb24..ae294429702c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/gflags.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-gflags/gflags.pyi @@ -70,7 +70,6 @@ class FlagValues: def RegisteredFlags(self) -> list[str]: ... def flag_values_dict(self) -> dict[str, Any]: ... FlagValuesDict = flag_values_dict - def __str__(self) -> str: ... def GetHelp(self, prefix: str = ...) -> str: ... def module_help(self, module: ModuleType | str) -> str: ... 
ModuleHelp = module_help @@ -101,11 +100,11 @@ class Flag: value: Any help: str short_name: str - boolean = False - present = False + boolean: bool + present: bool parser: ArgumentParser serializer: ArgumentSerializer - allow_override = False + allow_override: bool def __init__( self, parser: ArgumentParser, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/METADATA.toml index 9d5d312e6282..aed349a93a97 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/METADATA.toml @@ -1,3 +1,3 @@ -version = "0.6" +version = "0.6.*" python2 = true requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/nmap/nmap.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/nmap/nmap.pyi index c28623dd8ae5..70be8d261d59 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/nmap/nmap.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-nmap/nmap/nmap.pyi @@ -1,4 +1,4 @@ -from typing import Any, Callable, Dict, Iterable, Iterator, Text, Tuple, TypeVar +from typing import Any, Callable, Iterable, Iterator, Text, TypeVar from typing_extensions import TypedDict _T = TypeVar("_T") @@ -52,7 +52,7 @@ __last_modification__: str class PortScanner(object): def __init__(self, nmap_search_path: Iterable[str] = ...) -> None: ... def get_nmap_last_output(self) -> Text: ... - def nmap_version(self) -> Tuple[int, int]: ... + def nmap_version(self) -> tuple[int, int]: ... def listscan(self, hosts: str = ...) -> list[str]: ... def scan( self, hosts: Text = ..., ports: Text | None = ..., arguments: Text = ..., sudo: bool = ..., timeout: int = ... @@ -94,14 +94,14 @@ class PortScannerAsync(object): class PortScannerYield(PortScannerAsync): def __init__(self) -> None: ... 
- def scan( # type: ignore + def scan( # type: ignore[override] self, hosts: str = ..., ports: str | None = ..., arguments: str = ..., sudo: bool = ..., timeout: int = ... - ) -> Iterator[Tuple[str, _Result]]: ... + ) -> Iterator[tuple[str, _Result]]: ... def stop(self) -> None: ... def wait(self, timeout: int | None = ...) -> None: ... - def still_scanning(self) -> None: ... # type: ignore + def still_scanning(self) -> None: ... # type: ignore[override] -class PortScannerHostDict(Dict[str, Any]): +class PortScannerHostDict(dict[str, Any]): def hostnames(self) -> list[_ResultHostNames]: ... def hostname(self) -> str: ... def state(self) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/METADATA.toml index 210c7605b3c2..c98db63045e7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/METADATA.toml @@ -1 +1 @@ -version = "5.0" +version = "5.0.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/slugify/special.pyi b/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/slugify/special.pyi index 05e076177bbe..29ca15bdad61 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/slugify/special.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/python-slugify/slugify/special.pyi @@ -1,8 +1,8 @@ -from typing import Sequence, Tuple +from typing import Sequence -def add_uppercase_char(char_list: Sequence[Tuple[str, str]]) -> Sequence[Tuple[str, str]]: ... +def add_uppercase_char(char_list: Sequence[tuple[str, str]]) -> Sequence[tuple[str, str]]: ... 
-CYRILLIC: Sequence[Tuple[str, str]] -GERMAN: Sequence[Tuple[str, str]] -GREEK: Sequence[Tuple[str, str]] -PRE_TRANSLATIONS: Sequence[Tuple[str, str]] +CYRILLIC: Sequence[tuple[str, str]] +GERMAN: Sequence[tuple[str, str]] +GREEK: Sequence[tuple[str, str]] +PRE_TRANSLATIONS: Sequence[tuple[str, str]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pytz/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pytz/METADATA.toml index a9c4347ec2a4..0bdf109638a1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pytz/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pytz/METADATA.toml @@ -1,2 +1,2 @@ -version = "2021.1" +version = "2021.3" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/__init__.pyi index f9dd0e11ca38..4838e7614128 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/__init__.pyi @@ -1,41 +1,42 @@ import datetime -from typing import Mapping, Set +from typing import ClassVar, Mapping -class BaseTzInfo(datetime.tzinfo): - zone: str = ... - def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... - def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... +from .exceptions import ( + AmbiguousTimeError as AmbiguousTimeError, + InvalidTimeError as InvalidTimeError, + NonExistentTimeError as NonExistentTimeError, + UnknownTimeZoneError as UnknownTimeZoneError, +) +from .tzinfo import BaseTzInfo as BaseTzInfo, DstTzInfo, StaticTzInfo +# Actually named UTC and then masked with a singleton with the same name class _UTCclass(BaseTzInfo): + def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... 
def tzname(self, dt: datetime.datetime | None) -> str: ... def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta: ... def dst(self, dt: datetime.datetime | None) -> datetime.timedelta: ... -class _StaticTzInfo(BaseTzInfo): - def tzname(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> str: ... - def utcoffset(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta: ... - def dst(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta: ... - -class _DstTzInfo(BaseTzInfo): - def tzname(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> str: ... - def utcoffset(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta | None: ... - def dst(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta | None: ... - -class UnknownTimeZoneError(KeyError): ... -class InvalidTimeError(Exception): ... -class AmbiguousTimeError(InvalidTimeError): ... -class NonExistentTimeError(InvalidTimeError): ... - utc: _UTCclass UTC: _UTCclass -def timezone(zone: str) -> _UTCclass | _StaticTzInfo | _DstTzInfo: ... -def FixedOffset(offset: int) -> _UTCclass | datetime.tzinfo: ... +def timezone(zone: str) -> _UTCclass | StaticTzInfo | DstTzInfo: ... + +class _FixedOffset(datetime.tzinfo): + zone: ClassVar[None] + def __init__(self, minutes: int) -> None: ... + def utcoffset(self, dt: object) -> datetime.timedelta | None: ... + def dst(self, dt: object) -> datetime.timedelta: ... + def tzname(self, dt: object) -> None: ... + def localize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime, is_dst: bool = ...) -> datetime.datetime: ... + +def FixedOffset(offset: int, _tzinfos: dict[int, _FixedOffset] = ...) -> _UTCclass | _FixedOffset: ... 
all_timezones: list[str] -all_timezones_set: Set[str] +all_timezones_set: set[str] common_timezones: list[str] -common_timezones_set: Set[str] +common_timezones_set: set[str] country_timezones: Mapping[str, list[str]] country_names: Mapping[str, str] ZERO: datetime.timedelta diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/exceptions.pyi new file mode 100644 index 000000000000..1880e442ac57 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/exceptions.pyi @@ -0,0 +1,7 @@ +__all__ = ["UnknownTimeZoneError", "InvalidTimeError", "AmbiguousTimeError", "NonExistentTimeError"] + +class Error(Exception): ... +class UnknownTimeZoneError(KeyError, Error): ... +class InvalidTimeError(Error): ... +class AmbiguousTimeError(InvalidTimeError): ... +class NonExistentTimeError(InvalidTimeError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/tzinfo.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/tzinfo.pyi new file mode 100644 index 000000000000..c2c68526870d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/pytz/pytz/tzinfo.pyi @@ -0,0 +1,35 @@ +import datetime +from abc import abstractmethod +from typing import Any + +class BaseTzInfo(datetime.tzinfo): + zone: str | None # Actually None but should be set on concrete subclasses + # The following abstract methods don't exist in the implementation, but + # are implemented by all sub-classes. + @abstractmethod + def localize(self, dt: datetime.datetime) -> datetime.datetime: ... + @abstractmethod + def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... + @abstractmethod + def tzname(self, dt: datetime.datetime | None) -> str: ... + @abstractmethod + def utcoffset(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... 
+ @abstractmethod + def dst(self, dt: datetime.datetime | None) -> datetime.timedelta | None: ... + +class StaticTzInfo(BaseTzInfo): + def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... + def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def tzname(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> str: ... + def utcoffset(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta: ... + def dst(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta: ... + +class DstTzInfo(BaseTzInfo): + def __init__(self, _inf: Any = ..., _tzinfos: Any = ...) -> None: ... + def fromutc(self, dt: datetime.datetime) -> datetime.datetime: ... + def localize(self, dt: datetime.datetime, is_dst: bool | None = ...) -> datetime.datetime: ... + def normalize(self, dt: datetime.datetime) -> datetime.datetime: ... + def tzname(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> str: ... + def utcoffset(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta | None: ... + def dst(self, dt: datetime.datetime | None, is_dst: bool | None = ...) -> datetime.timedelta | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/METADATA.toml index 9af5725bdeab..678b938f43f9 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/METADATA.toml @@ -1,3 +1,3 @@ -version = "7.0" +version = "7.0.*" python2 = true requires = ["types-enum34"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/__init__.pyi index a06b018224d4..c8c47e1a01da 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/__init__.pyi @@ -36,12 +36,14 @@ class PerformanceManager: counterId: int instance: str def __init__(self, counterId: int, instance: str): ... + class PerfCounterInfo: key: int groupInfo: Any nameInfo: Any rollupType: Any def __getattr__(self, name: str) -> Any: ... # incomplete + class QuerySpec: entity: ManagedEntity metricId: list[PerformanceManager.MetricId] @@ -49,6 +51,7 @@ class PerformanceManager: maxSample: int startTime: datetime def __getattr__(self, name: str) -> Any: ... # incomplete + class EntityMetricBase: entity: ManagedEntity def QueryPerfCounterByLevel(self, collection_level: int) -> list[PerformanceManager.PerfCounterInfo]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/view.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/view.pyi index c00ad51db1c7..114883ba3431 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/view.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vim/view.pyi @@ -1,4 +1,4 @@ -from typing import Any, Type +from typing import Any from pyVmomi.vim import ManagedEntity @@ -12,4 +12,4 @@ class ViewManager: # but in practice it seems to be `list[Type[ManagedEntity]]` # Source: https://pubs.vmware.com/vi-sdk/visdk250/ReferenceGuide/vim.view.ViewManager.html @staticmethod - def CreateContainerView(container: ManagedEntity, type: list[Type[ManagedEntity]], recursive: bool) -> ContainerView: ... + def CreateContainerView(container: ManagedEntity, type: list[type[ManagedEntity]], recursive: bool) -> ContainerView: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vmodl/query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vmodl/query.pyi index 89f2769c13f8..251ab5ca389f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vmodl/query.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/pyvmomi/pyVmomi/vmodl/query.pyi @@ -1,4 +1,4 @@ -from typing import Any, Type +from typing import Any from pyVmomi.vim import ManagedEntity from pyVmomi.vim.view import ContainerView @@ -6,21 +6,24 @@ from pyVmomi.vmodl import DynamicProperty class PropertyCollector: class PropertySpec: - def __init__(self, *, all: bool = ..., type: Type[ManagedEntity] = ..., pathSet: list[str] = ...) -> None: ... + def __init__(self, *, all: bool = ..., type: type[ManagedEntity] = ..., pathSet: list[str] = ...) -> None: ... 
all: bool - type: Type[ManagedEntity] + type: type[ManagedEntity] pathSet: list[str] + class TraversalSpec: def __init__( - self, *, path: str = ..., skip: bool = ..., type: Type[ContainerView] = ..., **kwargs: Any # incomplete + self, *, path: str = ..., skip: bool = ..., type: type[ContainerView] = ..., **kwargs: Any # incomplete ) -> None: ... path: str skip: bool - type: Type[ContainerView] + type: type[ContainerView] def __getattr__(self, name: str) -> Any: ... # incomplete + class RetrieveOptions: def __init__(self, *, maxObjects: int) -> None: ... maxObjects: int + class ObjectSpec: def __init__( self, *, skip: bool = ..., selectSet: list[PropertyCollector.TraversalSpec] = ..., obj: Any = ... @@ -28,6 +31,7 @@ class PropertyCollector: skip: bool selectSet: list[PropertyCollector.TraversalSpec] obj: Any + class FilterSpec: def __init__( self, @@ -39,6 +43,7 @@ class PropertyCollector: propSet: list[PropertyCollector.PropertySpec] objectSet: list[PropertyCollector.ObjectSpec] def __getattr__(self, name: str) -> Any: ... # incomplete + class ObjectContent: def __init__( self, *, obj: ManagedEntity = ..., propSet: list[DynamicProperty] = ..., **kwargs: Any # incomplete @@ -46,6 +51,7 @@ class PropertyCollector: obj: ManagedEntity propSet: list[DynamicProperty] def __getattr__(self, name: str) -> Any: ... # incomplete + class RetrieveResult: def __init__(self, *, objects: list[PropertyCollector.ObjectContent] = ..., token: str | None = ...) -> None: ... 
objects: list[PropertyCollector.ObjectContent] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/redis/METADATA.toml index ec34fb2231b3..bb8cc449b44f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/METADATA.toml @@ -1,3 +1 @@ -version = "3.5" -python2 = true -requires = [] +version = "4.1.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/__init__.pyi index 07191af84ef9..c7e5ffe11544 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/__init__.pyi @@ -1,15 +1,55 @@ -from . import client, connection, exceptions, utils +from . import client, connection, exceptions, sentinel, utils +from .cluster import RedisCluster as RedisCluster + +__all__ = [ + "AuthenticationError", + "AuthenticationWrongNumberOfArgsError", + "BlockingConnectionPool", + "BusyLoadingError", + "ChildDeadlockedError", + "Connection", + "ConnectionError", + "ConnectionPool", + "DataError", + "from_url", + "InvalidResponse", + "PubSubError", + "ReadOnlyError", + "Redis", + "RedisCluster", + "RedisError", + "ResponseError", + "Sentinel", + "SentinelConnectionPool", + "SentinelManagedConnection", + "SentinelManagedSSLConnection", + "SSLConnection", + "StrictRedis", + "TimeoutError", + "UnixDomainSocketConnection", + "WatchError", +] Redis = client.Redis -StrictRedis = client.StrictRedis + BlockingConnectionPool = connection.BlockingConnectionPool -ConnectionPool = connection.ConnectionPool Connection = connection.Connection +ConnectionPool = connection.ConnectionPool SSLConnection = connection.SSLConnection +StrictRedis = client.StrictRedis UnixDomainSocketConnection = connection.UnixDomainSocketConnection + from_url = 
utils.from_url + +Sentinel = sentinel.Sentinel +SentinelConnectionPool = sentinel.SentinelConnectionPool +SentinelManagedConnection = sentinel.SentinelManagedConnection +SentinelManagedSSLConnection = sentinel.SentinelManagedSSLConnection + AuthenticationError = exceptions.AuthenticationError +AuthenticationWrongNumberOfArgsError = exceptions.AuthenticationWrongNumberOfArgsError BusyLoadingError = exceptions.BusyLoadingError +ChildDeadlockedError = exceptions.ChildDeadlockedError ConnectionError = exceptions.ConnectionError DataError = exceptions.DataError InvalidResponse = exceptions.InvalidResponse diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/client.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/client.pyi index 0e9ae2c6733a..554cb1abcf64 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/client.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/client.pyi @@ -1,28 +1,57 @@ +import threading +from _typeshed import Self, SupportsItems from datetime import datetime, timedelta -from typing import Any, Callable, Generic, Iterable, Iterator, Mapping, Sequence, Set, Text, Tuple, Type, TypeVar, Union, overload +from typing import Any, Callable, ClassVar, Generic, Iterable, Iterator, Mapping, Pattern, Sequence, TypeVar, Union, overload from typing_extensions import Literal -from .connection import ConnectionPool +from .commands import CoreCommands, RedisModuleCommands, SentinelCommands +from .connection import ConnectionPool, _ConnectionPoolOptions from .lock import Lock +from .retry import Retry -SYM_EMPTY: Any +_Value = Union[bytes, float, int, str] +_Key = Union[str, bytes] + +# Lib returns str or bytes depending on value of decode_responses +_StrType = TypeVar("_StrType", bound=Union[str, bytes]) + +_VT = TypeVar("_VT") +_T = TypeVar("_T") +_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn") + +# Keyword arguments that are passed to Redis.parse_response(). 
+_ParseResponseOptions = Any +# Keyword arguments that are passed to Redis.execute_command(). +_CommandOptions = _ConnectionPoolOptions | _ParseResponseOptions + +SYM_EMPTY: bytes +EMPTY_RESPONSE: str +NEVER_DECODE: str + +class CaseInsensitiveDict(dict[_StrType, _VT]): + def __init__(self, data: SupportsItems[_StrType, _VT]) -> None: ... + def update(self, data: SupportsItems[_StrType, _VT]) -> None: ... # type: ignore[override] + @overload + def get(self, k: _StrType, default: None = ...) -> _VT | None: ... + @overload + def get(self, k: _StrType, default: _VT | _T) -> _VT | _T: ... + # Overrides many other methods too, but without changing signature def list_or_args(keys, args): ... def timestamp_to_datetime(response): ... def string_keys_to_dict(key_string, callback): ... -def dict_merge(*dicts): ... def parse_debug_object(response): ... def parse_object(response, infotype): ... def parse_info(response): ... -SENTINEL_STATE_TYPES: Any +SENTINEL_STATE_TYPES: dict[str, type[int]] def parse_sentinel_state(item): ... def parse_sentinel_master(response): ... def parse_sentinel_masters(response): ... def parse_sentinel_slaves_and_sentinels(response): ... def parse_sentinel_get_master(response): ... -def pairs_to_dict(response): ... +def pairs_to_dict(response, decode_keys: bool = ..., decode_string_values: bool = ...): ... def pairs_to_dict_typed(response, type_info): ... def zset_score_pairs(response, **options): ... def sort_return_tuples(response, **options): ... @@ -36,308 +65,199 @@ def parse_hscan(response, **options): ... def parse_zscan(response, **options): ... def parse_slowlog_get(response, **options): ... 
-_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn") - -_Value = Union[bytes, float, int, Text] -_Key = Union[Text, bytes] - -# Lib returns str or bytes depending on Python version and value of decode_responses -_StrType = TypeVar("_StrType", bound=Union[Text, bytes]) - _LockType = TypeVar("_LockType") -class Redis(Generic[_StrType]): +class Redis(RedisModuleCommands, CoreCommands[_StrType], SentinelCommands, Generic[_StrType]): RESPONSE_CALLBACKS: Any @overload @classmethod def from_url( cls, - url: Text, - host: Text | None, - port: int | None, - db: int | None, - password: Text | None, - socket_timeout: float | None, - socket_connect_timeout: float | None, - socket_keepalive: bool | None, - socket_keepalive_options: Mapping[str, int | str] | None, - connection_pool: ConnectionPool | None, - unix_socket_path: Text | None, - encoding: Text, - encoding_errors: Text, - charset: Text | None, - errors: Text | None, - decode_responses: Literal[True], - retry_on_timeout: bool = ..., - ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., - ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., - ssl_check_hostname: bool = ..., - max_connections: int | None = ..., - single_connection_client: bool = ..., - health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., - ) -> Redis[str]: ... 
- @overload - @classmethod - def from_url( - cls, - url: Text, - host: Text | None = ..., + url: str, + *, + host: str | None = ..., port: int | None = ..., db: int | None = ..., - password: Text | None = ..., + password: str | None = ..., socket_timeout: float | None = ..., socket_connect_timeout: float | None = ..., socket_keepalive: bool | None = ..., socket_keepalive_options: Mapping[str, int | str] | None = ..., connection_pool: ConnectionPool | None = ..., - unix_socket_path: Text | None = ..., - encoding: Text = ..., - encoding_errors: Text = ..., - charset: Text | None = ..., - errors: Text | None = ..., - *, + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., decode_responses: Literal[True], retry_on_timeout: bool = ..., ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., + ssl_ca_certs: str | None = ..., ssl_check_hostname: bool = ..., max_connections: int | None = ..., single_connection_client: bool = ..., health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., + client_name: str | None = ..., + username: str | None = ..., ) -> Redis[str]: ... 
@overload @classmethod def from_url( cls, - url: Text, - host: Text | None = ..., + url: str, + *, + host: str | None = ..., port: int | None = ..., db: int | None = ..., - password: Text | None = ..., + password: str | None = ..., socket_timeout: float | None = ..., socket_connect_timeout: float | None = ..., socket_keepalive: bool | None = ..., socket_keepalive_options: Mapping[str, int | str] | None = ..., connection_pool: ConnectionPool | None = ..., - unix_socket_path: Text | None = ..., - encoding: Text = ..., - encoding_errors: Text = ..., - charset: Text | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., decode_responses: Literal[False] = ..., - errors: Text | None = ..., retry_on_timeout: bool = ..., ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., + ssl_ca_certs: str | None = ..., ssl_check_hostname: bool = ..., max_connections: int | None = ..., single_connection_client: bool = ..., health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., + client_name: str | None = ..., + username: str | None = ..., ) -> Redis[bytes]: ... 
connection_pool: Any response_callbacks: Any @overload - def __new__( - cls, - host: Text, - port: int, - db: int, - password: Text | None, - socket_timeout: float | None, - socket_connect_timeout: float | None, - socket_keepalive: bool | None, - socket_keepalive_options: Mapping[str, int | str] | None, - connection_pool: ConnectionPool | None, - unix_socket_path: Text | None, - encoding: Text, - encoding_errors: Text, - charset: Text | None, - decode_responses: Literal[True], - errors: Text | None = ..., - retry_on_timeout: bool = ..., - ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., - ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., - ssl_check_hostname: bool = ..., - max_connections: int | None = ..., - single_connection_client: bool = ..., - health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., - ) -> Redis[str]: ... - @overload - def __new__( - cls, - host: Text = ..., - port: int = ..., - db: int = ..., - password: Text | None = ..., - socket_timeout: float | None = ..., - socket_connect_timeout: float | None = ..., - socket_keepalive: bool | None = ..., - socket_keepalive_options: Mapping[str, int | str] | None = ..., - connection_pool: ConnectionPool | None = ..., - unix_socket_path: Text | None = ..., - encoding: Text = ..., - encoding_errors: Text = ..., - charset: Text | None = ..., - *, - decode_responses: Literal[True], - errors: Text | None = ..., - retry_on_timeout: bool = ..., - ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., - ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., - ssl_check_hostname: bool = ..., - max_connections: int | None = ..., - single_connection_client: bool = ..., - health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., - ) -> Redis[str]: ... 
- @overload - def __new__( - cls, - host: Text = ..., - port: int = ..., - db: int = ..., - password: Text | None = ..., - socket_timeout: float | None = ..., - socket_connect_timeout: float | None = ..., - socket_keepalive: bool | None = ..., - socket_keepalive_options: Mapping[str, int | str] | None = ..., - connection_pool: ConnectionPool | None = ..., - unix_socket_path: Text | None = ..., - encoding: Text = ..., - encoding_errors: Text = ..., - charset: Text | None = ..., - errors: Text | None = ..., - decode_responses: Literal[False] = ..., - retry_on_timeout: bool = ..., - ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., - ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., - ssl_check_hostname: bool = ..., - max_connections: int | None = ..., - single_connection_client: bool = ..., - health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., - ) -> Redis[bytes]: ... - @overload def __init__( self: Redis[str], - host: Text, + host: str, port: int, db: int, - password: Text | None, + password: str | None, socket_timeout: float | None, socket_connect_timeout: float | None, socket_keepalive: bool | None, socket_keepalive_options: Mapping[str, int | str] | None, connection_pool: ConnectionPool | None, - unix_socket_path: Text | None, - encoding: Text, - encoding_errors: Text, - charset: Text | None, - errors: Text | None, + unix_socket_path: str | None, + encoding: str, + encoding_errors: str, + charset: str | None, + errors: str | None, decode_responses: Literal[True], retry_on_timeout: bool = ..., + retry_on_error=..., ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., + ssl_ca_certs: str | None = ..., + ssl_ca_path: Any | None = ..., ssl_check_hostname: bool = ..., + ssl_password: Any | 
None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Any | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Any | None = ..., # added in 4.1.1 max_connections: int | None = ..., single_connection_client: bool = ..., health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: Any | None = ..., ) -> None: ... @overload def __init__( self: Redis[str], - host: Text = ..., + host: str = ..., port: int = ..., db: int = ..., - password: Text | None = ..., + password: str | None = ..., socket_timeout: float | None = ..., socket_connect_timeout: float | None = ..., socket_keepalive: bool | None = ..., socket_keepalive_options: Mapping[str, int | str] | None = ..., connection_pool: ConnectionPool | None = ..., - unix_socket_path: Text | None = ..., - encoding: Text = ..., - encoding_errors: Text = ..., - charset: Text | None = ..., - errors: Text | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., *, decode_responses: Literal[True], retry_on_timeout: bool = ..., ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., + ssl_ca_certs: str | None = ..., ssl_check_hostname: bool = ..., + ssl_password: Any | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Any | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Any | None = ..., # added in 4.1.1 max_connections: int | None = ..., single_connection_client: bool = ..., health_check_interval: float = ..., - client_name: Text | None = ..., - username: 
Text | None = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: Any | None = ..., ) -> None: ... @overload def __init__( self: Redis[bytes], - host: Text = ..., + host: str = ..., port: int = ..., db: int = ..., - password: Text | None = ..., + password: str | None = ..., socket_timeout: float | None = ..., socket_connect_timeout: float | None = ..., socket_keepalive: bool | None = ..., socket_keepalive_options: Mapping[str, int | str] | None = ..., connection_pool: ConnectionPool | None = ..., - unix_socket_path: Text | None = ..., - encoding: Text = ..., - encoding_errors: Text = ..., - charset: Text | None = ..., - errors: Text | None = ..., + unix_socket_path: str | None = ..., + encoding: str = ..., + encoding_errors: str = ..., + charset: str | None = ..., + errors: str | None = ..., decode_responses: Literal[False] = ..., retry_on_timeout: bool = ..., ssl: bool = ..., - ssl_keyfile: Text | None = ..., - ssl_certfile: Text | None = ..., + ssl_keyfile: str | None = ..., + ssl_certfile: str | None = ..., ssl_cert_reqs: str | int | None = ..., - ssl_ca_certs: Text | None = ..., + ssl_ca_certs: str | None = ..., ssl_check_hostname: bool = ..., + ssl_password: Any | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Any | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Any | None = ..., # added in 4.1.1 max_connections: int | None = ..., single_connection_client: bool = ..., health_check_interval: float = ..., - client_name: Text | None = ..., - username: Text | None = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: Any | None = ..., ) -> None: ... + def get_encoder(self): ... + def get_connection_kwargs(self): ... def set_response_callback(self, command, callback): ... def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) 
-> Pipeline[_StrType]: ... def transaction(self, func, *watches, **kwargs): ... @@ -358,7 +278,7 @@ class Redis(Generic[_StrType]): timeout: float | None, sleep: float, blocking_timeout: float | None, - lock_class: Type[_LockType], + lock_class: type[_LockType], thread_local: bool = ..., ) -> _LockType: ... @overload @@ -369,398 +289,13 @@ class Redis(Generic[_StrType]): sleep: float = ..., blocking_timeout: float | None = ..., *, - lock_class: Type[_LockType], + lock_class: type[_LockType], thread_local: bool = ..., ) -> _LockType: ... - def pubsub(self, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... - def execute_command(self, *args, **options): ... - def parse_response(self, connection, command_name, **options): ... - def acl_cat(self, category: Text | None = ...) -> list[str]: ... - def acl_deluser(self, username: Text) -> int: ... - def acl_genpass(self) -> Text: ... - def acl_getuser(self, username: Text) -> Any | None: ... - def acl_list(self) -> list[Text]: ... - def acl_load(self) -> bool: ... - def acl_setuser( - self, - username: Text = ..., - enabled: bool = ..., - nopass: bool = ..., - passwords: Sequence[Text] | None = ..., - hashed_passwords: Sequence[Text] | None = ..., - categories: Sequence[Text] | None = ..., - commands: Sequence[Text] | None = ..., - keys: Sequence[Text] | None = ..., - reset: bool = ..., - reset_keys: bool = ..., - reset_passwords: bool = ..., - ) -> bool: ... - def acl_users(self) -> list[Text]: ... - def acl_whoami(self) -> Text: ... - def bgrewriteaof(self): ... - def bgsave(self): ... - def client_id(self) -> int: ... - def client_kill(self, address: Text) -> bool: ... - def client_list(self) -> list[dict[str, str]]: ... - def client_getname(self) -> str | None: ... - def client_setname(self, name: Text) -> bool: ... - def readwrite(self) -> bool: ... - def readonly(self) -> bool: ... - def config_get(self, pattern=...): ... - def config_set(self, name, value): ... 
- def config_resetstat(self): ... - def config_rewrite(self): ... - def dbsize(self) -> int: ... - def debug_object(self, key): ... - def echo(self, value: _Value) -> bytes: ... - def flushall(self) -> bool: ... - def flushdb(self) -> bool: ... - def info(self, section: _Key | None = ...) -> Mapping[str, Any]: ... - def lastsave(self): ... - def object(self, infotype, key): ... - def ping(self) -> bool: ... - def save(self) -> bool: ... - def sentinel(self, *args): ... - def sentinel_get_master_addr_by_name(self, service_name): ... - def sentinel_master(self, service_name): ... - def sentinel_masters(self): ... - def sentinel_monitor(self, name, ip, port, quorum): ... - def sentinel_remove(self, name): ... - def sentinel_sentinels(self, service_name): ... - def sentinel_set(self, name, option, value): ... - def sentinel_slaves(self, service_name): ... - def shutdown(self): ... - def slaveof(self, host=..., port=...): ... - def slowlog_get(self, num=...): ... - def slowlog_len(self): ... - def slowlog_reset(self): ... - def time(self): ... - def append(self, key, value): ... - def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ...) -> int: ... - def bitop(self, operation, dest, *keys): ... - def bitpos(self, key, bit, start=..., end=...): ... - def decr(self, name, amount=...): ... - def delete(self, *names: _Key) -> int: ... - def __delitem__(self, _Key): ... - def dump(self, name): ... - def exists(self, *names: _Key) -> int: ... - __contains__: Any - def expire(self, name: _Key, time: int | timedelta) -> bool: ... - def expireat(self, name, when): ... - def get(self, name: _Key) -> _StrType | None: ... - def __getitem__(self, name): ... - def getbit(self, name: _Key, offset: int) -> int: ... - def getrange(self, key, start, end): ... - def getset(self, name, value) -> _StrType | None: ... - def incr(self, name: _Key, amount: int = ...) -> int: ... - def incrby(self, name: _Key, amount: int = ...) -> int: ... 
- def incrbyfloat(self, name: _Key, amount: float = ...) -> float: ... - def keys(self, pattern: _Key = ...) -> list[_StrType]: ... - def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... - def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... - def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... - def move(self, name: _Key, db: int) -> bool: ... - def persist(self, name: _Key) -> bool: ... - def pexpire(self, name: _Key, time: int | timedelta) -> Literal[1, 0]: ... - def pexpireat(self, name: _Key, when: int | datetime) -> Literal[1, 0]: ... - def psetex(self, name, time_ms, value): ... - def pttl(self, name): ... - def randomkey(self): ... - def rename(self, src, dst): ... - def renamenx(self, src, dst): ... - def restore(self, name, ttl, value, replace: bool = ...): ... - def set( - self, - name: _Key, - value: _Value, - ex: None | int | timedelta = ..., - px: None | int | timedelta = ..., - nx: bool = ..., - xx: bool = ..., - keepttl: bool = ..., - ) -> bool | None: ... - def __setitem__(self, name, value): ... - def setbit(self, name: _Key, offset: int, value: int) -> int: ... - def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... - def setnx(self, name: _Key, value: _Value) -> bool: ... - def setrange(self, name, offset, value): ... - def strlen(self, name): ... - def substr(self, name, start, end=...): ... - def ttl(self, name: _Key) -> int: ... - def type(self, name): ... - def watch(self, *names): ... - def unlink(self, *names: _Key) -> int: ... - def unwatch(self): ... - @overload - def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType]: ... - @overload - def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Tuple[_StrType, _StrType] | None: ... - @overload - def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType]: ... 
- @overload - def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> Tuple[_StrType, _StrType] | None: ... - def brpoplpush(self, src, dst, timeout=...): ... - def lindex(self, name: _Key, index: int) -> _StrType | None: ... - def linsert( - self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value - ) -> int: ... - def llen(self, name: _Key) -> int: ... - def lpop(self, name): ... - def lpush(self, name: _Value, *values: _Value) -> int: ... - def lpushx(self, name, value): ... - def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ... - def lrem(self, name: _Key, count: int, value: _Value) -> int: ... - def lset(self, name: _Key, index: int, value: _Value) -> bool: ... - def ltrim(self, name: _Key, start: int, end: int) -> bool: ... - def rpop(self, name): ... - def rpoplpush(self, src, dst): ... - def rpush(self, name: _Value, *values: _Value) -> int: ... - def rpushx(self, name, value): ... - @overload - def sort( - self, - name: _Key, - start: int | None = ..., - num: int | None = ..., - by: _Key | None = ..., - get: _Key | Sequence[_Key] | None = ..., - desc: bool = ..., - alpha: bool = ..., - store: None = ..., - groups: bool = ..., - ) -> list[_StrType]: ... - @overload - def sort( - self, - name: _Key, - start: int | None = ..., - num: int | None = ..., - by: _Key | None = ..., - get: _Key | Sequence[_Key] | None = ..., - desc: bool = ..., - alpha: bool = ..., - *, - store: _Key, - groups: bool = ..., - ) -> int: ... - @overload - def sort( - self, - name: _Key, - start: int | None, - num: int | None, - by: _Key | None, - get: _Key | Sequence[_Key] | None, - desc: bool, - alpha: bool, - store: _Key, - groups: bool = ..., - ) -> int: ... - def scan(self, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Tuple[int, list[_StrType]]: ... - def scan_iter(self, match: Text | None = ..., count: int | None = ...) -> Iterator[_StrType]: ... 
- def sscan(self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...) -> Tuple[int, list[_StrType]]: ... - def sscan_iter(self, name, match=..., count=...): ... - def hscan( - self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ... - ) -> Tuple[int, dict[_StrType, _StrType]]: ... - def hscan_iter(self, name, match=..., count=...): ... - def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...): ... - def zscan_iter(self, name, match=..., count=..., score_cast_func=...): ... - def sadd(self, name: _Key, *values: _Value) -> int: ... - def scard(self, name: _Key) -> int: ... - def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Set[_Value]: ... - def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... - def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Set[_Value]: ... - def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... - def sismember(self, name: _Key, value: _Value) -> bool: ... - def smembers(self, name: _Key) -> Set[_StrType]: ... - def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ... - @overload - def spop(self, name: _Key, count: None = ...) -> _Value | None: ... - @overload - def spop(self, name: _Key, count: int) -> list[_Value]: ... - @overload - def srandmember(self, name: _Key, number: None = ...) -> _Value | None: ... - @overload - def srandmember(self, name: _Key, number: int) -> list[_Value]: ... - def srem(self, name: _Key, *values: _Value) -> int: ... - def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Set[_Value]: ... - def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... - def xack(self, name, groupname, *ids): ... - def xadd(self, name, fields, id=..., maxlen=..., approximate=...): ... - def xclaim( - self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... - ): ... 
- def xdel(self, name, *ids): ... - def xgroup_create(self, name, groupname, id=..., mkstream=...): ... - def xgroup_delconsumer(self, name, groupname, consumername): ... - def xgroup_destroy(self, name, groupname): ... - def xgroup_setid(self, name, groupname, id): ... - def xinfo_consumers(self, name, groupname): ... - def xinfo_groups(self, name): ... - def xinfo_stream(self, name): ... - def xlen(self, name: _Key) -> int: ... - def xpending(self, name, groupname): ... - def xpending_range(self, name, groupname, min, max, count, consumername=...): ... - def xrange(self, name, min=..., max=..., count=...): ... - def xread(self, streams, count=..., block=...): ... - def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...): ... - def xrevrange(self, name, max=..., min=..., count=...): ... - def xtrim(self, name, maxlen, approximate=...): ... - def zadd( - self, name: _Key, mapping: Mapping[_Key, _Value], nx: bool = ..., xx: bool = ..., ch: bool = ..., incr: bool = ... - ) -> int: ... - def zcard(self, name: _Key) -> int: ... - def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ... - def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ... - def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) -> int: ... - def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ... - def zpopmax(self, name: _Key, count: int | None = ...) -> list[_StrType]: ... - def zpopmin(self, name: _Key, count: int | None = ...) -> list[_StrType]: ... - @overload - def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType, float]: ... - @overload - def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> Tuple[_StrType, _StrType, float] | None: ... - @overload - def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> Tuple[_StrType, _StrType, float]: ... 
- @overload - def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> Tuple[_StrType, _StrType, float] | None: ... - @overload - def zrange( - self, - name: _Key, - start: int, - end: int, - desc: bool = ..., - *, - withscores: Literal[True], - score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ..., - ) -> list[Tuple[_StrType, _ScoreCastFuncReturn]]: ... - @overload - def zrange( - self, - name: _Key, - start: int, - end: int, - desc: bool = ..., - withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., - ) -> list[_StrType]: ... - def zrangebylex( - self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ... - ) -> list[_StrType]: ... - @overload - def zrangebyscore( - self, - name: _Key, - min: _Value, - max: _Value, - start: int | None = ..., - num: int | None = ..., - *, - withscores: Literal[True], - score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ..., - ) -> list[Tuple[_StrType, _ScoreCastFuncReturn]]: ... - @overload - def zrangebyscore( - self, - name: _Key, - min: _Value, - max: _Value, - start: int | None = ..., - num: int | None = ..., - withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., - ) -> list[_StrType]: ... - def zrank(self, name: _Key, value: _Value) -> int | None: ... - def zrem(self, name: _Key, *values: _Value) -> int: ... - def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ... - def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ... - def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ... - @overload - def zrevrange( - self, - name: _Key, - start: int, - end: int, - desc: bool = ..., - *, - withscores: Literal[True], - score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ..., - ) -> list[Tuple[_StrType, _ScoreCastFuncReturn]]: ... 
- @overload - def zrevrange( - self, - name: _Key, - start: int, - end: int, - desc: bool = ..., - withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., - ) -> list[_StrType]: ... - @overload - def zrevrangebyscore( - self, - name: _Key, - min: _Value, - max: _Value, - start: int | None = ..., - num: int | None = ..., - *, - withscores: Literal[True], - score_cast_func: Callable[[float], _ScoreCastFuncReturn] = ..., - ) -> list[Tuple[_StrType, _ScoreCastFuncReturn]]: ... - @overload - def zrevrangebyscore( - self, - name: _Key, - min: _Value, - max: _Value, - start: int | None = ..., - num: int | None = ..., - withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., - ) -> list[_StrType]: ... - def zrevrangebylex( - self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ... - ) -> list[_StrType]: ... - def zrevrank(self, name: _Key, value: _Value) -> int | None: ... - def zscore(self, name: _Key, value: _Value) -> float | None: ... - def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) -> int: ... - def pfadd(self, name: _Key, *values: _Value) -> int: ... - def pfcount(self, name: _Key) -> int: ... - def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ... - def hdel(self, name: _Key, *keys: _Key) -> int: ... - def hexists(self, name: _Key, key: _Key) -> bool: ... - def hget(self, name: _Key, key: _Key) -> _StrType | None: ... - def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ... - def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> int: ... - def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> float: ... - def hkeys(self, name: _Key) -> list[_StrType]: ... - def hlen(self, name: _Key) -> int: ... - @overload - def hset(self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ...) -> int: ... 
- @overload - def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value]) -> int: ... - @overload - def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value]) -> int: ... - def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ... - def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ... - def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... - def hvals(self, name: _Key) -> list[_StrType]: ... - def publish(self, channel: _Key, message: _Key) -> int: ... - def eval(self, script, numkeys, *keys_and_args): ... - def evalsha(self, sha, numkeys, *keys_and_args): ... - def script_exists(self, *args): ... - def script_flush(self): ... - def script_kill(self): ... - def script_load(self, script): ... - def register_script(self, script: Text | _StrType) -> Script: ... - def pubsub_channels(self, pattern: _Key = ...) -> list[Text]: ... - def pubsub_numsub(self, *args: _Key) -> list[Tuple[Text, int]]: ... - def pubsub_numpat(self) -> int: ... + def pubsub(self, *, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... + def execute_command(self, *args, **options: _CommandOptions): ... + def parse_response(self, connection, command_name, **options: _ParseResponseOptions): ... def monitor(self) -> Monitor: ... - def cluster(self, cluster_arg: str, *args: Any) -> Any: ... def __enter__(self) -> Redis[_StrType]: ... def __exit__(self, exc_type, exc_value, traceback): ... def __del__(self) -> None: ... 
@@ -770,27 +305,35 @@ class Redis(Generic[_StrType]): StrictRedis = Redis class PubSub: - PUBLISH_MESSAGE_TYPES: Any - UNSUBSCRIBE_MESSAGE_TYPES: Any + PUBLISH_MESSAGE_TYPES: ClassVar[tuple[str, str]] + UNSUBSCRIBE_MESSAGE_TYPES: ClassVar[tuple[str, str]] + HEALTH_CHECK_MESSAGE: ClassVar[str] connection_pool: Any shard_hint: Any ignore_subscribe_messages: Any connection: Any - encoding: Any - encoding_errors: Any - decode_responses: Any - def __init__(self, connection_pool, shard_hint=..., ignore_subscribe_messages=...) -> None: ... + subscribed_event: threading.Event + encoder: Any + health_check_response_b: bytes + health_check_response: list[str] | list[bytes] + def __init__( + self, connection_pool, shard_hint: Any | None = ..., ignore_subscribe_messages: bool = ..., encoder: Any | None = ... + ) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, exc_type: object, exc_value: object, traceback: object) -> None: ... def __del__(self): ... channels: Any patterns: Any def reset(self): ... def close(self) -> None: ... def on_connect(self, connection): ... - def encode(self, value): ... @property def subscribed(self): ... - def execute_command(self, *args, **kwargs): ... - def parse_response(self, block=...): ... + def execute_command(self, *args): ... + def clean_health_check_responses(self) -> None: ... + def parse_response(self, block: bool = ..., timeout: float = ...): ... + def is_health_check_response(self, response) -> bool: ... + def check_health(self) -> None: ... def psubscribe(self, *args: _Key, **kwargs: Callable[[Any], None]): ... def punsubscribe(self, *args: _Key) -> None: ... def subscribe(self, *args: _Key, **kwargs: Callable[[Any], None]) -> None: ... @@ -798,9 +341,18 @@ class PubSub: def listen(self): ... def get_message(self, ignore_subscribe_messages: bool = ..., timeout: float = ...) -> dict[str, Any] | None: ... def handle_message(self, response, ignore_subscribe_messages: bool = ...) -> dict[str, Any] | None: ... 
- def run_in_thread(self, sleep_time=...): ... + def run_in_thread(self, sleep_time: float = ..., daemon: bool = ..., exception_handler: Any | None = ...): ... def ping(self, message: _Value | None = ...) -> None: ... +class PubSubWorkerThread(threading.Thread): + daemon: Any + pubsub: Any + sleep_time: Any + exception_handler: Any + def __init__(self, pubsub, sleep_time, daemon: bool = ..., exception_handler: Any | None = ...) -> None: ... + def run(self) -> None: ... + def stop(self) -> None: ... + class Pipeline(Redis[_StrType], Generic[_StrType]): UNWATCH_COMMANDS: Any connection_pool: Any @@ -814,11 +366,12 @@ class Pipeline(Redis[_StrType], Generic[_StrType]): scripts: Any explicit_transaction: Any def __init__(self, connection_pool, response_callbacks, transaction, shard_hint) -> None: ... - def __enter__(self) -> Pipeline[_StrType]: ... # type: ignore + def __enter__(self) -> Pipeline[_StrType]: ... # type: ignore[override] def __exit__(self, exc_type, exc_value, traceback) -> None: ... def __del__(self) -> None: ... def __len__(self) -> int: ... def __bool__(self) -> bool: ... + def discard(self) -> None: ... def reset(self) -> None: ... def multi(self) -> None: ... def execute_command(self, *args, **options): ... @@ -833,104 +386,101 @@ class Pipeline(Redis[_StrType], Generic[_StrType]): def unwatch(self) -> bool: ... # in the Redis implementation, the following methods are inherited from client. def set_response_callback(self, command, callback): ... - def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) -> Pipeline[_StrType]: ... # type: ignore + def pipeline(self, transaction: bool = ..., shard_hint: Any = ...) -> Pipeline[_StrType]: ... # type: ignore[override] def lock(self, name, timeout=..., sleep=..., blocking_timeout=..., lock_class=..., thread_local=...): ... - def pubsub(self, shard_hint: Any = ..., ignore_subscribe_messages: bool = ...) -> PubSub: ... - def acl_cat(self, category: Text | None = ...) -> Pipeline[_StrType]: ... 
# type: ignore - def acl_deluser(self, username: Text) -> Pipeline[_StrType]: ... # type: ignore - def acl_genpass(self) -> Pipeline[_StrType]: ... # type: ignore - def acl_getuser(self, username: Text) -> Pipeline[_StrType]: ... # type: ignore - def acl_list(self) -> Pipeline[_StrType]: ... # type: ignore - def acl_load(self) -> Pipeline[_StrType]: ... # type: ignore - def acl_setuser( # type: ignore + def acl_cat(self, category: str | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_deluser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_genpass(self, bits: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_getuser(self, username: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_list(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_load(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_setuser( # type: ignore[override] self, - username: Text = ..., + username: str, enabled: bool = ..., nopass: bool = ..., - passwords: Sequence[Text] | None = ..., - hashed_passwords: Sequence[Text] | None = ..., - categories: Sequence[Text] | None = ..., - commands: Sequence[Text] | None = ..., - keys: Sequence[Text] | None = ..., + passwords: Sequence[str] | None = ..., + hashed_passwords: Sequence[str] | None = ..., + categories: Sequence[str] | None = ..., + commands: Sequence[str] | None = ..., + keys: Sequence[str] | None = ..., reset: bool = ..., reset_keys: bool = ..., reset_passwords: bool = ..., ) -> Pipeline[_StrType]: ... - def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore - def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore - def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore - def bgsave(self) -> Pipeline[_StrType]: ... # type: ignore - def client_id(self) -> Pipeline[_StrType]: ... # type: ignore - def client_kill(self, address: Text) -> Pipeline[_StrType]: ... 
# type: ignore - def client_list(self) -> Pipeline[_StrType]: ... # type: ignore - def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore - def client_setname(self, name: Text) -> Pipeline[_StrType]: ... # type: ignore - def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore - def readonly(self) -> Pipeline[_StrType]: ... # type: ignore - def config_get(self, pattern=...) -> Pipeline[_StrType]: ... # type: ignore - def config_set(self, name, value) -> Pipeline[_StrType]: ... # type: ignore - def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore - def config_rewrite(self) -> Pipeline[_StrType]: ... # type: ignore - def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore - def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore - def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore - def flushall(self) -> Pipeline[_StrType]: ... # type: ignore - def flushdb(self) -> Pipeline[_StrType]: ... # type: ignore - def info(self, section: _Key | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore - def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore - def ping(self) -> Pipeline[_StrType]: ... # type: ignore - def save(self) -> Pipeline[_StrType]: ... # type: ignore - def sentinel(self, *args) -> None: ... - def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ... # type: ignore - def sentinel_master(self, service_name) -> Pipeline[_StrType]: ... # type: ignore - def sentinel_masters(self) -> Pipeline[_StrType]: ... # type: ignore - def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ... # type: ignore - def sentinel_remove(self, name) -> Pipeline[_StrType]: ... # type: ignore - def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ... # type: ignore - def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ... 
# type: ignore - def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ... # type: ignore - def shutdown(self) -> None: ... - def slaveof(self, host=..., port=...) -> Pipeline[_StrType]: ... # type: ignore - def slowlog_get(self, num=...) -> Pipeline[_StrType]: ... # type: ignore - def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore - def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore - def time(self) -> Pipeline[_StrType]: ... # type: ignore - def append(self, key, value) -> Pipeline[_StrType]: ... # type: ignore - def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ... # type: ignore - def bitpos(self, key, bit, start=..., end=...) -> Pipeline[_StrType]: ... # type: ignore - def decr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore - def delete(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore + def acl_users(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def acl_whoami(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgrewriteaof(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def bgsave(self, schedule: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_id(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_kill(self, address: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_list(self, _type: str | None = ..., client_id: list[str] = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_getname(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def client_setname(self, name: str) -> Pipeline[_StrType]: ... # type: ignore[override] + def readwrite(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def readonly(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_get(self, pattern=...) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def config_set(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_resetstat(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def config_rewrite(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def dbsize(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def debug_object(self, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def echo(self, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushall(self, asynchronous: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def flushdb(self, asynchronous: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def info(self, section: _Key | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def lastsave(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def object(self, infotype, key) -> Pipeline[_StrType]: ... # type: ignore[override] + def ping(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def save(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_get_master_addr_by_name(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_master(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_masters(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_monitor(self, name, ip, port, quorum) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_remove(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_sentinels(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_set(self, name, option, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sentinel_slaves(self, service_name) -> Pipeline[_StrType]: ... # type: ignore[override] + def slaveof(self, host=..., port=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_get(self, num=...) 
-> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_len(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def slowlog_reset(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def time(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def append(self, key, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def bitop(self, operation, dest, *keys) -> Pipeline[_StrType]: ... # type: ignore[override] + def bitpos(self, key, bit, start=..., end=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def decr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def delete(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] def __delitem__(self, _Key) -> None: ... - def dump(self, name) -> Pipeline[_StrType]: ... # type: ignore - def exists(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore - def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore - def expire(self, name: _Key, time: int | timedelta) -> Pipeline[_StrType]: ... # type: ignore - def expireat(self, name, when) -> Pipeline[_StrType]: ... # type: ignore - def get(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def __getitem__(self, name) -> Pipeline[_StrType]: ... # type: ignore - def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ... # type: ignore - def getrange(self, key, start, end) -> Pipeline[_StrType]: ... # type: ignore - def getset(self, name, value) -> Pipeline[_StrType]: ... # type: ignore - def incr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore - def incrby(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore - def incrbyfloat(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore - def keys(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... 
# type: ignore - def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore - def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore - def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ... # type: ignore - def persist(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def pexpire(self, name: _Key, time: int | timedelta) -> Pipeline[_StrType]: ... # type: ignore - def pexpireat(self, name: _Key, when: int | datetime) -> Pipeline[_StrType]: ... # type: ignore - def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ... # type: ignore - def pttl(self, name) -> Pipeline[_StrType]: ... # type: ignore - def randomkey(self) -> Pipeline[_StrType]: ... # type: ignore - def rename(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore - def renamenx(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore - def restore(self, name, ttl, value, replace: bool = ...) -> Pipeline[_StrType]: ... # type: ignore - def set( # type: ignore + def dump(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def exists(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __contains__(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def expire(self, name: _Key, time: int | timedelta) -> Pipeline[_StrType]: ... # type: ignore[override] + def expireat(self, name, when) -> Pipeline[_StrType]: ... # type: ignore[override] + def get(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def __getitem__(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def getbit(self, name: _Key, offset: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def getrange(self, key, start, end) -> Pipeline[_StrType]: ... # type: ignore[override] + def getset(self, name, value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def incr(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def incrby(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def incrbyfloat(self, name, amount=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def keys(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def mset(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def msetnx(self, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def move(self, name: _Key, db: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def persist(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pexpire(self, name: _Key, time: int | timedelta) -> Pipeline[_StrType]: ... # type: ignore[override] + def pexpireat(self, name: _Key, when: int | datetime) -> Pipeline[_StrType]: ... # type: ignore[override] + def psetex(self, name, time_ms, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def pttl(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def randomkey(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def rename(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore[override] + def renamenx(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore[override] + def restore(self, name, ttl, value, replace: bool = ..., absttl: bool = ..., idletime: Any | None = ..., frequency: Any | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def set( # type: ignore[override] self, name: _Key, value: _Value, @@ -939,37 +489,51 @@ class Pipeline(Redis[_StrType], Generic[_StrType]): nx: bool = ..., xx: bool = ..., keepttl: bool = ..., + get: bool = ..., + exat: Any | None = ..., + pxat: Any | None = ..., ) -> Pipeline[_StrType]: ... 
def __setitem__(self, name, value) -> None: ... - def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ... # type: ignore - def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def setnx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore - def setrange(self, name, offset, value) -> Pipeline[_StrType]: ... # type: ignore - def strlen(self, name) -> Pipeline[_StrType]: ... # type: ignore - def substr(self, name, start, end=...) -> Pipeline[_StrType]: ... # type: ignore - def ttl(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def type(self, name) -> Pipeline[_StrType]: ... # type: ignore - def unlink(self, *names: _Key) -> Pipeline[_StrType]: ... # type: ignore - def blpop(self, keys: _Value | Iterable[_Value], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore - def brpop(self, keys: _Value | Iterable[_Value], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore - def brpoplpush(self, src, dst, timeout=...) -> Pipeline[_StrType]: ... # type: ignore - def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... # type: ignore - def linsert( # type: ignore + def setbit(self, name: _Key, offset: int, value: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def setnx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def setrange(self, name, offset, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def strlen(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def substr(self, name, start, end=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def ttl(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def type(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def unlink(self, *names: _Key) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def blmove( # type: ignore[override] + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = ..., + dest: Literal["LEFT", "RIGHT"] = ..., + ) -> Pipeline[_StrType]: ... + def blpop(self, keys: _Value | Iterable[_Value], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def brpop(self, keys: _Value | Iterable[_Value], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def brpoplpush(self, src, dst, timeout=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def lindex(self, name: _Key, index: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def linsert( # type: ignore[override] self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value ) -> Pipeline[_StrType]: ... - def llen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def lpop(self, name) -> Pipeline[_StrType]: ... # type: ignore - def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore - def lpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore - def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore - def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore - def rpop(self, name) -> Pipeline[_StrType]: ... # type: ignore - def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore - def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore - def rpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore - def sort( # type: ignore + def llen(self, name: _Key) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def lmove( # type: ignore[override] + self, first_list: _Key, second_list: _Key, src: Literal["LEFT", "RIGHT"] = ..., dest: Literal["LEFT", "RIGHT"] = ... + ) -> Pipeline[_StrType]: ... + def lpop(self, name, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def lpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lrange(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def lrem(self, name: _Key, count: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def lset(self, name: _Key, index: int, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def ltrim(self, name: _Key, start: int, end: int) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpop(self, name, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpoplpush(self, src, dst) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpush(self, name: _Value, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def rpushx(self, name, value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sort( # type: ignore[override] self, name: _Key, start: int | None = ..., @@ -981,72 +545,86 @@ class Pipeline(Redis[_StrType], Generic[_StrType]): store: _Key | None = ..., groups: bool = ..., ) -> Pipeline[_StrType]: ... - def scan(self, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def scan_iter(self, match: Text | None = ..., count: int | None = ...) -> Iterator[Any]: ... - def sscan(self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...) -> Pipeline[_StrType]: ... # type: ignore - def sscan_iter(self, name, match=..., count=...) -> Iterator[Any]: ... 
- def hscan(self, name: _Key, cursor: int = ..., match: Text = ..., count: int = ...) -> Pipeline[_StrType]: ... # type: ignore - def hscan_iter(self, name, match=..., count=...) -> Iterator[Any]: ... - def zscan(self, name, cursor=..., match=..., count=..., score_cast_func=...) -> Pipeline[_StrType]: ... # type: ignore - def zscan_iter(self, name, match=..., count=..., score_cast_func=...) -> Iterator[Any]: ... - def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore - def scard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def smembers(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def spop(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def srandmember(self, name: _Key, number: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore - def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ... # type: ignore - def xadd(self, name, fields, id=..., maxlen=..., approximate=...) -> Pipeline[_StrType]: ... 
# type: ignore + def scan(self, cursor: int = ..., match: _Key | None = ..., count: int | None = ..., _type: str | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def scan_iter(self, match: _Key | None = ..., count: int | None = ..., _type: str | None = ...) -> Iterator[Any]: ... # type: ignore[override] + def sscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def sscan_iter(self, name: _Key, match: _Key | None = ..., count: int | None = ...) -> Iterator[Any]: ... + def hscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def hscan_iter(self, name, match: _Key | None = ..., count: int | None = ...) -> Iterator[Any]: ... + def zscan(self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ..., score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zscan_iter( + self, name: _Key, match: _Key | None = ..., count: int | None = ..., score_cast_func: Callable[[_StrType], Any] = ... + ) -> Iterator[Any]: ... + def sadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def scard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sismember(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def smembers(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def smove(self, src: _Key, dst: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def spop(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def srandmember(self, name: _Key, number: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def srem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def xack(self, name, groupname, *ids) -> Pipeline[_StrType]: ... # type: ignore[override] + def xadd(self, name, fields, id=..., maxlen=..., approximate: bool = ..., nomkstream: bool = ..., minid: Any | None = ..., limit: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] def xclaim( self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... - ) -> Pipeline[_StrType]: ... # type: ignore - def xdel(self, name, *ids) -> Pipeline[_StrType]: ... # type: ignore - def xgroup_create(self, name, groupname, id=..., mkstream=...) -> Pipeline[_StrType]: ... # type: ignore - def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ... # type: ignore - def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore - def xgroup_setid(self, name, groupname, id) -> Pipeline[_StrType]: ... # type: ignore - def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore - def xinfo_groups(self, name) -> Pipeline[_StrType]: ... # type: ignore - def xinfo_stream(self, name) -> Pipeline[_StrType]: ... # type: ignore - def xlen(self, name: _Key) -> Pipeline[_StrType]: ... 
# type: ignore - def xpending(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore - def xpending_range(self, name, groupname, min, max, count, consumername=...) -> Pipeline[_StrType]: ... # type: ignore - def xrange(self, name, min=..., max=..., count=...) -> Pipeline[_StrType]: ... # type: ignore - def xread(self, streams, count=..., block=...) -> Pipeline[_StrType]: ... # type: ignore - def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...) -> Pipeline[_StrType]: ... # type: ignore - def xrevrange(self, name, max=..., min=..., count=...) -> Pipeline[_StrType]: ... # type: ignore - def xtrim(self, name, maxlen, approximate=...) -> Pipeline[_StrType]: ... # type: ignore - def zadd( # type: ignore - self, name: _Key, mapping: Mapping[_Key, _Value], nx: bool = ..., xx: bool = ..., ch: bool = ..., incr: bool = ... + ) -> Pipeline[_StrType]: ... # type: ignore[override] + def xdel(self, name, *ids) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_create(self, name, groupname, id=..., mkstream=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_delconsumer(self, name, groupname, consumername) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_destroy(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore[override] + def xgroup_setid(self, name, groupname, id) -> Pipeline[_StrType]: ... # type: ignore[override] + def xinfo_consumers(self, name, groupname) -> Pipeline[_StrType]: ... # type: ignore[override] + def xinfo_groups(self, name) -> Pipeline[_StrType]: ... # type: ignore[override] + def xinfo_stream(self, name, full: bool = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def xpending(self, name, groupname) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def xpending_range(self, name, groupname, idle: Any | None = ..., min: int | None = ..., max: int | None = ..., count: int | None = ..., consumername=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xrange(self, name, min=..., max=..., count=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xread(self, streams, count=..., block=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xreadgroup(self, groupname, consumername, streams, count=..., block=..., noack=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xrevrange(self, name, max=..., min=..., count=...) -> Pipeline[_StrType]: ... # type: ignore[override] + def xtrim(self, name, maxlen: int | None = ..., approximate: bool = ..., minid: Any | None = ..., limit: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zadd( # type: ignore[override] + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = ..., + xx: bool = ..., + ch: bool = ..., + incr: bool = ..., + gt: Any | None = ..., + lt: Any | None = ..., ) -> Pipeline[_StrType]: ... - def zcard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) -> Pipeline[_StrType]: ... # type: ignore - def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zpopmax(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def zpopmin(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = ...) -> Pipeline[_StrType]: ... 
# type: ignore - def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore - def zrange( # type: ignore + def zcard(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def zcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zincrby(self, name: _Key, amount: float, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zpopmax(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zpopmin(self, name: _Key, count: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrange( # type: ignore[override] self, name: _Key, start: int, end: int, desc: bool = ..., withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., ) -> Pipeline[_StrType]: ... - def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ...) -> Pipeline[_StrType]: ... # type: ignore - def zrangebyscore( # type: ignore + def zrangebylex(self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ...) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def zrangebyscore( # type: ignore[override] self, name: _Key, min: _Value, @@ -1054,84 +632,72 @@ class Pipeline(Redis[_StrType], Generic[_StrType]): start: int | None = ..., num: int | None = ..., withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., + score_cast_func: Callable[[_StrType], Any] = ..., ) -> Pipeline[_StrType]: ... - def zrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zrevrange( # type: ignore - self, - name: _Key, - start: int, - end: int, - desc: bool = ..., - withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., + def zrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrem(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebyrank(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zrevrange( # type: ignore[override] + self, name: _Key, start: int, end: int, withscores: bool = ..., score_cast_func: Callable[[_StrType], Any] = ... ) -> Pipeline[_StrType]: ... 
- def zrevrangebyscore( # type: ignore + def zrevrangebyscore( # type: ignore[override] self, name: _Key, - min: _Value, max: _Value, + min: _Value, start: int | None = ..., num: int | None = ..., withscores: bool = ..., - score_cast_func: Callable[[Any], Any] = ..., + score_cast_func: Callable[[_StrType], Any] = ..., ) -> Pipeline[_StrType]: ... - def zrevrangebylex( # type: ignore - self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ... + def zrevrangebylex( # type: ignore[override] + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ... ) -> Pipeline[_StrType]: ... - def zrevrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] = ...) -> Pipeline[_StrType]: ... # type: ignore - def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore - def pfcount(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hgetall(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> Pipeline[_StrType]: ... # type: ignore - def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> Pipeline[_StrType]: ... # type: ignore - def hkeys(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hlen(self, name: _Key) -> Pipeline[_StrType]: ... 
# type: ignore - @overload # type: ignore + def zrevrank(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zscore(self, name: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfadd(self, name: _Key, *values: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfcount(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pfmerge(self, dest: _Key, *sources: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hdel(self, name: _Key, *keys: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hexists(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hget(self, name: _Key, key: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hgetall(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def hkeys(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hlen(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + @overload # type: ignore[override] def hset(self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ...) -> Pipeline[_StrType]: ... - @overload # type: ignore + @overload # type: ignore[override] def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... - @overload # type: ignore + @overload # type: ignore[override] def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... 
- def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore - def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore - def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def hvals(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore - def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ... # type: ignore - def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore - def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore - def script_exists(self, *args) -> Pipeline[_StrType]: ... # type: ignore - def script_flush(self) -> Pipeline[_StrType]: ... # type: ignore - def script_kill(self) -> Pipeline[_StrType]: ... # type: ignore - def script_load(self, script) -> Pipeline[_StrType]: ... # type: ignore - def register_script(self, script: Text | _StrType) -> Script: ... - def pubsub_channels(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore - def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ... # type: ignore - def pubsub_numpat(self) -> Pipeline[_StrType]: ... # type: ignore + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> Pipeline[_StrType]: ... # type: ignore[override] + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> Pipeline[_StrType]: ... # type: ignore[override] + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def hvals(self, name: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def publish(self, channel: _Key, message: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def eval(self, script, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... # type: ignore[override] + def evalsha(self, sha, numkeys, *keys_and_args) -> Pipeline[_StrType]: ... 
# type: ignore[override] + def script_exists(self, *args) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_flush(self, sync_type: Any | None = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_kill(self) -> Pipeline[_StrType]: ... # type: ignore[override] + def script_load(self, script) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_channels(self, pattern: _Key = ...) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_numsub(self, *args: _Key) -> Pipeline[_StrType]: ... # type: ignore[override] + def pubsub_numpat(self) -> Pipeline[_StrType]: ... # type: ignore[override] def monitor(self) -> Monitor: ... - def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ... # type: ignore + def cluster(self, cluster_arg: str, *args: Any) -> Pipeline[_StrType]: ... # type: ignore[override] def client(self) -> Any: ... -class Script: - registered_client: Any - script: Any - sha: Any - def __init__(self, registered_client, script) -> None: ... - def __call__(self, keys=..., args=..., client=...): ... - -class Monitor(object): +class Monitor: + command_re: Pattern[str] + monitor_re: Pattern[str] def __init__(self, connection_pool) -> None: ... - def __enter__(self) -> Monitor: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... - def next_command(self) -> dict[Text, Any]: ... - def listen(self) -> Iterable[dict[Text, Any]]: ... + def next_command(self) -> dict[str, Any]: ... + def listen(self) -> Iterable[dict[str, Any]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/cluster.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/cluster.pyi new file mode 100644 index 000000000000..fe6b8c45a108 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/cluster.pyi @@ -0,0 +1,210 @@ +from typing import Any, ClassVar, Generic + +from redis.client import PubSub +from redis.commands import RedisClusterCommands +from redis.commands.core import _StrType +from redis.connection import DefaultParser +from redis.exceptions import RedisError + +def get_node_name(host, port): ... +def get_connection(redis_node, *args, **options): ... +def parse_scan_result(command, res, **options): ... +def parse_pubsub_numsub(command, res, **options): ... +def parse_cluster_slots(resp, **options): ... + +PRIMARY: str +REPLICA: str +SLOT_ID: str +REDIS_ALLOWED_KEYS: Any +KWARGS_DISABLED_KEYS: Any +READ_COMMANDS: Any + +def cleanup_kwargs(**kwargs): ... + +class ClusterParser(DefaultParser): + EXCEPTION_CLASSES: Any + +class RedisCluster(RedisClusterCommands[_StrType], Generic[_StrType]): + RedisClusterRequestTTL: ClassVar[int] + PRIMARIES: ClassVar[str] + REPLICAS: ClassVar[str] + ALL_NODES: ClassVar[str] + RANDOM: ClassVar[str] + DEFAULT_NODE: ClassVar[str] + NODE_FLAGS: ClassVar[set[str]] + COMMAND_FLAGS: ClassVar[Any] + CLUSTER_COMMANDS_RESPONSE_CALLBACKS: ClassVar[dict[str, Any]] + RESULT_CALLBACKS: ClassVar[Any] + ERRORS_ALLOW_RETRY: ClassVar[tuple[type[RedisError], ...]] + user_on_connect_func: Any + encoder: Any + cluster_error_retry_attempts: Any + command_flags: Any + node_flags: Any + read_from_replicas: Any + reinitialize_counter: int + reinitialize_steps: Any + nodes_manager: Any + cluster_response_callbacks: Any + result_callbacks: Any + commands_parser: Any + def __init__( + self, + host: Any | None = ..., + port: int = ..., + startup_nodes: Any | None = ..., + cluster_error_retry_attempts: int = ..., + require_full_coverage: bool = 
..., + reinitialize_steps: int = ..., + read_from_replicas: bool = ..., + url: Any | None = ..., + **kwargs, + ) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + def __del__(self) -> None: ... + def disconnect_connection_pools(self) -> None: ... + @classmethod + def from_url(cls, url, **kwargs): ... + def on_connect(self, connection) -> None: ... + def get_redis_connection(self, node): ... + def get_node(self, host: Any | None = ..., port: Any | None = ..., node_name: Any | None = ...): ... + def get_primaries(self): ... + def get_replicas(self): ... + def get_random_node(self): ... + def get_nodes(self): ... + def get_node_from_key(self, key, replica: bool = ...): ... + def get_default_node(self): ... + def set_default_node(self, node): ... + def monitor(self, target_node: Any | None = ...): ... + def pubsub(self, node: Any | None = ..., host: Any | None = ..., port: Any | None = ..., **kwargs): ... + def pipeline(self, transaction: Any | None = ..., shard_hint: Any | None = ...): ... + def keyslot(self, key): ... + def determine_slot(self, *args): ... + def reinitialize_caches(self) -> None: ... + def get_encoder(self): ... + def get_connection_kwargs(self): ... + def execute_command(self, *args, **kwargs): ... + def close(self) -> None: ... + +class ClusterNode: + host: Any + port: Any + name: Any + server_type: Any + redis_connection: Any + def __init__(self, host, port, server_type: Any | None = ..., redis_connection: Any | None = ...) -> None: ... + def __eq__(self, obj): ... + def __del__(self) -> None: ... + +class LoadBalancer: + primary_to_idx: Any + start_index: Any + def __init__(self, start_index: int = ...) -> None: ... + def get_server_index(self, primary, list_size): ... + def reset(self) -> None: ... 
+ +class NodesManager: + nodes_cache: Any + slots_cache: Any + startup_nodes: Any + default_node: Any + from_url: Any + connection_kwargs: Any + read_load_balancer: Any + def __init__( + self, startup_nodes, from_url: bool = ..., require_full_coverage: bool = ..., lock: Any | None = ..., **kwargs + ) -> None: ... + def get_node(self, host: Any | None = ..., port: Any | None = ..., node_name: Any | None = ...): ... + def update_moved_exception(self, exception) -> None: ... + def get_node_from_slot(self, slot, read_from_replicas: bool = ..., server_type: Any | None = ...): ... + def get_nodes_by_server_type(self, server_type): ... + def populate_startup_nodes(self, nodes) -> None: ... + def check_slots_coverage(self, slots_cache): ... + def create_redis_connections(self, nodes) -> None: ... + def create_redis_node(self, host, port, **kwargs): ... + def initialize(self) -> None: ... + def close(self) -> None: ... + def reset(self) -> None: ... + +class ClusterPubSub(PubSub): + node: Any + cluster: Any + def __init__( + self, redis_cluster, node: Any | None = ..., host: Any | None = ..., port: Any | None = ..., **kwargs + ) -> None: ... + def set_pubsub_node(self, cluster, node: Any | None = ..., host: Any | None = ..., port: Any | None = ...) -> None: ... + def get_pubsub_node(self): ... + def execute_command(self, *args, **kwargs) -> None: ... + def get_redis_connection(self): ... 
+ +class ClusterPipeline(RedisCluster[_StrType], Generic[_StrType]): + command_stack: Any + nodes_manager: Any + refresh_table_asap: bool + result_callbacks: Any + startup_nodes: Any + read_from_replicas: Any + command_flags: Any + cluster_response_callbacks: Any + cluster_error_retry_attempts: Any + reinitialize_counter: int + reinitialize_steps: Any + encoder: Any + commands_parser: Any + def __init__( + self, + nodes_manager, + result_callbacks: Any | None = ..., + cluster_response_callbacks: Any | None = ..., + startup_nodes: Any | None = ..., + read_from_replicas: bool = ..., + cluster_error_retry_attempts: int = ..., + reinitialize_steps: int = ..., + **kwargs, + ) -> None: ... + def __enter__(self): ... + def __exit__(self, exc_type, exc_value, traceback) -> None: ... + def __del__(self) -> None: ... + def __len__(self): ... + def __nonzero__(self): ... + def __bool__(self): ... + def execute_command(self, *args, **kwargs): ... + def pipeline_execute_command(self, *args, **options): ... + def raise_first_error(self, stack) -> None: ... + def annotate_exception(self, exception, number, command) -> None: ... + def execute(self, raise_on_error: bool = ...): ... + scripts: Any + watching: bool + explicit_transaction: bool + def reset(self) -> None: ... + def send_cluster_commands(self, stack, raise_on_error: bool = ..., allow_redirections: bool = ...): ... + def eval(self) -> None: ... + def multi(self) -> None: ... + def immediate_execute_command(self, *args, **options) -> None: ... + def load_scripts(self) -> None: ... + def watch(self, *names) -> None: ... + def unwatch(self) -> None: ... + def script_load_for_pipeline(self, *args, **kwargs) -> None: ... + def delete(self, *names): ... + +def block_pipeline_command(func): ... + +class PipelineCommand: + args: Any + options: Any + position: Any + result: Any + node: Any + asking: bool + def __init__(self, args, options: Any | None = ..., position: Any | None = ...) -> None: ... 
+ +class NodeCommands: + parse_response: Any + connection_pool: Any + connection: Any + commands: Any + def __init__(self, parse_response, connection_pool, connection) -> None: ... + def append(self, c) -> None: ... + def write(self) -> None: ... + def read(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/__init__.pyi new file mode 100644 index 000000000000..5ab5e2219907 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/__init__.pyi @@ -0,0 +1,8 @@ +from .cluster import RedisClusterCommands as RedisClusterCommands +from .core import CoreCommands as CoreCommands +from .helpers import list_or_args as list_or_args +from .parser import CommandsParser as CommandsParser +from .redismodules import RedisModuleCommands as RedisModuleCommands +from .sentinel import SentinelCommands as SentinelCommands + +__all__ = ["RedisClusterCommands", "CommandsParser", "CoreCommands", "list_or_args", "RedisModuleCommands", "SentinelCommands"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/__init__.pyi new file mode 100644 index 000000000000..ba1eb8dd8c8f --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/__init__.pyi @@ -0,0 +1,58 @@ +from typing import Any + +from .commands import * +from .info import BFInfo as BFInfo, CFInfo as CFInfo, CMSInfo as CMSInfo, TDigestInfo as TDigestInfo, TopKInfo as TopKInfo + +class AbstractBloom: + @staticmethod + def appendItems(params, items) -> None: ... + @staticmethod + def appendError(params, error) -> None: ... + @staticmethod + def appendCapacity(params, capacity) -> None: ... + @staticmethod + def appendExpansion(params, expansion) -> None: ... 
+ @staticmethod + def appendNoScale(params, noScale) -> None: ... + @staticmethod + def appendWeights(params, weights) -> None: ... + @staticmethod + def appendNoCreate(params, noCreate) -> None: ... + @staticmethod + def appendItemsAndIncrements(params, items, increments) -> None: ... + @staticmethod + def appendValuesAndWeights(params, items, weights) -> None: ... + @staticmethod + def appendMaxIterations(params, max_iterations) -> None: ... + @staticmethod + def appendBucketSize(params, bucket_size) -> None: ... + +class CMSBloom(CMSCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class TOPKBloom(TOPKCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class CFBloom(CFCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class TDigestBloom(TDigestCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... + +class BFBloom(BFCommands, AbstractBloom): + client: Any + commandmixin: Any + execute_command: Any + def __init__(self, client, **kwargs) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/commands.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/commands.pyi new file mode 100644 index 000000000000..5036d1108554 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/commands.pyi @@ -0,0 +1,107 @@ +from typing import Any + +BF_RESERVE: str +BF_ADD: str +BF_MADD: str +BF_INSERT: str +BF_EXISTS: str +BF_MEXISTS: str +BF_SCANDUMP: str +BF_LOADCHUNK: str +BF_INFO: str +CF_RESERVE: str +CF_ADD: str +CF_ADDNX: str +CF_INSERT: str +CF_INSERTNX: str +CF_EXISTS: str +CF_DEL: str +CF_COUNT: str +CF_SCANDUMP: str +CF_LOADCHUNK: str +CF_INFO: str +CMS_INITBYDIM: str +CMS_INITBYPROB: str +CMS_INCRBY: str +CMS_QUERY: str +CMS_MERGE: str +CMS_INFO: str +TOPK_RESERVE: str +TOPK_ADD: str +TOPK_INCRBY: str +TOPK_QUERY: str +TOPK_COUNT: str +TOPK_LIST: str +TOPK_INFO: str +TDIGEST_CREATE: str +TDIGEST_RESET: str +TDIGEST_ADD: str +TDIGEST_MERGE: str +TDIGEST_CDF: str +TDIGEST_QUANTILE: str +TDIGEST_MIN: str +TDIGEST_MAX: str +TDIGEST_INFO: str + +class BFCommands: + def create(self, key, errorRate, capacity, expansion: Any | None = ..., noScale: Any | None = ...): ... + def add(self, key, item): ... + def madd(self, key, *items): ... + def insert( + self, + key, + items, + capacity: Any | None = ..., + error: Any | None = ..., + noCreate: Any | None = ..., + expansion: Any | None = ..., + noScale: Any | None = ..., + ): ... + def exists(self, key, item): ... + def mexists(self, key, *items): ... + def scandump(self, key, iter): ... + def loadchunk(self, key, iter, data): ... + def info(self, key): ... + +class CFCommands: + def create( + self, key, capacity, expansion: Any | None = ..., bucket_size: Any | None = ..., max_iterations: Any | None = ... + ): ... + def add(self, key, item): ... + def addnx(self, key, item): ... + def insert(self, key, items, capacity: Any | None = ..., nocreate: Any | None = ...): ... 
+ def insertnx(self, key, items, capacity: Any | None = ..., nocreate: Any | None = ...): ... + def exists(self, key, item): ... + def delete(self, key, item): ... + def count(self, key, item): ... + def scandump(self, key, iter): ... + def loadchunk(self, key, iter, data): ... + def info(self, key): ... + +class TOPKCommands: + def reserve(self, key, k, width, depth, decay): ... + def add(self, key, *items): ... + def incrby(self, key, items, increments): ... + def query(self, key, *items): ... + def count(self, key, *items): ... + def list(self, key, withcount: bool = ...): ... + def info(self, key): ... + +class TDigestCommands: + def create(self, key, compression): ... + def reset(self, key): ... + def add(self, key, values, weights): ... + def merge(self, toKey, fromKey): ... + def min(self, key): ... + def max(self, key): ... + def quantile(self, key, quantile): ... + def cdf(self, key, value): ... + def info(self, key): ... + +class CMSCommands: + def initbydim(self, key, width, depth): ... + def initbyprob(self, key, error, probability): ... + def incrby(self, key, items, increments): ... + def query(self, key, *items): ... + def merge(self, destKey, numKeys, srcKeys, weights=...): ... + def info(self, key): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/info.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/info.pyi new file mode 100644 index 000000000000..54d1cf044e9e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/bf/info.pyi @@ -0,0 +1,43 @@ +from typing import Any + +class BFInfo: + capacity: Any + size: Any + filterNum: Any + insertedNum: Any + expansionRate: Any + def __init__(self, args) -> None: ... + +class CFInfo: + size: Any + bucketNum: Any + filterNum: Any + insertedNum: Any + deletedNum: Any + bucketSize: Any + expansionRate: Any + maxIteration: Any + def __init__(self, args) -> None: ... 
+ +class CMSInfo: + width: Any + depth: Any + count: Any + def __init__(self, args) -> None: ... + +class TopKInfo: + k: Any + width: Any + depth: Any + decay: Any + def __init__(self, args) -> None: ... + +class TDigestInfo: + compression: Any + capacity: Any + mergedNodes: Any + unmergedNodes: Any + mergedWeight: Any + unmergedWeight: Any + totalCompressions: Any + def __init__(self, args) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/cluster.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/cluster.pyi new file mode 100644 index 000000000000..66324ed8aa42 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/cluster.pyi @@ -0,0 +1,60 @@ +from typing import Any, Generic + +from .core import ACLCommands, DataAccessCommands, ManagementCommands, PubSubCommands, _StrType + +class ClusterMultiKeyCommands: + def mget_nonatomic(self, keys, *args): ... + def mset_nonatomic(self, mapping): ... + def exists(self, *keys): ... + def delete(self, *keys): ... + def touch(self, *keys): ... + def unlink(self, *keys): ... + +class ClusterManagementCommands(ManagementCommands): + def slaveof(self, *args, **kwargs) -> None: ... + def replicaof(self, *args, **kwargs) -> None: ... + def swapdb(self, *args, **kwargs) -> None: ... + +class ClusterDataAccessCommands(DataAccessCommands[_StrType], Generic[_StrType]): + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = ..., + len: bool = ..., + idx: bool = ..., + minmatchlen: Any | None = ..., + withmatchlen: bool = ..., + **kwargs, + ): ... + +class RedisClusterCommands( + ClusterMultiKeyCommands, + ClusterManagementCommands, + ACLCommands[_StrType], + PubSubCommands, + ClusterDataAccessCommands[_StrType], + Generic[_StrType], +): + def cluster_addslots(self, target_node, *slots): ... + def cluster_countkeysinslot(self, slot_id): ... + def cluster_count_failure_report(self, node_id): ... 
+ def cluster_delslots(self, *slots): ... + def cluster_failover(self, target_node, option: Any | None = ...): ... + def cluster_info(self, target_nodes: Any | None = ...): ... + def cluster_keyslot(self, key): ... + def cluster_meet(self, host, port, target_nodes: Any | None = ...): ... + def cluster_nodes(self): ... + def cluster_replicate(self, target_nodes, node_id): ... + def cluster_reset(self, soft: bool = ..., target_nodes: Any | None = ...): ... + def cluster_save_config(self, target_nodes: Any | None = ...): ... + def cluster_get_keys_in_slot(self, slot, num_keys): ... + def cluster_set_config_epoch(self, epoch, target_nodes: Any | None = ...): ... + def cluster_setslot(self, target_node, node_id, slot_id, state): ... + def cluster_setslot_stable(self, slot_id): ... + def cluster_replicas(self, node_id, target_nodes: Any | None = ...): ... + def cluster_slots(self, target_nodes: Any | None = ...): ... + read_from_replicas: bool + def readonly(self, target_nodes: Any | None = ...): ... + def readwrite(self, target_nodes: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/core.pyi new file mode 100644 index 000000000000..808227cdbee2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/core.pyi @@ -0,0 +1,742 @@ +import builtins +from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence +from datetime import datetime, timedelta +from typing import Any, Generic, TypeVar, Union, overload +from typing_extensions import Literal + +from ..client import _CommandOptions, _Key, _Value + +_ScoreCastFuncReturn = TypeVar("_ScoreCastFuncReturn") +_StrType = TypeVar("_StrType", bound=Union[str, bytes]) + +class ACLCommands(Generic[_StrType]): + def acl_cat(self, category: str | None = ..., **kwargs: _CommandOptions) -> list[str]: ... 
+ def acl_deluser(self, *username: str, **kwargs: _CommandOptions) -> int: ... + def acl_genpass(self, bits: int | None = ..., **kwargs: _CommandOptions) -> str: ... + def acl_getuser(self, username: str, **kwargs: _CommandOptions) -> Any | None: ... + def acl_help(self, **kwargs: _CommandOptions): ... + def acl_list(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_log(self, count: int | None = ..., **kwargs: _CommandOptions): ... + def acl_log_reset(self, **kwargs: _CommandOptions): ... + def acl_load(self, **kwargs: _CommandOptions) -> bool: ... + def acl_save(self, **kwargs: _CommandOptions): ... + def acl_setuser( + self, + username: str, + enabled: bool = ..., + nopass: bool = ..., + passwords: Sequence[str] | None = ..., + hashed_passwords: Sequence[str] | None = ..., + categories: Sequence[str] | None = ..., + commands: Sequence[str] | None = ..., + keys: Sequence[str] | None = ..., + reset: bool = ..., + reset_keys: bool = ..., + reset_passwords: bool = ..., + **kwargs: _CommandOptions, + ) -> bool: ... + def acl_users(self, **kwargs: _CommandOptions) -> list[str]: ... + def acl_whoami(self, **kwargs: _CommandOptions) -> str: ... + +class ManagementCommands: + def bgrewriteaof(self, **kwargs: _CommandOptions): ... + def bgsave(self, schedule: bool = ..., **kwargs: _CommandOptions): ... + def role(self): ... + def client_kill(self, address: str, **kwargs: _CommandOptions) -> bool: ... + def client_kill_filter( + self, + _id: Any | None = ..., + _type: Any | None = ..., + addr: Any | None = ..., + skipme: Any | None = ..., + laddr: Any | None = ..., + user: Any | None = ..., + **kwargs: _CommandOptions, + ): ... + def client_info(self, **kwargs: _CommandOptions): ... + def client_list( + self, _type: str | None = ..., client_id: list[str] = ..., **kwargs: _CommandOptions + ) -> list[dict[str, str]]: ... + def client_getname(self, **kwargs: _CommandOptions) -> str | None: ... + def client_getredir(self, **kwargs: _CommandOptions): ... 
+ def client_reply(self, reply, **kwargs: _CommandOptions): ... + def client_id(self, **kwargs: _CommandOptions) -> int: ... + def client_tracking_on( + self, clientid: Any | None = ..., prefix=..., bcast: bool = ..., optin: bool = ..., optout: bool = ..., noloop: bool = ... + ): ... + def client_tracking_off( + self, clientid: Any | None = ..., prefix=..., bcast: bool = ..., optin: bool = ..., optout: bool = ..., noloop: bool = ... + ): ... + def client_tracking( + self, + on: bool = ..., + clientid: Any | None = ..., + prefix=..., + bcast: bool = ..., + optin: bool = ..., + optout: bool = ..., + noloop: bool = ..., + **kwargs: _CommandOptions, + ): ... + def client_trackinginfo(self, **kwargs: _CommandOptions): ... + def client_setname(self, name: str, **kwargs: _CommandOptions) -> bool: ... + def client_unblock(self, client_id, error: bool = ..., **kwargs: _CommandOptions): ... + def client_pause(self, timeout, all: bool = ..., **kwargs: _CommandOptions): ... + def client_unpause(self, **kwargs: _CommandOptions): ... + def command(self, **kwargs: _CommandOptions): ... + def command_info(self, **kwargs: _CommandOptions): ... + def command_count(self, **kwargs: _CommandOptions): ... + def config_get(self, pattern: str = ..., **kwargs: _CommandOptions): ... + def config_set(self, name, value, **kwargs: _CommandOptions): ... + def config_resetstat(self, **kwargs: _CommandOptions): ... + def config_rewrite(self, **kwargs: _CommandOptions): ... + def dbsize(self, **kwargs: _CommandOptions) -> int: ... + def debug_object(self, key, **kwargs: _CommandOptions): ... + def debug_segfault(self, **kwargs: _CommandOptions): ... + def echo(self, value: _Value, **kwargs: _CommandOptions) -> bytes: ... + def flushall(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> bool: ... + def flushdb(self, asynchronous: bool = ..., **kwargs: _CommandOptions) -> bool: ... + def sync(self): ... + def psync(self, replicationid, offset): ... 
+ def swapdb(self, first, second, **kwargs: _CommandOptions): ... + def select(self, index, **kwargs: _CommandOptions): ... + def info(self, section: _Key | None = ..., **kwargs: _CommandOptions) -> Mapping[str, Any]: ... + def lastsave(self, **kwargs: _CommandOptions): ... + def lolwut(self, *version_numbers: _Value, **kwargs: _CommandOptions) -> bytes: ... + def reset(self) -> None: ... + def migrate( + self, + host, + port, + keys, + destination_db, + timeout, + copy: bool = ..., + replace: bool = ..., + auth: Any | None = ..., + **kwargs: _CommandOptions, + ): ... + def object(self, infotype, key, **kwargs: _CommandOptions): ... + def memory_doctor(self, **kwargs: _CommandOptions): ... + def memory_help(self, **kwargs: _CommandOptions): ... + def memory_stats(self, **kwargs: _CommandOptions) -> dict[str, Any]: ... + def memory_malloc_stats(self, **kwargs: _CommandOptions): ... + def memory_usage(self, key, samples: Any | None = ..., **kwargs: _CommandOptions): ... + def memory_purge(self, **kwargs: _CommandOptions): ... + def ping(self, **kwargs: _CommandOptions) -> bool: ... + def quit(self, **kwargs: _CommandOptions): ... + def replicaof(self, *args, **kwargs: _CommandOptions): ... + def save(self, **kwargs: _CommandOptions) -> bool: ... + def shutdown(self, save: bool = ..., nosave: bool = ..., **kwargs: _CommandOptions) -> None: ... + def slaveof(self, host: Any | None = ..., port: Any | None = ..., **kwargs: _CommandOptions): ... + def slowlog_get(self, num: Any | None = ..., **kwargs: _CommandOptions): ... + def slowlog_len(self, **kwargs: _CommandOptions): ... + def slowlog_reset(self, **kwargs: _CommandOptions): ... + def time(self, **kwargs: _CommandOptions): ... + def wait(self, num_replicas, timeout, **kwargs: _CommandOptions): ... + +class BasicKeyCommands(Generic[_StrType]): + def append(self, key, value): ... + def bitcount(self, key: _Key, start: int | None = ..., end: int | None = ...) -> int: ... 
+ def bitfield(self, key, default_overflow: Any | None = ...): ... + def bitop(self, operation, dest, *keys): ... + def bitpos(self, key, bit, start=..., end=...): ... + def copy(self, source, destination, destination_db: Any | None = ..., replace: bool = ...): ... + def decr(self, name, amount: int = ...) -> int: ... + def decrby(self, name, amount: int = ...) -> int: ... + def delete(self, *names: _Key) -> int: ... + def __delitem__(self, name: _Key) -> None: ... + def dump(self, name: _Key) -> _StrType | None: ... + def exists(self, *names: _Key) -> int: ... + __contains__ = exists + def expire(self, name: _Key, time: int | timedelta) -> bool: ... + def expireat(self, name, when): ... + def get(self, name: _Key) -> _StrType | None: ... + def getdel(self, name: _Key) -> _StrType | None: ... + def getex( + self, + name, + ex: Any | None = ..., + px: Any | None = ..., + exat: Any | None = ..., + pxat: Any | None = ..., + persist: bool = ..., + ): ... + def __getitem__(self, name: str): ... + def getbit(self, name: _Key, offset: int) -> int: ... + def getrange(self, key, start, end): ... + def getset(self, name, value) -> _StrType | None: ... + def incr(self, name: _Key, amount: int = ...) -> int: ... + def incrby(self, name: _Key, amount: int = ...) -> int: ... + def incrbyfloat(self, name: _Key, amount: float = ...) -> float: ... + def keys(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> list[_StrType]: ... + def lmove( + self, first_list: _Key, second_list: _Key, src: Literal["LEFT", "RIGHT"] = ..., dest: Literal["LEFT", "RIGHT"] = ... + ) -> _Value: ... + def blmove( + self, + first_list: _Key, + second_list: _Key, + timeout: float, + src: Literal["LEFT", "RIGHT"] = ..., + dest: Literal["LEFT", "RIGHT"] = ..., + ) -> _Value | None: ... + def mget(self, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + def mset(self, mapping: Mapping[_Key, _Value]) -> Literal[True]: ... 
+ def msetnx(self, mapping: Mapping[_Key, _Value]) -> bool: ... + def move(self, name: _Key, db: int) -> bool: ... + def persist(self, name: _Key) -> bool: ... + def pexpire(self, name: _Key, time: int | timedelta) -> Literal[1, 0]: ... + def pexpireat(self, name: _Key, when: int | datetime) -> Literal[1, 0]: ... + def psetex(self, name, time_ms, value): ... + def pttl(self, name: _Key) -> int: ... + def hrandfield(self, key, count: Any | None = ..., withvalues: bool = ...): ... + def randomkey(self, **kwargs: _CommandOptions): ... + def rename(self, src, dst): ... + def renamenx(self, src, dst): ... + def restore( + self, name, ttl, value, replace: bool = ..., absttl: bool = ..., idletime: Any | None = ..., frequency: Any | None = ... + ): ... + def set( + self, + name: _Key, + value: _Value, + ex: None | int | timedelta = ..., + px: None | int | timedelta = ..., + nx: bool = ..., + xx: bool = ..., + keepttl: bool = ..., + get: bool = ..., + exat: Any | None = ..., + pxat: Any | None = ..., + ) -> bool | None: ... + def __setitem__(self, name, value) -> None: ... + def setbit(self, name: _Key, offset: int, value: int) -> int: ... + def setex(self, name: _Key, time: int | timedelta, value: _Value) -> bool: ... + def setnx(self, name: _Key, value: _Value) -> bool: ... + def setrange(self, name, offset, value): ... + def stralgo( + self, + algo, + value1, + value2, + specific_argument: str = ..., + len: bool = ..., + idx: bool = ..., + minmatchlen: Any | None = ..., + withmatchlen: bool = ..., + **kwargs: _CommandOptions, + ): ... + def strlen(self, name): ... + def substr(self, name, start, end: int = ...): ... + def touch(self, *args): ... + def ttl(self, name: _Key) -> int: ... + def type(self, name): ... + def watch(self, *names): ... + def unwatch(self): ... + def unlink(self, *names: _Key) -> int: ... + +class ListCommands(Generic[_StrType]): + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) 
-> tuple[_StrType, _StrType]: ... + @overload + def blpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: Literal[0] | None = ...) -> tuple[_StrType, _StrType]: ... + @overload + def brpop(self, keys: _Value | Iterable[_Value], timeout: float) -> tuple[_StrType, _StrType] | None: ... + def brpoplpush(self, src, dst, timeout: int | None = ...): ... + def lindex(self, name: _Key, index: int) -> _StrType | None: ... + def linsert( + self, name: _Key, where: Literal["BEFORE", "AFTER", "before", "after"], refvalue: _Value, value: _Value + ) -> int: ... + def llen(self, name: _Key) -> int: ... + def lpop(self, name, count: int | None = ...): ... + def lpush(self, name: _Value, *values: _Value) -> int: ... + def lpushx(self, name, value): ... + def lrange(self, name: _Key, start: int, end: int) -> list[_StrType]: ... + def lrem(self, name: _Key, count: int, value: _Value) -> int: ... + def lset(self, name: _Key, index: int, value: _Value) -> bool: ... + def ltrim(self, name: _Key, start: int, end: int) -> bool: ... + def rpop(self, name, count: int | None = ...): ... + def rpoplpush(self, src, dst): ... + def rpush(self, name: _Value, *values: _Value) -> int: ... + def rpushx(self, name, value): ... + def lpos(self, name, value, rank: Any | None = ..., count: Any | None = ..., maxlen: Any | None = ...): ... + @overload + def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + store: None = ..., + groups: bool = ..., + ) -> list[_StrType]: ... + @overload + def sort( + self, + name: _Key, + start: int | None = ..., + num: int | None = ..., + by: _Key | None = ..., + get: _Key | Sequence[_Key] | None = ..., + desc: bool = ..., + alpha: bool = ..., + *, + store: _Key, + groups: bool = ..., + ) -> int: ... 
+ @overload + def sort( + self, + name: _Key, + start: int | None, + num: int | None, + by: _Key | None, + get: _Key | Sequence[_Key] | None, + desc: bool, + alpha: bool, + store: _Key, + groups: bool = ..., + ) -> int: ... + +class ScanCommands(Generic[_StrType]): + def scan( + self, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + _type: str | None = ..., + **kwargs: _CommandOptions, + ) -> tuple[int, list[_StrType]]: ... + def scan_iter( + self, match: _Key | None = ..., count: int | None = ..., _type: str | None = ..., **kwargs: _CommandOptions + ) -> Iterator[_StrType]: ... + def sscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, list[_StrType]]: ... + def sscan_iter(self, name: _Key, match: _Key | None = ..., count: int | None = ...) -> Iterator[_StrType]: ... + def hscan( + self, name: _Key, cursor: int = ..., match: _Key | None = ..., count: int | None = ... + ) -> tuple[int, dict[_StrType, _StrType]]: ... + def hscan_iter( + self, name: _Key, match: _Key | None = ..., count: int | None = ... + ) -> Iterator[tuple[_StrType, _StrType]]: ... + def zscan( + self, + name: _Key, + cursor: int = ..., + match: _Key | None = ..., + count: int | None = ..., + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + ) -> tuple[int, list[tuple[_StrType, _ScoreCastFuncReturn]]]: ... + def zscan_iter( + self, + name: _Key, + match: _Key | None = ..., + count: int | None = ..., + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + ) -> Iterator[tuple[_StrType, _ScoreCastFuncReturn]]: ... + +class SetCommands(Generic[_StrType]): + def sadd(self, name: _Key, *values: _Value) -> int: ... + def scard(self, name: _Key) -> int: ... + def sdiff(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + def sdiffstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... 
+ def sinter(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + def sinterstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + def sismember(self, name: _Key, value: _Value) -> bool: ... + def smembers(self, name: _Key) -> builtins.set[_StrType]: ... + def smismember(self, name, values, *args): ... + def smove(self, src: _Key, dst: _Key, value: _Value) -> bool: ... + @overload + def spop(self, name: _Key, count: None = ...) -> _Value | None: ... + @overload + def spop(self, name: _Key, count: int) -> list[_Value]: ... + @overload + def srandmember(self, name: _Key, number: None = ...) -> _Value | None: ... + @overload + def srandmember(self, name: _Key, number: int) -> list[_Value]: ... + def srem(self, name: _Key, *values: _Value) -> int: ... + def sunion(self, keys: _Key | Iterable[_Key], *args: _Key) -> builtins.set[_Value]: ... + def sunionstore(self, dest: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> int: ... + +class StreamCommands: + def xack(self, name, groupname, *ids): ... + def xadd( + self, + name, + fields, + id: str = ..., + maxlen=..., + approximate: bool = ..., + nomkstream: bool = ..., + minid: Any | None = ..., + limit: Any | None = ..., + ): ... + def xautoclaim( + self, name, groupname, consumername, min_idle_time, start_id: int = ..., count: Any | None = ..., justid: bool = ... + ): ... + def xclaim( + self, name, groupname, consumername, min_idle_time, message_ids, idle=..., time=..., retrycount=..., force=..., justid=... + ): ... + def xdel(self, name, *ids): ... + def xgroup_create(self, name, groupname, id: str = ..., mkstream: bool = ...): ... + def xgroup_delconsumer(self, name, groupname, consumername): ... + def xgroup_destroy(self, name, groupname): ... + def xgroup_createconsumer(self, name, groupname, consumername): ... + def xgroup_setid(self, name, groupname, id): ... + def xinfo_consumers(self, name, groupname): ... + def xinfo_groups(self, name): ... 
+ def xinfo_stream(self, name, full: bool = ...): ... + def xlen(self, name: _Key) -> int: ... + def xpending(self, name, groupname): ... + def xpending_range( + self, + name, + groupname, + idle: Any | None = ..., + min: Any | None = ..., + max: Any | None = ..., + count: int | None = ..., + consumername: Any | None = ..., + ): ... + def xrange(self, name, min: str = ..., max: str = ..., count: Any | None = ...): ... + def xread(self, streams, count: Any | None = ..., block: Any | None = ...): ... + def xreadgroup( + self, groupname, consumername, streams, count: Any | None = ..., block: Any | None = ..., noack: bool = ... + ): ... + def xrevrange(self, name, max: str = ..., min: str = ..., count: Any | None = ...): ... + def xtrim( + self, name, maxlen: Any | None = ..., approximate: bool = ..., minid: Any | None = ..., limit: Any | None = ... + ): ... + +class SortedSetCommands(Generic[_StrType]): + def zadd( + self, + name: _Key, + mapping: Mapping[_Key, _Value], + nx: bool = ..., + xx: bool = ..., + ch: bool = ..., + incr: bool = ..., + gt: Any | None = ..., + lt: Any | None = ..., + ) -> int: ... + def zcard(self, name: _Key) -> int: ... + def zcount(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zdiff(self, keys, withscores: bool = ...): ... + def zdiffstore(self, dest, keys): ... + def zincrby(self, name: _Key, amount: float, value: _Value) -> float: ... + def zinter(self, keys, aggregate: Any | None = ..., withscores: bool = ...): ... + def zinterstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> int: ... + def zlexcount(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zpopmax(self, name: _Key, count: int | None = ...) -> list[tuple[_StrType, float]]: ... + def zpopmin(self, name: _Key, count: int | None = ...) -> list[tuple[_StrType, float]]: ... + def zrandmember(self, key, count: Any | None = ..., withscores: bool = ...): ... 
+ @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> tuple[_StrType, _StrType, float]: ... + @overload + def bzpopmax(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: Literal[0] = ...) -> tuple[_StrType, _StrType, float]: ... + @overload + def bzpopmin(self, keys: _Key | Iterable[_Key], timeout: float) -> tuple[_StrType, _StrType, float] | None: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrange( + self, + name: _Key, + start: int, + end: int, + desc: bool = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + byscore: bool = ..., + bylex: bool = ..., + offset: int | None = ..., + num: int | None = ..., + ) -> list[_StrType]: ... + @overload + def zrevrange( + self, + name: _Key, + start: int, + end: int, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrevrange( + self, name: _Key, start: int, end: int, withscores: bool = ..., score_cast_func: Callable[[Any], Any] = ... + ) -> list[_StrType]: ... 
+ def zrangestore( + self, + dest, + name, + start, + end, + byscore: bool = ..., + bylex: bool = ..., + desc: bool = ..., + offset: Any | None = ..., + num: Any | None = ..., + ): ... + def zrangebylex( + self, name: _Key, min: _Value, max: _Value, start: int | None = ..., num: int | None = ... + ) -> list[_StrType]: ... + def zrevrangebylex( + self, name: _Key, max: _Value, min: _Value, start: int | None = ..., num: int | None = ... + ) -> list[_StrType]: ... + @overload + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrangebyscore( + self, + name: _Key, + min: _Value, + max: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> list[_StrType]: ... + @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + *, + withscores: Literal[True], + score_cast_func: Callable[[_StrType], _ScoreCastFuncReturn] = ..., + ) -> list[tuple[_StrType, _ScoreCastFuncReturn]]: ... + @overload + def zrevrangebyscore( + self, + name: _Key, + max: _Value, + min: _Value, + start: int | None = ..., + num: int | None = ..., + withscores: bool = ..., + score_cast_func: Callable[[_StrType], Any] = ..., + ) -> list[_StrType]: ... + def zrank(self, name: _Key, value: _Value) -> int | None: ... + def zrem(self, name: _Key, *values: _Value) -> int: ... + def zremrangebylex(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zremrangebyrank(self, name: _Key, min: int, max: int) -> int: ... + def zremrangebyscore(self, name: _Key, min: _Value, max: _Value) -> int: ... + def zrevrank(self, name: _Key, value: _Value) -> int | None: ... 
+ def zscore(self, name: _Key, value: _Value) -> float | None: ... + def zunion(self, keys, aggregate: Any | None = ..., withscores: bool = ...): ... + def zunionstore(self, dest: _Key, keys: Iterable[_Key], aggregate: Literal["SUM", "MIN", "MAX"] | None = ...) -> int: ... + def zmscore(self, key, members): ... + +class HyperlogCommands: + def pfadd(self, name: _Key, *values: _Value) -> int: ... + def pfcount(self, name: _Key) -> int: ... + def pfmerge(self, dest: _Key, *sources: _Key) -> bool: ... + +class HashCommands(Generic[_StrType]): + def hdel(self, name: _Key, *keys: _Key) -> int: ... + def hexists(self, name: _Key, key: _Key) -> bool: ... + def hget(self, name: _Key, key: _Key) -> _StrType | None: ... + def hgetall(self, name: _Key) -> dict[_StrType, _StrType]: ... + def hincrby(self, name: _Key, key: _Key, amount: int = ...) -> int: ... + def hincrbyfloat(self, name: _Key, key: _Key, amount: float = ...) -> float: ... + def hkeys(self, name: _Key) -> list[_StrType]: ... + def hlen(self, name: _Key) -> int: ... + @overload + def hset(self, name: _Key, key: _Key, value: _Value, mapping: Mapping[_Key, _Value] | None = ...) -> int: ... + @overload + def hset(self, name: _Key, key: None, value: None, mapping: Mapping[_Key, _Value]) -> int: ... + @overload + def hset(self, name: _Key, *, mapping: Mapping[_Key, _Value]) -> int: ... + def hsetnx(self, name: _Key, key: _Key, value: _Value) -> int: ... + def hmset(self, name: _Key, mapping: Mapping[_Key, _Value]) -> bool: ... + def hmget(self, name: _Key, keys: _Key | Iterable[_Key], *args: _Key) -> list[_StrType | None]: ... + def hvals(self, name: _Key) -> list[_StrType]: ... + def hstrlen(self, name, key): ... + +class PubSubCommands: + def publish(self, channel: _Key, message: _Key, **kwargs: _CommandOptions) -> int: ... + def pubsub_channels(self, pattern: _Key = ..., **kwargs: _CommandOptions) -> list[str]: ... + def pubsub_numpat(self, **kwargs: _CommandOptions) -> int: ... 
+ def pubsub_numsub(self, *args: _Key, **kwargs: _CommandOptions) -> list[tuple[str, int]]: ... + +class ScriptCommands(Generic[_StrType]): + def eval(self, script, numkeys, *keys_and_args): ... + def evalsha(self, sha, numkeys, *keys_and_args): ... + def script_exists(self, *args): ... + def script_debug(self, *args): ... + def script_flush(self, sync_type: Any | None = ...): ... + def script_kill(self): ... + def script_load(self, script): ... + def register_script(self, script: str | _StrType) -> Script: ... + +class GeoCommands: + def geoadd(self, name, values, nx: bool = ..., xx: bool = ..., ch: bool = ...): ... + def geodist(self, name, place1, place2, unit: Any | None = ...): ... + def geohash(self, name, *values): ... + def geopos(self, name, *values): ... + def georadius( + self, + name, + longitude, + latitude, + radius, + unit: Any | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Any | None = ..., + sort: Any | None = ..., + store: Any | None = ..., + store_dist: Any | None = ..., + any: bool = ..., + ): ... + def georadiusbymember( + self, + name, + member, + radius, + unit: Any | None = ..., + withdist: bool = ..., + withcoord: bool = ..., + withhash: bool = ..., + count: Any | None = ..., + sort: Any | None = ..., + store: Any | None = ..., + store_dist: Any | None = ..., + any: bool = ..., + ): ... + def geosearch( + self, + name, + member: Any | None = ..., + longitude: Any | None = ..., + latitude: Any | None = ..., + unit: str = ..., + radius: Any | None = ..., + width: Any | None = ..., + height: Any | None = ..., + sort: Any | None = ..., + count: Any | None = ..., + any: bool = ..., + withcoord: bool = ..., + withdist: bool = ..., + withhash: bool = ..., + ): ... 
+ def geosearchstore( + self, + dest, + name, + member: Any | None = ..., + longitude: Any | None = ..., + latitude: Any | None = ..., + unit: str = ..., + radius: Any | None = ..., + width: Any | None = ..., + height: Any | None = ..., + sort: Any | None = ..., + count: Any | None = ..., + any: bool = ..., + storedist: bool = ..., + ): ... + +class ModuleCommands: + def module_load(self, path, *args): ... + def module_unload(self, name): ... + def module_list(self): ... + def command_info(self): ... + def command_count(self): ... + def command_getkeys(self, *args): ... + def command(self): ... + +class Script: + def __init__(self, registered_client, script) -> None: ... + def __call__(self, keys=..., args=..., client: Any | None = ...): ... + +class BitFieldOperation: + def __init__(self, client, key, default_overflow: Any | None = ...): ... + def reset(self) -> None: ... + def overflow(self, overflow): ... + def incrby(self, fmt, offset, increment, overflow: Any | None = ...): ... + def get(self, fmt, offset): ... + def set(self, fmt, offset, value): ... + @property + def command(self): ... + def execute(self): ... + +class ClusterCommands: + def cluster(self, cluster_arg: str, *args, **kwargs: _CommandOptions): ... + def readwrite(self, **kwargs: _CommandOptions) -> bool: ... + def readonly(self, **kwargs: _CommandOptions) -> bool: ... + +class DataAccessCommands( + BasicKeyCommands[_StrType], + HyperlogCommands, + HashCommands[_StrType], + GeoCommands, + ListCommands[_StrType], + ScanCommands[_StrType], + SetCommands[_StrType], + StreamCommands, + SortedSetCommands[_StrType], + Generic[_StrType], +): ... +class CoreCommands( + ACLCommands[_StrType], + ClusterCommands, + DataAccessCommands[_StrType], + ManagementCommands, + ModuleCommands, + PubSubCommands, + ScriptCommands[_StrType], + Generic[_StrType], +): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/__init__.pyi new file mode 100644 index 000000000000..2f4915e3bef3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/__init__.pyi @@ -0,0 +1,26 @@ +from typing import Any + +from .commands import GraphCommands as GraphCommands +from .edge import Edge as Edge +from .node import Node as Node +from .path import Path as Path + +class Graph(GraphCommands): + NAME: Any + client: Any + execute_command: Any + nodes: Any + edges: Any + version: int + def __init__(self, client, name=...) -> None: ... + @property + def name(self): ... + def get_label(self, idx): ... + def get_relation(self, idx): ... + def get_property(self, idx): ... + def add_node(self, node) -> None: ... + def add_edge(self, edge) -> None: ... + def call_procedure(self, procedure, *args, read_only: bool = ..., **kwagrs): ... + def labels(self): ... + def relationshipTypes(self): ... + def propertyKeys(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/commands.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/commands.pyi new file mode 100644 index 000000000000..baa061374d29 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/commands.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class GraphCommands: + def commit(self): ... + version: Any + def query(self, q, params: Any | None = ..., timeout: Any | None = ..., read_only: bool = ..., profile: bool = ...): ... + def merge(self, pattern): ... + def delete(self): ... + nodes: Any + edges: Any + def flush(self) -> None: ... + def explain(self, query, params: Any | None = ...): ... + def bulk(self, **kwargs) -> None: ... + def profile(self, query): ... + def slowlog(self): ... 
+ def config(self, name, value: Any | None = ..., set: bool = ...): ... + def list_keys(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/edge.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/edge.pyi new file mode 100644 index 000000000000..22543f5223a9 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/edge.pyi @@ -0,0 +1,11 @@ +from typing import Any + +class Edge: + id: Any + relation: Any + properties: Any + src_node: Any + dest_node: Any + def __init__(self, src_node, relation, dest_node, edge_id: Any | None = ..., properties: Any | None = ...) -> None: ... + def toString(self): ... + def __eq__(self, rhs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/exceptions.pyi new file mode 100644 index 000000000000..6069e0555848 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/exceptions.pyi @@ -0,0 +1,5 @@ +from typing import Any + +class VersionMismatchException(Exception): + version: Any + def __init__(self, version) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/node.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/node.pyi new file mode 100644 index 000000000000..0b14fbae6d40 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/node.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class Node: + id: Any + alias: Any + label: Any + labels: Any + properties: Any + def __init__( + self, + node_id: Any | None = ..., + alias: Any | None = ..., + label: str | list[str] | None = ..., + properties: Any | None = ..., + ) -> None: ... + def toString(self): ... + def __eq__(self, rhs): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/path.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/path.pyi new file mode 100644 index 000000000000..69106f89667b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/path.pyi @@ -0,0 +1,18 @@ +from typing import Any + +class Path: + append_type: Any + def __init__(self, nodes, edges) -> None: ... + @classmethod + def new_empty_path(cls): ... + def nodes(self): ... + def edges(self): ... + def get_node(self, index): ... + def get_relationship(self, index): ... + def first_node(self): ... + def last_node(self): ... + def edge_count(self): ... + def nodes_count(self): ... + def add_node(self, node): ... + def add_edge(self, edge): ... + def __eq__(self, other): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/query_result.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/query_result.pyi new file mode 100644 index 000000000000..53cf3ebf44f7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/graph/query_result.pyi @@ -0,0 +1,75 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +LABELS_ADDED: str +NODES_CREATED: str +NODES_DELETED: str +RELATIONSHIPS_DELETED: str +PROPERTIES_SET: str +RELATIONSHIPS_CREATED: str +INDICES_CREATED: str +INDICES_DELETED: str +CACHED_EXECUTION: str +INTERNAL_EXECUTION_TIME: str +STATS: Any + +class ResultSetColumnTypes: + COLUMN_UNKNOWN: ClassVar[Literal[0]] + COLUMN_SCALAR: ClassVar[Literal[1]] + COLUMN_NODE: ClassVar[Literal[2]] + COLUMN_RELATION: ClassVar[Literal[3]] + +class ResultSetScalarTypes: + VALUE_UNKNOWN: ClassVar[Literal[0]] + VALUE_NULL: ClassVar[Literal[1]] + VALUE_STRING: ClassVar[Literal[2]] + VALUE_INTEGER: ClassVar[Literal[3]] + VALUE_BOOLEAN: ClassVar[Literal[4]] + VALUE_DOUBLE: ClassVar[Literal[5]] + 
VALUE_ARRAY: ClassVar[Literal[6]] + VALUE_EDGE: ClassVar[Literal[7]] + VALUE_NODE: ClassVar[Literal[8]] + VALUE_PATH: ClassVar[Literal[9]] + VALUE_MAP: ClassVar[Literal[10]] + VALUE_POINT: ClassVar[Literal[11]] + +class QueryResult: + graph: Any + header: Any + result_set: Any + def __init__(self, graph, response, profile: bool = ...) -> None: ... + def parse_results(self, raw_result_set) -> None: ... + statistics: Any + def parse_statistics(self, raw_statistics) -> None: ... + def parse_header(self, raw_result_set): ... + def parse_records(self, raw_result_set): ... + def parse_entity_properties(self, props): ... + def parse_string(self, cell): ... + def parse_node(self, cell): ... + def parse_edge(self, cell): ... + def parse_path(self, cell): ... + def parse_map(self, cell): ... + def parse_point(self, cell): ... + def parse_scalar(self, cell): ... + def parse_profile(self, response) -> None: ... + def is_empty(self): ... + @property + def labels_added(self): ... + @property + def nodes_created(self): ... + @property + def nodes_deleted(self): ... + @property + def properties_set(self): ... + @property + def relationships_created(self): ... + @property + def relationships_deleted(self): ... + @property + def indices_created(self): ... + @property + def indices_deleted(self): ... + @property + def cached_execution(self): ... + @property + def run_time_ms(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/helpers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/helpers.pyi new file mode 100644 index 000000000000..d359ff0f86ae --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/helpers.pyi @@ -0,0 +1,10 @@ +def list_or_args(keys, args): ... +def nativestr(x): ... +def delist(x): ... +def parse_to_list(response): ... +def parse_list_to_dict(response): ... +def parse_to_dict(response): ... +def random_string(length: int = ...) -> str: ... 
+def quote_string(v): ... +def decodeDictKeys(obj): ... +def stringify_param_value(value): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/__init__.pyi new file mode 100644 index 000000000000..e67c7bfef39e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/__init__.pyi @@ -0,0 +1,14 @@ +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import JSONCommands + +class JSON(JSONCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + MODULE_VERSION: Any | None + def __init__(self, client, version: Any | None = ..., decoder=..., encoder=...) -> None: ... + def pipeline(self, transaction: bool = ..., shard_hint: Any | None = ...) -> Pipeline: ... + +class Pipeline(JSONCommands, ClientPipeline): ... # type: ignore[misc] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/commands.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/commands.pyi new file mode 100644 index 000000000000..6f830282abcd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/commands.pyi @@ -0,0 +1,30 @@ +from typing import Any + +class JSONCommands: + def arrappend(self, name, path=..., *args): ... + def arrindex(self, name, path, scalar, start: int = ..., stop: int = ...): ... + def arrinsert(self, name, path, index, *args): ... + def arrlen(self, name, path=...): ... + def arrpop(self, name, path=..., index: int = ...): ... + def arrtrim(self, name, path, start, stop): ... + def type(self, name, path=...): ... + def resp(self, name, path=...): ... + def objkeys(self, name, path=...): ... + def objlen(self, name, path=...): ... + def numincrby(self, name, path, number): ... + def nummultby(self, name, path, number): ... 
+ def clear(self, name, path=...): ... + def delete(self, key, path=...): ... + forget = delete + def get(self, name, *args, no_escape: bool = ...): ... + def mget(self, keys, path): ... + def set(self, name, path, obj, nx: bool = ..., xx: bool = ..., decode_keys: bool = ...): ... + def set_file(self, name, path, file_name, nx: bool = ..., xx: bool = ..., decode_keys: bool = ...): ... + def set_path(self, json_path, root_folder, nx: bool = ..., xx: bool = ..., decode_keys: bool = ...): ... + def strlen(self, name, path: Any | None = ...): ... + def toggle(self, name, path=...): ... + def strappend(self, name, value, path=...): ... + def debug(self, subcommand, key: Any | None = ..., path=...): ... + def jsonget(self, *args, **kwargs): ... + def jsonmget(self, *args, **kwargs): ... + def jsonset(self, *args, **kwargs): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/decoders.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/decoders.pyi new file mode 100644 index 000000000000..ccea24386cb4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/decoders.pyi @@ -0,0 +1,4 @@ +def bulk_of_jsons(d): ... +def decode_dict_keys(obj): ... +def unstring(obj): ... +def decode_list(b): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/path.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/path.pyi new file mode 100644 index 000000000000..d33df3045bea --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/json/path.pyi @@ -0,0 +1,5 @@ +class Path: + strPath: str + @staticmethod + def rootPath() -> str: ... + def __init__(self, path: str) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/parser.pyi new file mode 100644 index 000000000000..58094e5d73ff --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/parser.pyi @@ -0,0 +1,8 @@ +from typing import Any + +class CommandsParser: + initialized: bool + commands: Any + def __init__(self, redis_connection) -> None: ... + def initialize(self, r) -> None: ... + def get_keys(self, redis_conn, *args): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/redismodules.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/redismodules.pyi new file mode 100644 index 000000000000..6f535b2315fb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/redismodules.pyi @@ -0,0 +1,14 @@ +from .json import JSON +from .search import Search +from .timeseries import TimeSeries + +class RedisModuleCommands: + def json(self, encoder=..., decoder=...) -> JSON: ... + def ft(self, index_name: str = ...) -> Search: ... + def ts(self) -> TimeSeries: ... + def bf(self): ... + def cf(self): ... + def cms(self): ... + def topk(self): ... + def tdigest(self): ... + def graph(self, index_name: str = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/__init__.pyi new file mode 100644 index 000000000000..5f84523057dc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/__init__.pyi @@ -0,0 +1,22 @@ +from typing import Any + +from .commands import SearchCommands + +class Search(SearchCommands): + class BatchIndexer: + def __init__(self, client, chunk_size: int = ...) -> None: ... 
+ def add_document( + self, + doc_id, + nosave: bool = ..., + score: float = ..., + payload: Any | None = ..., + replace: bool = ..., + partial: bool = ..., + no_create: bool = ..., + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = ..., replace: bool = ...): ... + def commit(self): ... + + def __init__(self, client, index_name: str = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/aggregation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/aggregation.pyi new file mode 100644 index 000000000000..0ccd50523891 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/aggregation.pyi @@ -0,0 +1,54 @@ +from typing import Any, ClassVar +from typing_extensions import Literal + +FIELDNAME: Any + +class Limit: + offset: Any + count: Any + def __init__(self, offset: int = ..., count: int = ...) -> None: ... + def build_args(self): ... + +class Reducer: + NAME: ClassVar[None] + def __init__(self, *args) -> None: ... + def alias(self, alias): ... + @property + def args(self): ... + +class SortDirection: + DIRSTRING: ClassVar[str | None] + field: Any + def __init__(self, field) -> None: ... + +class Asc(SortDirection): + DIRSTRING: ClassVar[Literal["ASC"]] + +class Desc(SortDirection): + DIRSTRING: ClassVar[Literal["DESC"]] + +class AggregateRequest: + def __init__(self, query: str = ...) -> None: ... + def load(self, *fields): ... + def group_by(self, fields, *reducers): ... + def apply(self, **kwexpr): ... + def limit(self, offset, num): ... + def sort_by(self, *fields, **kwargs): ... + def filter(self, expressions): ... + def with_schema(self): ... + def verbatim(self): ... + def cursor(self, count: int = ..., max_idle: float = ...): ... + def build_args(self): ... + +class Cursor: + cid: Any + max_idle: int + count: int + def __init__(self, cid) -> None: ... + def build_args(self): ... 
+ +class AggregateResult: + rows: Any + cursor: Any + schema: Any + def __init__(self, rows, cursor, schema) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/commands.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/commands.pyi new file mode 100644 index 000000000000..5a65c92642d8 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/commands.pyi @@ -0,0 +1,106 @@ +from collections.abc import Mapping +from typing import Any +from typing_extensions import Literal + +from .aggregation import AggregateRequest, AggregateResult, Cursor +from .query import Query +from .result import Result + +_QueryParams = Mapping[str, str | float] + +NUMERIC: Literal["NUMERIC"] + +CREATE_CMD: Literal["FT.CREATE"] +ALTER_CMD: Literal["FT.ALTER"] +SEARCH_CMD: Literal["FT.SEARCH"] +ADD_CMD: Literal["FT.ADD"] +ADDHASH_CMD: Literal["FT.ADDHASH"] +DROP_CMD: Literal["FT.DROP"] +EXPLAIN_CMD: Literal["FT.EXPLAIN"] +EXPLAINCLI_CMD: Literal["FT.EXPLAINCLI"] +DEL_CMD: Literal["FT.DEL"] +AGGREGATE_CMD: Literal["FT.AGGREGATE"] +PROFILE_CMD: Literal["FT.PROFILE"] +CURSOR_CMD: Literal["FT.CURSOR"] +SPELLCHECK_CMD: Literal["FT.SPELLCHECK"] +DICT_ADD_CMD: Literal["FT.DICTADD"] +DICT_DEL_CMD: Literal["FT.DICTDEL"] +DICT_DUMP_CMD: Literal["FT.DICTDUMP"] +GET_CMD: Literal["FT.GET"] +MGET_CMD: Literal["FT.MGET"] +CONFIG_CMD: Literal["FT.CONFIG"] +TAGVALS_CMD: Literal["FT.TAGVALS"] +ALIAS_ADD_CMD: Literal["FT.ALIASADD"] +ALIAS_UPDATE_CMD: Literal["FT.ALIASUPDATE"] +ALIAS_DEL_CMD: Literal["FT.ALIASDEL"] +INFO_CMD: Literal["FT.INFO"] +SUGADD_COMMAND: Literal["FT.SUGADD"] +SUGDEL_COMMAND: Literal["FT.SUGDEL"] +SUGLEN_COMMAND: Literal["FT.SUGLEN"] +SUGGET_COMMAND: Literal["FT.SUGGET"] +SYNUPDATE_CMD: Literal["FT.SYNUPDATE"] +SYNDUMP_CMD: Literal["FT.SYNDUMP"] + +NOOFFSETS: Literal["NOOFFSETS"] +NOFIELDS: Literal["NOFIELDS"] +STOPWORDS: Literal["STOPWORDS"] +WITHSCORES: 
Literal["WITHSCORES"] +FUZZY: Literal["FUZZY"] +WITHPAYLOADS: Literal["WITHPAYLOADS"] + +class SearchCommands: + def batch_indexer(self, chunk_size: int = ...): ... + def create_index( + self, + fields, + no_term_offsets: bool = ..., + no_field_flags: bool = ..., + stopwords: Any | None = ..., + definition: Any | None = ..., + max_text_fields: bool = ..., # added in 4.1.1 + temporary: Any | None = ..., # added in 4.1.1 + no_highlight: bool = ..., # added in 4.1.1 + no_term_frequencies: bool = ..., # added in 4.1.1 + skip_initial_scan: bool = ..., # added in 4.1.1 + ): ... + def alter_schema_add(self, fields): ... + def dropindex(self, delete_documents: bool = ...): ... + def add_document( + self, + doc_id, + nosave: bool = ..., + score: float = ..., + payload: Any | None = ..., + replace: bool = ..., + partial: bool = ..., + language: Any | None = ..., + no_create: bool = ..., + **fields, + ): ... + def add_document_hash(self, doc_id, score: float = ..., language: Any | None = ..., replace: bool = ...): ... + def delete_document(self, doc_id, conn: Any | None = ..., delete_actual_document: bool = ...): ... + def load_document(self, id): ... + def get(self, *ids): ... + def info(self): ... + def get_params_args(self, query_params: _QueryParams) -> list[Any]: ... + def search(self, query: str | Query, query_params: _QueryParams | None = ...) -> Result: ... + def explain(self, query: str | Query, query_params: _QueryParams | None = ...): ... + def explain_cli(self, query): ... + def aggregate(self, query: AggregateRequest | Cursor, query_params: _QueryParams | None = ...) -> AggregateResult: ... + def profile(self, query, limited: bool = ...): ... + def spellcheck(self, query, distance: Any | None = ..., include: Any | None = ..., exclude: Any | None = ...): ... + def dict_add(self, name, *terms): ... + def dict_del(self, name, *terms): ... + def dict_dump(self, name): ... + def config_set(self, option, value): ... + def config_get(self, option): ... 
+ def tagvals(self, tagfield): ... + def aliasadd(self, alias): ... + def aliasupdate(self, alias): ... + def aliasdel(self, alias): ... + def sugadd(self, key, *suggestions, **kwargs): ... + def suglen(self, key): ... + def sugdel(self, key, string): ... + def sugget(self, key, prefix, fuzzy: bool = ..., num: int = ..., with_scores: bool = ..., with_payloads: bool = ...): ... + def synupdate(self, groupid, skipinitial: bool = ..., *terms): ... + def syndump(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/query.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/query.pyi new file mode 100644 index 000000000000..b41ee067d3c7 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/query.pyi @@ -0,0 +1,47 @@ +from typing import Any + +class Query: + def __init__(self, query_string) -> None: ... + def query_string(self): ... + def limit_ids(self, *ids): ... + def return_fields(self, *fields): ... + def return_field(self, field, as_field: Any | None = ...): ... + def summarize( + self, fields: Any | None = ..., context_len: Any | None = ..., num_frags: Any | None = ..., sep: Any | None = ... + ): ... + def highlight(self, fields: Any | None = ..., tags: Any | None = ...): ... + def language(self, language): ... + def slop(self, slop): ... + def in_order(self): ... + def scorer(self, scorer): ... + def get_args(self): ... + def paging(self, offset, num): ... + def verbatim(self): ... + def no_content(self): ... + def no_stopwords(self): ... + def with_payloads(self): ... + def with_scores(self): ... + def limit_fields(self, *fields): ... + def add_filter(self, flt): ... + def sort_by(self, field, asc: bool = ...): ... + def expander(self, expander): ... + +class Filter: + args: Any + def __init__(self, keyword, field, *args) -> None: ... 
+ +class NumericFilter(Filter): + INF: str + NEG_INF: str + def __init__(self, field, minval, maxval, minExclusive: bool = ..., maxExclusive: bool = ...) -> None: ... + +class GeoFilter(Filter): + METERS: str + KILOMETERS: str + FEET: str + MILES: str + def __init__(self, field, lon, lat, radius, unit=...) -> None: ... + +class SortbyField: + args: Any + def __init__(self, field, asc: bool = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/result.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/result.pyi new file mode 100644 index 000000000000..2908b9a61dc6 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/search/result.pyi @@ -0,0 +1,7 @@ +from typing import Any + +class Result: + total: Any + duration: Any + docs: Any + def __init__(self, res, hascontent, duration: int = ..., has_payload: bool = ..., with_scores: bool = ...) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/sentinel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/sentinel.pyi new file mode 100644 index 000000000000..545642fac979 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/sentinel.pyi @@ -0,0 +1,14 @@ +class SentinelCommands: + def sentinel(self, *args): ... + def sentinel_get_master_addr_by_name(self, service_name): ... + def sentinel_master(self, service_name): ... + def sentinel_masters(self): ... + def sentinel_monitor(self, name, ip, port, quorum): ... + def sentinel_remove(self, name): ... + def sentinel_sentinels(self, service_name): ... + def sentinel_set(self, name, option, value): ... + def sentinel_slaves(self, service_name): ... + def sentinel_reset(self, pattern): ... + def sentinel_failover(self, new_master_name): ... + def sentinel_ckquorum(self, new_master_name): ... + def sentinel_flushconfig(self): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/__init__.pyi new file mode 100644 index 000000000000..fae4f849b17b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/__init__.pyi @@ -0,0 +1,13 @@ +from typing import Any + +from ...client import Pipeline as ClientPipeline +from .commands import TimeSeriesCommands + +class TimeSeries(TimeSeriesCommands): + MODULE_CALLBACKS: dict[str, Any] + client: Any + execute_command: Any + def __init__(self, client: Any | None = ..., **kwargs) -> None: ... + def pipeline(self, transaction: bool = ..., shard_hint: Any | None = ...) -> Pipeline: ... + +class Pipeline(TimeSeriesCommands, ClientPipeline): ... # type: ignore[misc] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/commands.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/commands.pyi new file mode 100644 index 000000000000..4a5a19af17ba --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/commands.pyi @@ -0,0 +1,95 @@ +from typing import Any +from typing_extensions import Literal + +ADD_CMD: Literal["TS.ADD"] +ALTER_CMD: Literal["TS.ALTER"] +CREATERULE_CMD: Literal["TS.CREATERULE"] +CREATE_CMD: Literal["TS.CREATE"] +DECRBY_CMD: Literal["TS.DECRBY"] +DELETERULE_CMD: Literal["TS.DELETERULE"] +DEL_CMD: Literal["TS.DEL"] +GET_CMD: Literal["TS.GET"] +INCRBY_CMD: Literal["TS.INCRBY"] +INFO_CMD: Literal["TS.INFO"] +MADD_CMD: Literal["TS.MADD"] +MGET_CMD: Literal["TS.MGET"] +MRANGE_CMD: Literal["TS.MRANGE"] +MREVRANGE_CMD: Literal["TS.MREVRANGE"] +QUERYINDEX_CMD: Literal["TS.QUERYINDEX"] +RANGE_CMD: Literal["TS.RANGE"] +REVRANGE_CMD: Literal["TS.REVRANGE"] + +class TimeSeriesCommands: + def create(self, key, **kwargs): ... 
+ def alter(self, key, **kwargs): ... + def add(self, key, timestamp, value, **kwargs): ... + def madd(self, ktv_tuples): ... + def incrby(self, key, value, **kwargs): ... + def decrby(self, key, value, **kwargs): ... + def delete(self, key, from_time, to_time): ... + def createrule(self, source_key, dest_key, aggregation_type, bucket_size_msec): ... + def deleterule(self, source_key, dest_key): ... + def range( + self, + key, + from_time, + to_time, + count: Any | None = ..., + aggregation_type: Any | None = ..., + bucket_size_msec: int = ..., + filter_by_ts: Any | None = ..., + filter_by_min_value: Any | None = ..., + filter_by_max_value: Any | None = ..., + align: Any | None = ..., + ): ... + def revrange( + self, + key, + from_time, + to_time, + count: Any | None = ..., + aggregation_type: Any | None = ..., + bucket_size_msec: int = ..., + filter_by_ts: Any | None = ..., + filter_by_min_value: Any | None = ..., + filter_by_max_value: Any | None = ..., + align: Any | None = ..., + ): ... + def mrange( + self, + from_time, + to_time, + filters, + count: Any | None = ..., + aggregation_type: Any | None = ..., + bucket_size_msec: int = ..., + with_labels: bool = ..., + filter_by_ts: Any | None = ..., + filter_by_min_value: Any | None = ..., + filter_by_max_value: Any | None = ..., + groupby: Any | None = ..., + reduce: Any | None = ..., + select_labels: Any | None = ..., + align: Any | None = ..., + ): ... + def mrevrange( + self, + from_time, + to_time, + filters, + count: Any | None = ..., + aggregation_type: Any | None = ..., + bucket_size_msec: int = ..., + with_labels: bool = ..., + filter_by_ts: Any | None = ..., + filter_by_min_value: Any | None = ..., + filter_by_max_value: Any | None = ..., + groupby: Any | None = ..., + reduce: Any | None = ..., + select_labels: Any | None = ..., + align: Any | None = ..., + ): ... + def get(self, key): ... + def mget(self, filters, with_labels: bool = ...): ... + def info(self, key): ... 
+ def queryindex(self, filters): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/info.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/info.pyi new file mode 100644 index 000000000000..425dd29d5024 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/info.pyi @@ -0,0 +1,17 @@ +from typing import Any + +class TSInfo: + rules: list[Any] + labels: list[Any] + sourceKey: Any | None + chunk_count: Any | None + memory_usage: Any | None + total_samples: Any | None + retention_msecs: Any | None + last_time_stamp: Any | None + first_time_stamp: Any | None + + max_samples_per_chunk: Any | None + chunk_size: Any | None + duplicate_policy: Any | None + def __init__(self, args) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/utils.pyi new file mode 100644 index 000000000000..4a0d52c4d5cc --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/commands/timeseries/utils.pyi @@ -0,0 +1,5 @@ +def list_to_dict(aList): ... +def parse_range(response): ... +def parse_m_range(response): ... +def parse_get(response): ... +def parse_m_get(response): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/connection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/connection.pyi index a8b4cc13c710..b8e466f8cfef 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/connection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/connection.pyi @@ -1,17 +1,18 @@ -from typing import Any, Mapping, Text, Tuple, Type +from _typeshed import Self +from typing import Any, Mapping + +from .retry import Retry ssl_available: Any -hiredis_version: Any -HIREDIS_SUPPORTS_CALLABLE_ERRORS: Any -HIREDIS_SUPPORTS_BYTE_BUFFER: Any -msg: Any -HIREDIS_USE_BYTE_BUFFER: Any SYM_STAR: Any SYM_DOLLAR: Any SYM_CRLF: Any SYM_EMPTY: Any SERVER_CLOSED_CONNECTION_ERROR: Any +# Options as passed to Pool.get_connection(). +_ConnectionPoolOptions = Any + class BaseParser: EXCEPTION_CLASSES: Any def parse_error(self, response): ... @@ -37,24 +38,24 @@ class PythonParser(BaseParser): def on_connect(self, connection): ... def on_disconnect(self): ... def can_read(self, timeout): ... - def read_response(self): ... + def read_response(self, disable_decoding: bool = ...): ... class HiredisParser(BaseParser): socket_read_size: Any def __init__(self, socket_read_size) -> None: ... def __del__(self): ... - def on_connect(self, connection): ... + def on_connect(self, connection, **kwargs): ... def on_disconnect(self): ... def can_read(self, timeout): ... def read_from_socket(self, timeout=..., raise_on_timeout: bool = ...) -> bool: ... - def read_response(self): ... + def read_response(self, disable_decoding: bool = ...): ... DefaultParser: Any class Encoder: def __init__(self, encoding, encoding_errors, decode_responses: bool) -> None: ... - def encode(self, value: Text | bytes | memoryview | bool | float) -> bytes: ... - def decode(self, value: Text | bytes | memoryview, force: bool = ...) -> Text: ... 
+ def encode(self, value: str | bytes | memoryview | bool | float) -> bytes: ... + def decode(self, value: str | bytes | memoryview, force: bool = ...) -> str: ... class Connection: description_format: Any @@ -68,44 +69,51 @@ class Connection: socket_keepalive: Any socket_keepalive_options: Any retry_on_timeout: Any + retry_on_error: Any encoding: Any encoding_errors: Any decode_responses: Any + retry: Retry + redis_connect_func: Any | None def __init__( self, - host: Text = ..., + host: str = ..., port: int = ..., db: int = ..., - password: Text | None = ..., + password: str | None = ..., socket_timeout: float | None = ..., socket_connect_timeout: float | None = ..., socket_keepalive: bool = ..., socket_keepalive_options: Mapping[str, int | str] | None = ..., socket_type: int = ..., retry_on_timeout: bool = ..., - encoding: Text = ..., - encoding_errors: Text = ..., + retry_on_error=..., + encoding: str = ..., + encoding_errors: str = ..., decode_responses: bool = ..., - parser_class: Type[BaseParser] = ..., + parser_class: type[BaseParser] = ..., socket_read_size: int = ..., health_check_interval: int = ..., - client_name: Text | None = ..., - username: Text | None = ..., + client_name: str | None = ..., + username: str | None = ..., + retry: Retry | None = ..., + redis_connect_func: Any | None = ..., ) -> None: ... def __del__(self): ... def register_connect_callback(self, callback): ... def clear_connect_callbacks(self): ... + def set_parser(self, parser_class): ... def connect(self): ... def on_connect(self): ... - def disconnect(self): ... + def disconnect(self, *args: object) -> None: ... # 'args' added in redis 4.1.2 def check_health(self) -> None: ... def send_packed_command(self, command, check_health: bool = ...): ... def send_command(self, *args): ... def can_read(self, timeout=...): ... - def read_response(self): ... + def read_response(self, disable_decoding: bool = ...): ... def pack_command(self, *args): ... def pack_commands(self, commands): ... 
- def repr_pieces(self) -> list[Tuple[Text, Text]]: ... + def repr_pieces(self) -> list[tuple[str, str]]: ... class SSLConnection(Connection): description_format: Any @@ -113,8 +121,27 @@ class SSLConnection(Connection): certfile: Any cert_reqs: Any ca_certs: Any + ca_path: Any | None + check_hostname: bool + certificate_password: Any | None + ssl_validate_ocsp: bool + ssl_validate_ocsp_stapled: bool # added in 4.1.1 + ssl_ocsp_context: Any | None # added in 4.1.1 + ssl_ocsp_expected_cert: Any | None # added in 4.1.1 def __init__( - self, ssl_keyfile=..., ssl_certfile=..., ssl_cert_reqs=..., ssl_ca_certs=..., ssl_check_hostname: bool = ..., **kwargs + self, + ssl_keyfile=..., + ssl_certfile=..., + ssl_cert_reqs=..., + ssl_ca_certs=..., + ssl_check_hostname: bool = ..., + ssl_ca_path: Any | None = ..., + ssl_password: Any | None = ..., + ssl_validate_ocsp: bool = ..., + ssl_validate_ocsp_stapled: bool = ..., # added in 4.1.1 + ssl_ocsp_context: Any | None = ..., # added in 4.1.1 + ssl_ocsp_expected_cert: Any | None = ..., # added in 4.1.1 + **kwargs, ) -> None: ... class UnixDomainSocketConnection(Connection): @@ -128,6 +155,7 @@ class UnixDomainSocketConnection(Connection): encoding: Any encoding_errors: Any decode_responses: Any + retry: Retry def __init__( self, path=..., @@ -135,29 +163,30 @@ class UnixDomainSocketConnection(Connection): username=..., password=..., socket_timeout=..., - encoding=..., - encoding_errors=..., - decode_responses=..., - retry_on_timeout=..., + encoding: str = ..., + encoding_errors: str = ..., + decode_responses: bool = ..., + retry_on_timeout: bool = ..., + retry_on_error=..., parser_class=..., socket_read_size: int = ..., health_check_interval: int = ..., client_name=..., + retry: Retry | None = ..., + redis_connect_func: Any | None = ..., ) -> None: ... - def repr_pieces(self) -> list[Tuple[Text, Text]]: ... - -def to_bool(value: object) -> bool: ... + def repr_pieces(self) -> list[tuple[str, str]]: ... 
class ConnectionPool: @classmethod - def from_url(cls, url: Text, db: int | None = ..., decode_components: bool = ..., **kwargs) -> ConnectionPool: ... + def from_url(cls: type[Self], url: str, *, db: int = ..., decode_components: bool = ..., **kwargs) -> Self: ... connection_class: Any connection_kwargs: Any max_connections: Any def __init__(self, connection_class=..., max_connections=..., **connection_kwargs) -> None: ... pid: Any def reset(self): ... - def get_connection(self, command_name, *keys, **options): ... + def get_connection(self, command_name, *keys, **options: _ConnectionPoolOptions): ... def make_connection(self): ... def release(self, connection): ... def disconnect(self, inuse_connections: bool = ...): ... @@ -172,6 +201,9 @@ class BlockingConnectionPool(ConnectionPool): pool: Any def reset(self): ... def make_connection(self): ... - def get_connection(self, command_name, *keys, **options): ... + def get_connection(self, command_name, *keys, **options: _ConnectionPoolOptions): ... def release(self, connection): ... def disconnect(self): ... + +def to_bool(value: object) -> bool: ... +def parse_url(url: str) -> dict[str, Any]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/crc.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/crc.pyi new file mode 100644 index 000000000000..af2e5f7f71cb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/crc.pyi @@ -0,0 +1,3 @@ +REDIS_CLUSTER_HASH_SLOTS: int + +def key_slot(key, bucket=...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/exceptions.pyi index 05f09ab64238..1820d3d9163d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/exceptions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/exceptions.pyi @@ -1,10 +1,8 @@ class RedisError(Exception): ... 
- -def __unicode__(self): ... - class AuthenticationError(RedisError): ... class ConnectionError(RedisError): ... class TimeoutError(RedisError): ... +class AuthorizationError(ConnectionError): ... class BusyLoadingError(ConnectionError): ... class InvalidResponse(RedisError): ... class ResponseError(RedisError): ... @@ -15,7 +13,30 @@ class NoScriptError(ResponseError): ... class ExecAbortError(ResponseError): ... class ReadOnlyError(ResponseError): ... class NoPermissionError(ResponseError): ... +class ModuleError(ResponseError): ... class LockError(RedisError, ValueError): ... class LockNotOwnedError(LockError): ... class ChildDeadlockedError(Exception): ... class AuthenticationWrongNumberOfArgsError(ResponseError): ... +class RedisClusterException(Exception): ... +class ClusterError(RedisError): ... + +class ClusterDownError(ClusterError, ResponseError): + args: tuple[str] + message: str + def __init__(self, resp: str) -> None: ... + +class AskError(ResponseError): + args: tuple[str] + message: str + slot_id: int + node_addr: tuple[str, int] + host: str + port: int + def __init__(self, resp: str) -> None: ... + +class TryAgainError(ResponseError): ... +class ClusterCrossSlotError(ResponseError): ... +class MovedError(AskError): ... +class MasterDownError(ClusterDownError): ... +class SlotNotCoveredError(RedisClusterException): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/lock.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/lock.pyi index e3dc25b6111c..7fad1f11ac79 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/lock.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/lock.pyi @@ -1,11 +1,20 @@ +from _typeshed import Self from types import TracebackType -from typing import Any, Text, Type, Union +from typing import Any, ClassVar, Protocol from redis.client import Redis -_TokenValue = Union[bytes, Text] +class _Local(Protocol): + token: str | bytes | None class Lock: + LUA_EXTEND_SCRIPT: ClassVar[str] + LUA_REACQUIRE_SCRIPT: ClassVar[str] + LUA_RELEASE_SCRIPT: ClassVar[str] + lua_extend: ClassVar[Any | None] + lua_reacquire: ClassVar[Any | None] + lua_release: ClassVar[Any | None] + local: _Local def __init__( self, redis: Redis[Any], @@ -17,18 +26,18 @@ class Lock: thread_local: bool = ..., ) -> None: ... def register_scripts(self) -> None: ... - def __enter__(self) -> Lock: ... + def __enter__(self: Self) -> Self: ... def __exit__( - self, exc_type: Type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None + self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None ) -> bool | None: ... def acquire( - self, blocking: bool | None = ..., blocking_timeout: None | int | float = ..., token: _TokenValue | None = ... + self, blocking: bool | None = ..., blocking_timeout: None | int | float = ..., token: str | bytes | None = ... ) -> bool: ... - def do_acquire(self, token: _TokenValue) -> bool: ... + def do_acquire(self, token: str | bytes) -> bool: ... def locked(self) -> bool: ... def owned(self) -> bool: ... def release(self) -> None: ... - def do_release(self, expected_token: _TokenValue) -> None: ... + def do_release(self, expected_token: str | bytes) -> None: ... 
def extend(self, additional_time: int | float, replace_ttl: bool = ...) -> bool: ... def do_extend(self, additional_time: int | float, replace_ttl: bool) -> bool: ... def reacquire(self) -> bool: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/ocsp.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/ocsp.pyi new file mode 100644 index 000000000000..41d87972076b --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/ocsp.pyi @@ -0,0 +1,13 @@ +from typing import Any + +class OCSPVerifier: + SOCK: Any + HOST: Any + PORT: Any + CA_CERTS: Any + def __init__(self, sock, host, port, ca_certs: Any | None = ...) -> None: ... + def components_from_socket(self): ... + def components_from_direct_connection(self): ... + def build_certificate_url(self, server, cert, issuer_cert): ... + def check_certificate(self, server, cert, issuer_url): ... + def is_valid(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/retry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/retry.pyi new file mode 100644 index 000000000000..ec14a37504f2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/retry.pyi @@ -0,0 +1,4 @@ +class Retry: + def __init__(self, backoff, retries, supported_errors=...) -> None: ... + def update_supported_erros(self, specified_errors) -> None: ... + def call_with_retry(self, do, fail): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/sentinel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/sentinel.pyi new file mode 100644 index 000000000000..702a796c268d --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/sentinel.pyi @@ -0,0 +1,55 @@ +from typing import Any, TypeVar, overload +from typing_extensions import Literal + +from redis.client import Redis +from redis.commands.sentinel import SentinelCommands +from redis.connection import Connection, ConnectionPool, SSLConnection +from redis.exceptions import ConnectionError + +_Redis = TypeVar("_Redis", bound=Redis[Any]) + +class MasterNotFoundError(ConnectionError): ... +class SlaveNotFoundError(ConnectionError): ... + +class SentinelManagedConnection(Connection): + connection_pool: Any + def __init__(self, **kwargs) -> None: ... + def connect_to(self, address) -> None: ... + def connect(self) -> None: ... + def read_response(self, disable_decoding: bool = ...): ... + +class SentinelManagedSSLConnection(SentinelManagedConnection, SSLConnection): ... + +class SentinelConnectionPool(ConnectionPool): + is_master: bool + check_connection: bool + connection_kwargs: Any + service_name: str + sentinel_manager: Any + def __init__(self, service_name, sentinel_manager, **kwargs) -> None: ... + def reset(self) -> None: ... + def owns_connection(self, connection) -> bool: ... + def get_master_address(self): ... + def rotate_slaves(self): ... + +class Sentinel(SentinelCommands): + sentinel_kwargs: Any + sentinels: Any + min_other_sentinels: int + connection_kwargs: Any + def __init__( + self, sentinels, min_other_sentinels: int = ..., sentinel_kwargs: Any | None = ..., **connection_kwargs + ) -> None: ... + def check_master_state(self, state, service_name) -> bool: ... + def discover_master(self, service_name): ... + def filter_slaves(self, slaves): ... + def discover_slaves(self, service_name): ... 
+ @overload + def master_for(self, service_name: str, *, connection_pool_class=..., **kwargs) -> Redis[Any]: ... + @overload + def master_for(self, service_name: str, redis_class: type[_Redis] = ..., connection_pool_class=..., **kwargs) -> _Redis: ... + @overload + def slave_for(self, service_name: str, connection_pool_class=..., **kwargs) -> Redis[Any]: ... + @overload + def slave_for(self, service_name: str, redis_class: type[_Redis] = ..., connection_pool_class=..., **kwargs) -> _Redis: ... + def execute_command(self, *args, **kwargs) -> Literal[True]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/utils.pyi index aa0e827f2e1e..5180003bfed5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/redis/redis/utils.pyi @@ -1,4 +1,5 @@ -from typing import Any, ContextManager, Text, TypeVar, overload +from contextlib import AbstractContextManager +from typing import Any, TypeVar, overload from typing_extensions import Literal from .client import Pipeline, Redis, _StrType @@ -6,16 +7,18 @@ from .client import Pipeline, Redis, _StrType _T = TypeVar("_T") HIREDIS_AVAILABLE: bool +CRYPTOGRAPHY_AVAILABLE: bool @overload -def from_url(url: Text, db: int | None = ..., *, decode_responses: Literal[True], **kwargs: Any) -> Redis[str]: ... +def from_url(url: str, *, db: int = ..., decode_responses: Literal[True], **kwargs: Any) -> Redis[str]: ... @overload -def from_url(url: Text, db: int | None = ..., *, decode_responses: Literal[False] = ..., **kwargs: Any) -> Redis[bytes]: ... +def from_url(url: str, *, db: int = ..., decode_responses: Literal[False] = ..., **kwargs: Any) -> Redis[bytes]: ... +def pipeline(redis_obj: Redis[_StrType]) -> AbstractContextManager[Pipeline[_StrType]]: ... @overload -def str_if_bytes(value: bytes) -> str: ... 
# type: ignore +def str_if_bytes(value: bytes) -> str: ... # type: ignore[misc] @overload def str_if_bytes(value: _T) -> _T: ... def safe_str(value: object) -> str: ... -def pipeline(redis_obj: Redis[_StrType]) -> ContextManager[Pipeline[_StrType]]: ... - -class dummy: ... +def dict_merge(*dicts): ... +def list_keys_to_dict(key_list, callback): ... +def merge_result(command, res): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml new file mode 100644 index 000000000000..1ebc1e5e1410 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/METADATA.toml @@ -0,0 +1 @@ +version = "2021.11.10" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi new file mode 100644 index 000000000000..f310be632191 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/__init__.pyi @@ -0,0 +1 @@ +from .regex import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi new file mode 100644 index 000000000000..ee5445fd7d80 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex.pyi @@ -0,0 +1,195 @@ +from _typeshed import Self +from typing import Any, AnyStr, Callable, Generic, Mapping, TypeVar, overload +from typing_extensions import Literal, final + +_T = TypeVar("_T") + +@final +class Pattern(Generic[AnyStr]): + pattern: AnyStr + flags: int + groups: int + groupindex: Mapping[str, int] + named_lists: Mapping[str, frozenset[AnyStr]] + def search( + self, + string: AnyStr, + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[AnyStr] | None: ... 
+ def match( + self, + string: AnyStr, + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[AnyStr] | None: ... + def fullmatch( + self, + string: AnyStr, + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Match[AnyStr] | None: ... + def split( + self, string: AnyStr, maxsplit: int = ..., concurrent: bool | None = ..., timeout: float | None = ... + ) -> list[AnyStr | Any]: ... + def splititer( + self, string: AnyStr, maxsplit: int = ..., concurrent: bool | None = ..., timeout: float | None = ... + ) -> Splitter[AnyStr]: ... + def findall( + self, + string: AnyStr, + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> list[Any]: ... + def finditer( + self, + string: AnyStr, + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Scanner[AnyStr]: ... + def sub( + self, + repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> AnyStr: ... + def subf( + self, + format: AnyStr | Callable[[Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> AnyStr: ... + def subn( + self, + repl: AnyStr | Callable[[Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> tuple[AnyStr, int]: ... 
+ def subfn( + self, + format: AnyStr | Callable[[Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> tuple[AnyStr, int]: ... + def scanner( + self, + string: AnyStr, + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ) -> Scanner[AnyStr]: ... + +@final +class Match(Generic[AnyStr]): + + re: Pattern[AnyStr] + string: AnyStr + pos: int + endpos: int + partial: bool + regs: tuple[tuple[int, int], ...] + fuzzy_counts: tuple[int, int, int] + fuzzy_changes: tuple[list[int], list[int], list[int]] + lastgroup: str | None + lastindex: int | None + @overload + def group(self, __group: Literal[0] = ...) -> AnyStr: ... + @overload + def group(self, __group: int | str = ...) -> AnyStr | Any: ... + @overload + def group(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[AnyStr | Any, ...]: ... + @overload + def groups(self, default: None = ...) -> tuple[AnyStr | Any, ...]: ... + @overload + def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... + @overload + def groupdict(self, default: None = ...) -> dict[str, AnyStr | Any]: ... + @overload + def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... + @overload + def span(self, __group: int | str = ...) -> tuple[int, int]: ... + @overload + def span(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[tuple[int, int], ...]: ... + @overload + def spans(self, __group: int | str = ...) -> list[tuple[int, int]]: ... + @overload + def spans(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[tuple[int, int]], ...]: ... + @overload + def start(self, __group: int | str = ...) -> int: ... 
+ @overload + def start(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[int, ...]: ... + @overload + def starts(self, __group: int | str = ...) -> list[int]: ... + @overload + def starts(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[int], ...]: ... + @overload + def end(self, __group: int | str = ...) -> int: ... + @overload + def end(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[int, ...]: ... + @overload + def ends(self, __group: int | str = ...) -> list[int]: ... + @overload + def ends(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[int], ...]: ... + def expand(self, template: AnyStr) -> AnyStr: ... + def expandf(self, format: AnyStr) -> AnyStr: ... + @overload + def captures(self, __group: int | str = ...) -> list[AnyStr]: ... + @overload + def captures(self, __group1: int | str, __group2: int | str, *groups: int | str) -> tuple[list[AnyStr], ...]: ... + def capturesdict(self) -> dict[str, list[AnyStr]]: ... + def detach_string(self) -> None: ... + @overload + def __getitem__(self, __key: Literal[0]) -> AnyStr: ... + @overload + def __getitem__(self, __key: int | str) -> AnyStr | Any: ... + +@final +class Splitter(Generic[AnyStr]): + + pattern: Pattern[AnyStr] + def __iter__(self: Self) -> Self: ... + def __next__(self) -> AnyStr | Any: ... + def split(self) -> AnyStr | Any: ... + +@final +class Scanner(Generic[AnyStr]): + + pattern: Pattern[AnyStr] + def __iter__(self: Self) -> Self: ... + def __next__(self) -> Match[AnyStr]: ... + def match(self) -> Match[AnyStr] | None: ... + def search(self) -> Match[AnyStr] | None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi new file mode 100644 index 000000000000..c3f5685c13dd --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/_regex_core.pyi @@ -0,0 +1,41 @@ +from typing import AnyStr + +class error(Exception): + def __init__(self, message: str, pattern: AnyStr | None = ..., pos: int | None = ...) -> None: ... + +A: int +ASCII: int +B: int +BESTMATCH: int +D: int +DEBUG: int +E: int +ENHANCEMATCH: int +F: int +FULLCASE: int +I: int +IGNORECASE: int +L: int +LOCALE: int +M: int +MULTILINE: int +P: int +POSIX: int +R: int +REVERSE: int +T: int +TEMPLATE: int +S: int +DOTALL: int +U: int +UNICODE: int +V0: int +VERSION0: int +V1: int +VERSION1: int +W: int +WORD: int +X: int +VERBOSE: int + +DEFAULT_VERSION: int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/regex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/regex.pyi new file mode 100644 index 000000000000..a8bfd3bbe7c4 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/regex/regex/regex.pyi @@ -0,0 +1,154 @@ +from typing import Any, AnyStr, Callable, overload + +from . import _regex +from ._regex_core import * + +__version__: str + +def compile( + pattern: AnyStr | _regex.Pattern[AnyStr], flags: int = ..., ignore_unused: bool = ..., **kwargs: Any +) -> _regex.Pattern[AnyStr]: ... +def search( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Match[AnyStr] | None: ... 
+def match( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Match[AnyStr] | None: ... +def fullmatch( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Match[AnyStr] | None: ... +def split( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + maxsplit: int = ..., + flags: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> list[AnyStr | Any]: ... +def splititer( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + maxsplit: int = ..., + flags: int = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Splitter[AnyStr]: ... +def findall( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> list[Any]: ... +def finditer( + pattern: AnyStr | _regex.Pattern[AnyStr], + string: AnyStr, + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + overlapped: bool = ..., + partial: bool = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> _regex.Scanner[AnyStr]: ... 
+def sub( + pattern: AnyStr | _regex.Pattern[AnyStr], + repl: AnyStr | Callable[[_regex.Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> AnyStr: ... +def subf( + pattern: AnyStr | _regex.Pattern[AnyStr], + format: AnyStr | Callable[[_regex.Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> AnyStr: ... +def subn( + pattern: AnyStr | _regex.Pattern[AnyStr], + repl: AnyStr | Callable[[_regex.Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> tuple[AnyStr, int]: ... +def subfn( + pattern: AnyStr | _regex.Pattern[AnyStr], + format: AnyStr | Callable[[_regex.Match[AnyStr]], AnyStr], + string: AnyStr, + count: int = ..., + flags: int = ..., + pos: int | None = ..., + endpos: int | None = ..., + concurrent: bool | None = ..., + timeout: float | None = ..., + ignore_unused: bool = ..., + **kwargs: Any, +) -> tuple[AnyStr, int]: ... +def purge() -> None: ... +@overload +def cache_all(value: bool = ...) -> None: ... +@overload +def cache_all(value: None) -> bool: ... +def escape(pattern: AnyStr, special_only: bool = ..., literal_spaces: bool = ...) -> AnyStr: ... +def template(pattern: AnyStr | _regex.Pattern[AnyStr], flags: int = ...) -> _regex.Pattern[AnyStr]: ... 
+ +Pattern = _regex.Pattern[AnyStr] +Match = _regex.Match[AnyStr] +Regex = compile diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/requests/METADATA.toml index 77efa142fb42..6ed98019159d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/METADATA.toml @@ -1,2 +1,3 @@ -version = "2.25" +version = "2.27.*" +requires = ["types-urllib3<1.27"] # keep in sync with requests's setup.py python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/__init__.pyi index be5664fbf918..5e8e10a84370 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/__init__.pyi @@ -16,6 +16,7 @@ from .exceptions import ( ConnectTimeout as ConnectTimeout, FileModeWarning as FileModeWarning, HTTPError as HTTPError, + JSONDecodeError as JSONDecodeError, ReadTimeout as ReadTimeout, RequestException as RequestException, Timeout as Timeout, @@ -35,4 +36,4 @@ __version__: Any class NullHandler(logging.Handler): def emit(self, record): ... -def check_compatibility(urllib3_version: Text, chardet_version: Text) -> None: ... +def check_compatibility(urllib3_version: Text, chardet_version: Text | None, charset_normalizer_version: Text | None) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/adapters.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/adapters.pyi index bbcdd2adcccc..575525ea28e2 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/adapters.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/adapters.pyi @@ -1,8 +1,9 @@ -from typing import Any, Container, Mapping, Text, Tuple +from typing import Any, Container, Mapping, Text + +from urllib3 import exceptions as urllib3_exceptions, poolmanager, response +from urllib3.util import retry from . import cookies, exceptions, models, structures, utils -from .packages.urllib3 import exceptions as urllib3_exceptions, poolmanager, response -from .packages.urllib3.util import retry PreparedRequest = models.PreparedRequest Response = models.Response @@ -40,7 +41,7 @@ class BaseAdapter: self, request: PreparedRequest, stream: bool = ..., - timeout: None | float | Tuple[float, float] | Tuple[float, None] = ..., + timeout: None | float | tuple[float, float] | tuple[float, None] = ..., verify: bool | str = ..., cert: None | bytes | Text | Container[bytes | Text] = ..., proxies: Mapping[str, str] | None = ..., @@ -69,7 +70,7 @@ class HTTPAdapter(BaseAdapter): self, request: PreparedRequest, stream: bool = ..., - timeout: None | float | Tuple[float, float] | Tuple[float, None] = ..., + timeout: None | float | tuple[float, float] | tuple[float, None] = ..., verify: bool | str = ..., cert: None | bytes | Text | Container[bytes | Text] = ..., proxies: Mapping[str, str] | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi index 6ad059a3c01c..88479bbca5f0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/exceptions.pyi @@ -1,12 +1,14 @@ from typing import Any -from .packages.urllib3.exceptions import HTTPError as BaseHTTPError +from urllib3.exceptions import HTTPError as BaseHTTPError class RequestException(IOError): response: Any request: Any def __init__(self, *args, **kwargs) -> None: ... +class InvalidJSONError(RequestException): ... +class JSONDecodeError(InvalidJSONError): ... class HTTPError(RequestException): ... class ConnectionError(RequestException): ... class ProxyError(ConnectionError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/models.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/models.pyi index ff0822671842..d3d0a4b39e9f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/models.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/models.pyi @@ -1,13 +1,17 @@ import datetime +from _typeshed import Self from json import JSONDecoder -from typing import Any, Callable, Iterator, Text, Type +from typing import Any, Callable, Iterator, Text, TypeVar + +from urllib3 import exceptions as urllib3_exceptions, fields, filepost, util from . 
import auth, cookies, exceptions, hooks, status_codes, structures, utils from .cookies import RequestsCookieJar -from .packages.urllib3 import exceptions as urllib3_exceptions, fields, filepost, util + +_VT = TypeVar("_VT") default_hooks = hooks.default_hooks -CaseInsensitiveDict = structures.CaseInsensitiveDict +CaseInsensitiveDict = structures.CaseInsensitiveDict[_VT] HTTPBasicAuth = auth.HTTPBasicAuth cookiejar_from_dict = cookies.cookiejar_from_dict get_cookie_header = cookies.get_cookie_header @@ -93,7 +97,7 @@ class Response: headers: CaseInsensitiveDict[str] raw: Any url: str - encoding: str + encoding: str | None history: list[Response] reason: str cookies: RequestsCookieJar @@ -103,7 +107,7 @@ class Response: def __bool__(self) -> bool: ... def __nonzero__(self) -> bool: ... def __iter__(self) -> Iterator[bytes]: ... - def __enter__(self) -> Response: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *args: Any) -> None: ... @property def next(self) -> PreparedRequest | None: ... @@ -126,7 +130,7 @@ class Response: def json( self, *, - cls: Type[JSONDecoder] | None = ..., + cls: type[JSONDecoder] | None = ..., object_hook: Callable[[dict[Any, Any]], Any] | None = ..., parse_float: Callable[[str], Any] | None = ..., parse_int: Callable[[str], Any] | None = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/__init__.pyi index b50dba35e4ae..370324481c48 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/__init__.pyi @@ -1,3 +1,5 @@ +# requests also imports urllib3 as requests.packages.urllib3, the stubs don't reflect that. + class VendorAlias: def __init__(self, package_names) -> None: ... def find_module(self, fullname, path=...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/exceptions.pyi deleted file mode 100644 index ddb4e83ba77e..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/exceptions.pyi +++ /dev/null @@ -1,50 +0,0 @@ -from typing import Any - -class HTTPError(Exception): ... -class HTTPWarning(Warning): ... - -class PoolError(HTTPError): - pool: Any - def __init__(self, pool, message) -> None: ... - def __reduce__(self): ... - -class RequestError(PoolError): - url: Any - def __init__(self, pool, url, message) -> None: ... - def __reduce__(self): ... - -class SSLError(HTTPError): ... -class ProxyError(HTTPError): ... -class DecodeError(HTTPError): ... -class ProtocolError(HTTPError): ... - -ConnectionError: Any - -class MaxRetryError(RequestError): - reason: Any - def __init__(self, pool, url, reason=...) -> None: ... - -class HostChangedError(RequestError): - retries: Any - def __init__(self, pool, url, retries=...) -> None: ... - -class TimeoutStateError(HTTPError): ... -class TimeoutError(HTTPError): ... -class ReadTimeoutError(TimeoutError, RequestError): ... -class ConnectTimeoutError(TimeoutError): ... -class EmptyPoolError(PoolError): ... -class ClosedPoolError(PoolError): ... -class LocationValueError(ValueError, HTTPError): ... - -class LocationParseError(LocationValueError): - location: Any - def __init__(self, location) -> None: ... - -class ResponseError(HTTPError): - GENERIC_ERROR: Any - SPECIFIC_ERROR: Any - -class SecurityWarning(HTTPWarning): ... -class InsecureRequestWarning(SecurityWarning): ... -class SystemTimeWarning(SecurityWarning): ... -class InsecurePlatformWarning(SecurityWarning): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/response.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/response.pyi deleted file mode 100644 index 1c78b48a2572..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/response.pyi +++ /dev/null @@ -1,66 +0,0 @@ -import io -from typing import Any - -from . import _collections, exceptions -from .connection import BaseSSLError as BaseSSLError, HTTPException as HTTPException -from .util import response - -HTTPHeaderDict = _collections.HTTPHeaderDict -ProtocolError = exceptions.ProtocolError -DecodeError = exceptions.DecodeError -ReadTimeoutError = exceptions.ReadTimeoutError -binary_type = bytes # six.binary_type -PY3 = True # six.PY3 -is_fp_closed = response.is_fp_closed - -class DeflateDecoder: - def __init__(self) -> None: ... - def __getattr__(self, name): ... - def decompress(self, data): ... - -class GzipDecoder: - def __init__(self) -> None: ... - def __getattr__(self, name): ... - def decompress(self, data): ... - -class HTTPResponse(io.IOBase): - CONTENT_DECODERS: Any - REDIRECT_STATUSES: Any - headers: Any - status: Any - version: Any - reason: Any - strict: Any - decode_content: Any - def __init__( - self, - body=..., - headers=..., - status=..., - version=..., - reason=..., - strict=..., - preload_content=..., - decode_content=..., - original_response=..., - pool=..., - connection=..., - ) -> None: ... - def get_redirect_location(self): ... - def release_conn(self): ... - @property - def data(self): ... - def tell(self): ... - def read(self, amt=..., decode_content=..., cache_content=...): ... - def stream(self, amt=..., decode_content=...): ... - @classmethod - def from_httplib(cls, r, **response_kw): ... - def getheaders(self): ... - def getheader(self, name, default=...): ... - def close(self): ... - @property - def closed(self): ... - def fileno(self): ... 
- def flush(self): ... - def readable(self): ... - def readinto(self, b): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/retry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/retry.pyi deleted file mode 100644 index c4aedaa6cc62..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/retry.pyi +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Any - -from .. import exceptions - -ConnectTimeoutError = exceptions.ConnectTimeoutError -MaxRetryError = exceptions.MaxRetryError -ProtocolError = exceptions.ProtocolError -ReadTimeoutError = exceptions.ReadTimeoutError -ResponseError = exceptions.ResponseError - -log: Any - -class Retry: - DEFAULT_METHOD_WHITELIST: Any - BACKOFF_MAX: Any - total: Any - connect: Any - read: Any - redirect: Any - status_forcelist: Any - method_whitelist: Any - backoff_factor: Any - raise_on_redirect: Any - def __init__( - self, - total=..., - connect=..., - read=..., - redirect=..., - status=..., - other=..., - allowed_methods=..., - status_forcelist=..., - backoff_factor=..., - raise_on_redirect=..., - raise_on_status=..., - history=..., - respect_retry_after_header=..., - remove_headers_on_redirect=..., - method_whitelist=..., - ) -> None: ... - def new(self, **kw): ... - @classmethod - def from_int(cls, retries, redirect=..., default=...): ... - def get_backoff_time(self): ... - def sleep(self): ... - def is_forced_retry(self, method, status_code): ... - def is_exhausted(self): ... - def increment(self, method=..., url=..., response=..., error=..., _pool=..., _stacktrace=...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/sessions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/sessions.pyi index 524f5d14ac5c..21693216eb5a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/sessions.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/sessions.pyi @@ -1,11 +1,15 @@ -from _typeshed import SupportsItems -from typing import IO, Any, Callable, Iterable, List, Mapping, MutableMapping, Optional, Text, Tuple, TypeVar, Union +from _typeshed import Self, SupportsItems +from typing import IO, Any, Callable, Iterable, Mapping, MutableMapping, Optional, Text, TypeVar, Union + +from urllib3 import _collections from . import adapters, auth as _auth, compat, cookies, exceptions, hooks, models, status_codes, structures, utils from .models import Response -from .packages.urllib3 import _collections -BaseAdapter = adapters.BaseAdapter +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") + +_BaseAdapter = adapters.BaseAdapter OrderedDict = compat.OrderedDict cookiejar_from_dict = cookies.cookiejar_from_dict extract_cookies_to_jar = cookies.extract_cookies_to_jar @@ -23,8 +27,8 @@ TooManyRedirects = exceptions.TooManyRedirects InvalidSchema = exceptions.InvalidSchema ChunkedEncodingError = exceptions.ChunkedEncodingError ContentDecodingError = exceptions.ContentDecodingError -RecentlyUsedContainer = _collections.RecentlyUsedContainer -CaseInsensitiveDict = structures.CaseInsensitiveDict +RecentlyUsedContainer = _collections.RecentlyUsedContainer[_KT, _VT] +CaseInsensitiveDict = structures.CaseInsensitiveDict[_VT] HTTPAdapter = adapters.HTTPAdapter requote_uri = utils.requote_uri get_environ_proxies = utils.get_environ_proxies @@ -43,42 +47,41 @@ class SessionRedirectMixin: def rebuild_proxies(self, prepared_request, proxies): ... def should_strip_auth(self, old_url, new_url): ... 
-_Data = Union[None, Text, bytes, Mapping[str, Any], Mapping[Text, Any], Iterable[Tuple[Text, Optional[Text]]], IO[Any]] +_Data = Union[None, Text, bytes, Mapping[str, Any], Mapping[Text, Any], Iterable[tuple[Text, Optional[Text]]], IO[Any]] _Hook = Callable[[Response], Any] -_Hooks = MutableMapping[Text, List[_Hook]] +_Hooks = MutableMapping[Text, _Hook | list[_Hook]] _HooksInput = MutableMapping[Text, Union[Iterable[_Hook], _Hook]] _ParamsMappingKeyType = Union[Text, bytes, int, float] _ParamsMappingValueType = Union[Text, bytes, int, float, Iterable[Union[Text, bytes, int, float]], None] _Params = Union[ SupportsItems[_ParamsMappingKeyType, _ParamsMappingValueType], - Tuple[_ParamsMappingKeyType, _ParamsMappingValueType], - Iterable[Tuple[_ParamsMappingKeyType, _ParamsMappingValueType]], + tuple[_ParamsMappingKeyType, _ParamsMappingValueType], + Iterable[tuple[_ParamsMappingKeyType, _ParamsMappingValueType]], Union[Text, bytes], ] _TextMapping = MutableMapping[Text, Text] -_SessionT = TypeVar("_SessionT", bound=Session) class Session(SessionRedirectMixin): __attrs__: Any headers: CaseInsensitiveDict[Text] - auth: None | Tuple[Text, Text] | _auth.AuthBase | Callable[[PreparedRequest], PreparedRequest] + auth: None | tuple[Text, Text] | _auth.AuthBase | Callable[[PreparedRequest], PreparedRequest] proxies: _TextMapping hooks: _Hooks params: _Params stream: bool verify: None | bool | Text - cert: None | Text | Tuple[Text, Text] + cert: None | Text | tuple[Text, Text] max_redirects: int trust_env: bool cookies: RequestsCookieJar adapters: MutableMapping[Any, Any] redirect_cache: RecentlyUsedContainer[Any, Any] def __init__(self) -> None: ... - def __enter__(self: _SessionT) -> _SessionT: ... + def __enter__(self: Self) -> Self: ... def __exit__(self, *args) -> None: ... - def prepare_request(self, request): ... + def prepare_request(self, request: Request) -> PreparedRequest: ... 
def request( self, method: str, @@ -88,18 +91,18 @@ class Session(SessionRedirectMixin): headers: _TextMapping | None = ..., cookies: None | RequestsCookieJar | _TextMapping = ..., files: MutableMapping[Text, IO[Any]] - | MutableMapping[Text, Tuple[Text, IO[Any]]] - | MutableMapping[Text, Tuple[Text, IO[Any], Text]] - | MutableMapping[Text, Tuple[Text, IO[Any], Text, _TextMapping]] + | MutableMapping[Text, tuple[Text, IO[Any]]] + | MutableMapping[Text, tuple[Text, IO[Any], Text]] + | MutableMapping[Text, tuple[Text, IO[Any], Text, _TextMapping]] | None = ..., - auth: None | Tuple[Text, Text] | _auth.AuthBase | Callable[[PreparedRequest], PreparedRequest] = ..., - timeout: None | float | Tuple[float, float] | Tuple[float, None] = ..., + auth: None | tuple[Text, Text] | _auth.AuthBase | Callable[[PreparedRequest], PreparedRequest] = ..., + timeout: None | float | tuple[float, float] | tuple[float, None] = ..., allow_redirects: bool | None = ..., proxies: _TextMapping | None = ..., hooks: _HooksInput | None = ..., stream: bool | None = ..., verify: None | bool | Text = ..., - cert: Text | Tuple[Text, Text] | None = ..., + cert: Text | tuple[Text, Text] | None = ..., json: Any | None = ..., ) -> Response: ... def get( @@ -228,10 +231,12 @@ class Session(SessionRedirectMixin): cert: Any | None = ..., json: Any | None = ..., ) -> Response: ... - def send(self, request: PreparedRequest, **kwargs) -> Response: ... + def send( + self, request: PreparedRequest, *, stream=..., verify=..., cert=..., proxies=..., allow_redirects: bool = ..., **kwargs + ) -> Response: ... def merge_environment_settings(self, url, proxies, stream, verify, cert): ... - def get_adapter(self, url): ... + def get_adapter(self, url: str) -> _BaseAdapter: ... def close(self) -> None: ... - def mount(self, prefix: Text | bytes, adapter: BaseAdapter) -> None: ... + def mount(self, prefix: Text | bytes, adapter: _BaseAdapter) -> None: ... def session() -> Session: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/structures.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/structures.pyi index c8fcfe538ca3..93e343f9b170 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/structures.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/structures.pyi @@ -1,10 +1,10 @@ -from typing import Any, Dict, Generic, Iterable, Iterator, Mapping, MutableMapping, Tuple, TypeVar +from typing import Any, Generic, Iterable, Iterator, Mapping, MutableMapping, TypeVar _VT = TypeVar("_VT") class CaseInsensitiveDict(MutableMapping[str, _VT], Generic[_VT]): - def __init__(self, data: Mapping[str, _VT] | Iterable[Tuple[str, _VT]] | None = ..., **kwargs: _VT) -> None: ... - def lower_items(self) -> Iterator[Tuple[str, _VT]]: ... + def __init__(self, data: Mapping[str, _VT] | Iterable[tuple[str, _VT]] | None = ..., **kwargs: _VT) -> None: ... + def lower_items(self) -> Iterator[tuple[str, _VT]]: ... def __setitem__(self, key: str, value: _VT) -> None: ... def __getitem__(self, key: str) -> _VT: ... def __delitem__(self, key: str) -> None: ... @@ -12,9 +12,9 @@ class CaseInsensitiveDict(MutableMapping[str, _VT], Generic[_VT]): def __len__(self) -> int: ... def copy(self) -> CaseInsensitiveDict[_VT]: ... -class LookupDict(Dict[str, _VT]): +class LookupDict(dict[str, _VT]): name: Any def __init__(self, name: Any = ...) -> None: ... - def __getitem__(self, key: str) -> _VT | None: ... # type: ignore + def __getitem__(self, key: str) -> _VT | None: ... # type: ignore[override] def __getattr__(self, attr: str) -> _VT: ... - def __setattr__(self, attr: str, value: _VT) -> None: ... + def __setattr__(self, __attr: str, __value: _VT) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/utils.pyi b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/utils.pyi index 0ac3a161c41d..994652846491 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/utils.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/utils.pyi @@ -1,4 +1,4 @@ -from typing import Any, AnyStr, Iterable, Mapping, Text, Tuple +from typing import Any, AnyStr, Iterable, Mapping, Text from . import compat, cookies, exceptions, structures @@ -51,4 +51,4 @@ def get_auth_from_url(url): ... def to_native_string(string, encoding=...): ... def urldefragauth(url): ... def rewind_body(prepared_request): ... -def check_header_validity(header: Tuple[AnyStr, AnyStr]) -> None: ... +def check_header_validity(header: tuple[AnyStr, AnyStr]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/retry/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/retry/METADATA.toml index ca732ba9cc52..6f2870bcfb82 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/retry/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/retry/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.9" +version = "0.9.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/retry/retry/api.pyi b/packages/pyright-internal/typeshed-fallback/stubs/retry/retry/api.pyi index dd7f3761f748..c74e32ee52a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/retry/retry/api.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/retry/retry/api.pyi @@ -1,6 +1,6 @@ from _typeshed import IdentityFunction from logging import Logger -from typing import Any, Callable, Sequence, Tuple, Type, TypeVar +from typing import Any, Callable, Sequence, TypeVar _R = TypeVar("_R") @@ -8,20 +8,20 @@ def retry_call( f: Callable[..., _R], fargs: Sequence[Any] | None = ..., fkwargs: dict[str, Any] | None = ..., - 
exceptions: Type[Exception] | Tuple[Type[Exception], ...] = ..., + exceptions: type[Exception] | tuple[type[Exception], ...] = ..., tries: int = ..., delay: float = ..., max_delay: float | None = ..., backoff: float = ..., - jitter: Tuple[float, float] | float = ..., + jitter: tuple[float, float] | float = ..., logger: Logger | None = ..., ) -> _R: ... def retry( - exceptions: Type[Exception] | Tuple[Type[Exception], ...] = ..., + exceptions: type[Exception] | tuple[type[Exception], ...] = ..., tries: int = ..., delay: float = ..., max_delay: float | None = ..., backoff: float = ..., - jitter: Tuple[float, float] | float = ..., + jitter: tuple[float, float] | float = ..., logger: Logger | None = ..., ) -> IdentityFunction: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/selenium/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/selenium/METADATA.toml index 115c7fe9a674..9d7ee7b9ceec 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/selenium/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/selenium/METADATA.toml @@ -1 +1,2 @@ -version = "3.141" +version = "3.141.*" +obsolete_since = "4.1.2" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/common/actions/pointer_input.pyi b/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/common/actions/pointer_input.pyi index ba8b834607ee..1647f90f1afd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/common/actions/pointer_input.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/common/actions/pointer_input.pyi @@ -13,5 +13,5 @@ class PointerInput(InputDevice): def create_pointer_down(self, button) -> None: ... def create_pointer_up(self, button) -> None: ... def create_pointer_cancel(self) -> None: ... - def create_pause(self, pause_duration) -> None: ... 
# type: ignore + def create_pause(self, pause_duration) -> None: ... # type: ignore[override] def encode(self): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/remote/webdriver.pyi b/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/remote/webdriver.pyi index 95ab425f078c..2f117cb0016d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/remote/webdriver.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/selenium/selenium/webdriver/remote/webdriver.pyi @@ -38,49 +38,49 @@ class WebDriver: @property def mobile(self): ... @property - def name(self): ... + def name(self) -> str: ... def start_client(self) -> None: ... def stop_client(self) -> None: ... w3c: Any def start_session(self, capabilities, browser_profile: Any | None = ...) -> None: ... - def create_web_element(self, element_id): ... + def create_web_element(self, element_id) -> WebElement: ... def execute(self, driver_command, params: Any | None = ...): ... def get(self, url) -> None: ... @property - def title(self): ... - def find_element_by_id(self, id_): ... - def find_elements_by_id(self, id_): ... - def find_element_by_xpath(self, xpath): ... - def find_elements_by_xpath(self, xpath): ... - def find_element_by_link_text(self, link_text): ... - def find_elements_by_link_text(self, text): ... - def find_element_by_partial_link_text(self, link_text): ... - def find_elements_by_partial_link_text(self, link_text): ... - def find_element_by_name(self, name): ... - def find_elements_by_name(self, name): ... - def find_element_by_tag_name(self, name): ... - def find_elements_by_tag_name(self, name): ... - def find_element_by_class_name(self, name): ... - def find_elements_by_class_name(self, name): ... - def find_element_by_css_selector(self, css_selector): ... - def find_elements_by_css_selector(self, css_selector): ... + def title(self) -> str: ... 
+ def find_element_by_id(self, id_) -> WebElement: ... + def find_elements_by_id(self, id_) -> list[WebElement]: ... + def find_element_by_xpath(self, xpath) -> WebElement: ... + def find_elements_by_xpath(self, xpath) -> list[WebElement]: ... + def find_element_by_link_text(self, link_text) -> WebElement: ... + def find_elements_by_link_text(self, text) -> list[WebElement]: ... + def find_element_by_partial_link_text(self, link_text) -> WebElement: ... + def find_elements_by_partial_link_text(self, link_text) -> list[WebElement]: ... + def find_element_by_name(self, name) -> WebElement: ... + def find_elements_by_name(self, name) -> list[WebElement]: ... + def find_element_by_tag_name(self, name) -> WebElement: ... + def find_elements_by_tag_name(self, name) -> list[WebElement]: ... + def find_element_by_class_name(self, name) -> WebElement: ... + def find_elements_by_class_name(self, name) -> list[WebElement]: ... + def find_element_by_css_selector(self, css_selector) -> WebElement: ... + def find_elements_by_css_selector(self, css_selector) -> list[WebElement]: ... def execute_script(self, script, *args): ... def execute_async_script(self, script, *args): ... @property - def current_url(self): ... + def current_url(self) -> str: ... @property - def page_source(self): ... + def page_source(self) -> str: ... def close(self) -> None: ... def quit(self) -> None: ... @property - def current_window_handle(self): ... + def current_window_handle(self) -> str: ... @property - def window_handles(self): ... + def window_handles(self) -> list[str]: ... def maximize_window(self) -> None: ... def fullscreen_window(self) -> None: ... def minimize_window(self) -> None: ... @property - def switch_to(self): ... + def switch_to(self) -> SwitchTo: ... def switch_to_active_element(self): ... def switch_to_window(self, window_name) -> None: ... def switch_to_frame(self, frame_reference) -> None: ... 
@@ -97,14 +97,14 @@ class WebDriver: def implicitly_wait(self, time_to_wait) -> None: ... def set_script_timeout(self, time_to_wait) -> None: ... def set_page_load_timeout(self, time_to_wait) -> None: ... - def find_element(self, by=..., value: Any | None = ...): ... - def find_elements(self, by=..., value: Any | None = ...): ... + def find_element(self, by=..., value: Any | None = ...) -> WebElement: ... + def find_elements(self, by=..., value: Any | None = ...) -> list[WebElement]: ... @property def desired_capabilities(self): ... - def get_screenshot_as_file(self, filename): ... - def save_screenshot(self, filename): ... - def get_screenshot_as_png(self): ... - def get_screenshot_as_base64(self): ... + def get_screenshot_as_file(self, filename) -> bool: ... + def save_screenshot(self, filename) -> bool: ... + def get_screenshot_as_png(self) -> bytes: ... + def get_screenshot_as_base64(self) -> str: ... def set_window_size(self, width, height, windowHandle: str = ...) -> None: ... def get_window_size(self, windowHandle: str = ...): ... def set_window_position(self, x, y, windowHandle: str = ...): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/METADATA.toml index b575da9647f9..ff6e4177e062 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/METADATA.toml @@ -1 +1 @@ -version = "57.4" +version = "57.4.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/pkg_resources/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/pkg_resources/__init__.pyi index ce3699ca329c..d4f65651a87b 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/pkg_resources/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/pkg_resources/__init__.pyi @@ -1,8 +1,9 @@ import importlib.abc import types import zipimport +from _typeshed import Self from abc import ABCMeta -from typing import IO, Any, Callable, Generator, Iterable, Optional, Sequence, Set, Tuple, TypeVar, Union, overload +from typing import IO, Any, Callable, Generator, Iterable, Optional, Sequence, TypeVar, Union, overload LegacyVersion = Any # from packaging.version Version = Any # from packaging.version @@ -36,7 +37,7 @@ class WorkingSet: def subscribe(self, callback: Callable[[Distribution], None]) -> None: ... def find_plugins( self, plugin_env: Environment, full_env: Environment | None = ..., fallback: bool = ... - ) -> Tuple[list[Distribution], dict[Distribution, Exception]]: ... + ) -> tuple[list[Distribution], dict[Distribution, Exception]]: ... working_set: WorkingSet = ... @@ -53,7 +54,7 @@ class Environment: def remove(self, dist: Distribution) -> None: ... def can_add(self, dist: Distribution) -> bool: ... def __add__(self, other: Distribution | Environment) -> Environment: ... - def __iadd__(self, other: Distribution | Environment) -> Environment: ... 
+ def __iadd__(self: Self, other: Distribution | Environment) -> Self: ... @overload def best_match(self, req: Requirement, working_set: WorkingSet) -> Distribution: ... @overload @@ -70,15 +71,15 @@ class Requirement: unsafe_name: str project_name: str key: str - extras: Tuple[str, ...] - specs: list[Tuple[str, str]] + extras: tuple[str, ...] + specs: list[tuple[str, str]] # TODO: change this to packaging.markers.Marker | None once we can import # packaging.markers marker: Any | None @staticmethod def parse(s: str | Iterable[str]) -> Requirement: ... - def __contains__(self, item: Distribution | str | Tuple[str, ...]) -> bool: ... - def __eq__(self, other_requirement: Any) -> bool: ... + def __contains__(self, item: Distribution | str | tuple[str, ...]) -> bool: ... + def __eq__(self, other_requirement: object) -> bool: ... def load_entry_point(dist: _EPDistType, group: str, name: str) -> Any: ... def get_entry_info(dist: _EPDistType, group: str, name: str) -> EntryPoint | None: ... @@ -90,15 +91,15 @@ def get_entry_map(dist: _EPDistType, group: str) -> dict[str, EntryPoint]: ... class EntryPoint: name: str module_name: str - attrs: Tuple[str, ...] - extras: Tuple[str, ...] + attrs: tuple[str, ...] + extras: tuple[str, ...] dist: Distribution | None def __init__( self, name: str, module_name: str, - attrs: Tuple[str, ...] = ..., - extras: Tuple[str, ...] = ..., + attrs: tuple[str, ...] = ..., + extras: tuple[str, ...] = ..., dist: Distribution | None = ..., ) -> None: ... @classmethod @@ -123,7 +124,7 @@ class Distribution(IResourceProvider, IMetadataProvider): key: str extras: list[str] version: str - parsed_version: Tuple[str, ...] + parsed_version: tuple[str, ...] py_version: str platform: str | None precedence: int @@ -145,7 +146,7 @@ class Distribution(IResourceProvider, IMetadataProvider): def from_filename(cls, filename: str, metadata: _MetadataType = ..., **kw: str | None | int) -> Distribution: ... def activate(self, path: list[str] | None = ...) 
-> None: ... def as_requirement(self) -> Requirement: ... - def requires(self, extras: Tuple[str, ...] = ...) -> list[Requirement]: ... + def requires(self, extras: tuple[str, ...] = ...) -> list[Requirement]: ... def clone(self, **kw: str | int | None) -> Requirement: ... def egg_name(self) -> str: ... def __cmp__(self, other: Any) -> bool: ... @@ -203,7 +204,7 @@ class DistributionNotFound(ResolutionError): @property def req(self) -> Requirement: ... @property - def requirers(self) -> Set[str]: ... + def requirers(self) -> set[str]: ... @property def requirers_str(self) -> str: ... def report(self) -> str: ... @@ -214,11 +215,11 @@ class VersionConflict(ResolutionError): @property def req(self) -> Any: ... def report(self) -> str: ... - def with_context(self, required_by: Set[Distribution | str]) -> VersionConflict: ... + def with_context(self, required_by: set[Distribution | str]) -> VersionConflict: ... class ContextualVersionConflict(VersionConflict): @property - def required_by(self) -> Set[Distribution | str]: ... + def required_by(self) -> set[Distribution | str]: ... class UnknownExtra(ResolutionError): ... @@ -255,7 +256,7 @@ class FileMetadata(EmptyProvider, IResourceProvider): def parse_version(v: str) -> Version | LegacyVersion: ... def yield_lines(strs: _NestedStr) -> Generator[str, None, None]: ... -def split_sections(strs: _NestedStr) -> Generator[Tuple[str | None, str], None, None]: ... +def split_sections(strs: _NestedStr) -> Generator[tuple[str | None, str], None, None]: ... def safe_name(name: str) -> str: ... def safe_version(version: str) -> str: ... def safe_extra(extra: str) -> str: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi index 25bd93d13671..4d1048d31193 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/__init__.pyi @@ -1,7 +1,7 @@ from abc import abstractmethod from collections.abc import Iterable, Mapping from distutils.core import Command as _Command -from typing import Any, Type +from typing import Any from setuptools._deprecation_warning import SetuptoolsDeprecationWarning as SetuptoolsDeprecationWarning from setuptools.depends import Require as Require @@ -36,14 +36,14 @@ def setup( scripts: list[str] = ..., ext_modules: list[Extension] = ..., classifiers: list[str] = ..., - distclass: Type[Distribution] = ..., + distclass: type[Distribution] = ..., script_name: str = ..., script_args: list[str] = ..., options: Mapping[str, Any] = ..., license: str = ..., keywords: list[str] | str = ..., platforms: list[str] | str = ..., - cmdclass: Mapping[str, Type[Command]] = ..., + cmdclass: Mapping[str, type[Command]] = ..., data_files: list[tuple[str, list[str]]] = ..., package_dir: Mapping[str, str] = ..., obsoletes: list[str] = ..., diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/develop.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/develop.pyi index 8bd432be5143..8b099dc802a0 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/develop.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/develop.pyi @@ -9,7 +9,7 @@ class develop(namespaces.DevelopInstaller, easy_install): boolean_options: Any command_consumes_arguments: bool multi_version: bool - def run(self) -> None: ... # type: ignore + def run(self) -> None: ... 
# type: ignore[override] uninstall: Any egg_path: Any setup_path: Any diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/easy_install.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/easy_install.pyi index 11f8224e69ad..6d7bc390a8da 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/easy_install.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/easy_install.pyi @@ -1,4 +1,4 @@ -from typing import Any, List +from typing import Any from pkg_resources import Environment from setuptools import Command, SetuptoolsDeprecationWarning @@ -106,7 +106,7 @@ class RewritePthDistributions(PthDistributions): prelude: Any postlude: Any -class CommandSpec(List[str]): +class CommandSpec(list[str]): options: Any split_args: Any @classmethod diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/test.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/test.pyi index c18892669ce3..8d0309ab023e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/test.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/test.pyi @@ -9,7 +9,7 @@ _T = TypeVar("_T") class ScanningLoader(TestLoader): def __init__(self) -> None: ... - def loadTestsFromModule(self, module: ModuleType, pattern: Any | None = ...) -> list[TestSuite]: ... # type: ignore + def loadTestsFromModule(self, module: ModuleType, pattern: Any | None = ...) -> list[TestSuite]: ... 
# type: ignore[override] class NonDataProperty(Generic[_T]): fget: Callable[..., _T] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/upload_docs.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/upload_docs.pyi index 21ef5416d55c..0660bff722dd 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/upload_docs.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/command/upload_docs.pyi @@ -15,4 +15,4 @@ class upload_docs(upload): def finalize_options(self) -> None: ... def create_zipfile(self, filename) -> None: ... def run(self) -> None: ... - def upload_file(self, filename) -> None: ... # type: ignore + def upload_file(self, filename) -> None: ... # type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/lib2to3_ex.pyi b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/lib2to3_ex.pyi index a7cc306bc72b..d8194ae031f7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/lib2to3_ex.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/setuptools/setuptools/lib2to3_ex.pyi @@ -2,7 +2,7 @@ from distutils.util import Mixin2to3 as _Mixin2to3 from lib2to3.refactor import RefactoringTool class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw) -> None: ... # type: ignore + def log_error(self, msg, *args, **kw) -> None: ... # type: ignore[override] def log_message(self, msg, *args) -> None: ... def log_debug(self, msg, *args) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simplejson/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/simplejson/METADATA.toml index 7a888dfdf446..e8d5c242ddd3 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/simplejson/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/simplejson/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.17" +version = "3.17.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/simplejson/simplejson/errors.pyi b/packages/pyright-internal/typeshed-fallback/stubs/simplejson/simplejson/errors.pyi new file mode 100644 index 000000000000..10cff3f28203 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/simplejson/simplejson/errors.pyi @@ -0,0 +1,16 @@ +__all__ = ["JSONDecodeError"] + +def linecol(doc: str, pos: int) -> tuple[int, int]: ... +def errmsg(msg: str, doc: str, pos: int, end: int | None = ...) -> str: ... + +class JSONDecodeError(ValueError): + msg: str + doc: str + pos: int + end: int | None + lineno: int + colno: int + endlineno: int | None + endcolno: int | None + def __init__(self, msg: str, doc: str, pos: int, end: int | None = ...) -> None: ... + def __reduce__(self) -> tuple[JSONDecodeError, tuple[str, str, int, int | None]]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/METADATA.toml index 9509ba1a7b30..839f7d3a9b8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/METADATA.toml @@ -1,2 +1,2 @@ -version = "3.7" +version = "3.7.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/singledispatch.pyi b/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/singledispatch.pyi index f264047ac201..28c735b8f125 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/singledispatch.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/singledispatch/singledispatch.pyi @@ -13,3 +13,15 @@ class _SingleDispatchCallable(Generic[_T]): def __call__(self, *args: Any, **kwargs: Any) -> _T: ... def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +class singledispatchmethod(Generic[_T]): + dispatcher: _SingleDispatchCallable[_T] + func: Callable[..., _T] + def __init__(self, func: Callable[..., _T]) -> None: ... + @overload + def register(self, cls: type[Any], method: None = ...) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def register(self, cls: Callable[..., _T], method: None = ...) -> Callable[..., _T]: ... + @overload + def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... + def __call__(self, *args: Any, **kwargs: Any) -> _T: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/six/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/six/METADATA.toml index ff19b61939c1..90e0456946aa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/six/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/six/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.16" +version = "1.16.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/six/six/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/six/six/__init__.pyi index 1f36799d6cfe..1c79752e0412 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/six/six/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/six/six/__init__.pyi @@ -1,12 +1,14 @@ from __future__ import print_function +import builtins import types import unittest from builtins import next as next from collections.abc import Callable, ItemsView, Iterable, Iterator as _Iterator, KeysView, Mapping, ValuesView from functools import wraps as wraps +from importlib.util import spec_from_loader as spec_from_loader from io import BytesIO as BytesIO, StringIO as StringIO -from typing import Any, AnyStr, NoReturn, Pattern, Tuple, Type, TypeVar, overload +from typing import Any, AnyStr, NoReturn, Pattern, TypeVar, overload from typing_extensions import Literal from . import moves as moves @@ -22,26 +24,29 @@ PY2: Literal[False] PY3: Literal[True] PY34: Literal[True] -string_types: tuple[Type[str]] -integer_types: tuple[Type[int]] -class_types: tuple[Type[Type[Any]]] +string_types: tuple[type[str]] +integer_types: tuple[type[int]] +class_types: tuple[type[type[Any]]] text_type = str binary_type = bytes MAXSIZE: int -def callable(obj: object) -> bool: ... +callable = builtins.callable + def get_unbound_function(unbound: types.FunctionType) -> types.FunctionType: ... -def create_bound_method(func: types.FunctionType, obj: object) -> types.MethodType: ... 
+ +create_bound_method = types.MethodType + def create_unbound_method(func: types.FunctionType, cls: type) -> types.FunctionType: ... Iterator = object def get_method_function(meth: types.MethodType) -> types.FunctionType: ... def get_method_self(meth: types.MethodType) -> object | None: ... -def get_function_closure(fun: types.FunctionType) -> Tuple[types._Cell, ...] | None: ... +def get_function_closure(fun: types.FunctionType) -> tuple[types._Cell, ...] | None: ... def get_function_code(fun: types.FunctionType) -> types.CodeType: ... -def get_function_defaults(fun: types.FunctionType) -> Tuple[Any, ...] | None: ... +def get_function_defaults(fun: types.FunctionType) -> tuple[Any, ...] | None: ... def get_function_globals(fun: types.FunctionType) -> dict[str, Any]: ... def iterkeys(d: Mapping[_K, Any]) -> _Iterator[_K]: ... def itervalues(d: Mapping[Any, _V]) -> _Iterator[_V]: ... @@ -72,8 +77,8 @@ def assertRegex( exec_ = exec -def reraise(tp: Type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None = ...) -> NoReturn: ... -def raise_from(value: BaseException | Type[BaseException], from_value: BaseException | None) -> NoReturn: ... +def reraise(tp: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None = ...) -> NoReturn: ... +def raise_from(value: BaseException | type[BaseException], from_value: BaseException | None) -> NoReturn: ... print_ = print @@ -87,7 +92,7 @@ def python_2_unicode_compatible(klass: _T) -> _T: ... class _LazyDescr: name: str def __init__(self, name: str) -> None: ... - def __get__(self, obj: object | None, type: Type[Any] | None = ...) -> Any: ... + def __get__(self, obj: object | None, tp: object) -> Any: ... 
class MovedModule(_LazyDescr): mod: str diff --git a/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/__init__.pyi index 3455676db51e..ae64ae674f03 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/__init__.pyi @@ -20,7 +20,6 @@ from sys import intern as intern # import tkinter.scrolledtext as tkinter_scrolledtext # import tkinter.simpledialog as tkinter_simpledialog # import tkinter.tix as tkinter_tix -# import copyreg as copyreg # import dbm.gnu as dbm_gnu from . import ( BaseHTTPServer as BaseHTTPServer, @@ -30,6 +29,7 @@ from . import ( _thread as _thread, builtins as builtins, configparser as configparser, + copyreg as copyreg, cPickle as cPickle, email_mime_base as email_mime_base, email_mime_multipart as email_mime_multipart, diff --git a/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/_dummy_thread.pyi b/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/_dummy_thread.pyi index 24879612a769..410232d07417 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/_dummy_thread.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/_dummy_thread.pyi @@ -1 +1,6 @@ -from _dummy_thread import * +import sys + +if sys.version_info >= (3, 9): + from _thread import * +else: + from _dummy_thread import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/copyreg.pyi b/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/copyreg.pyi new file mode 100644 index 000000000000..1848b74c35a3 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/six/six/moves/copyreg.pyi @@ -0,0 +1 @@ +from copyreg import * diff --git a/packages/pyright-internal/typeshed-fallback/stubs/slumber/METADATA.toml 
b/packages/pyright-internal/typeshed-fallback/stubs/slumber/METADATA.toml index 12c154c06af8..7431acfe0fa8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/slumber/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/slumber/METADATA.toml @@ -1 +1 @@ -version = "0.7" +version = "0.7.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/stripe/METADATA.toml index 62a74a90d64b..5d262932f608 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/METADATA.toml @@ -1 +1 @@ -version = "2.59" +version = "2.59.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi index cd8de7131fbc..154355211a46 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/abstract/deletable_api_resource.pyi @@ -1,4 +1,7 @@ +from _typeshed import Self + from stripe.api_resources.abstract.api_resource import APIResource as APIResource class DeletableAPIResource(APIResource): - def delete(self, **params): ... + @classmethod + def delete(cls: type[Self], sid: str = ..., **params) -> Self: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/account.pyi b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/account.pyi index 8851ecb89d1a..39cb243e9763 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/account.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/account.pyi @@ -21,3 +21,53 @@ class Account(CreateableAPIResource, DeletableAPIResource, ListableAPIResource): def persons(self, **params): ... def deauthorize(self, **params): ... def serialize(self, previous): ... + @classmethod + def capabilitys_url(cls, id, nested_id=...): ... + @classmethod + def capabilitys_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def retrieve_capability(cls, id, nested_id, **params): ... + @classmethod + def modify_capability(cls, id, nested_id, **params): ... + @classmethod + def list_capabilities(cls, id, **params): ... + @classmethod + def external_accounts_url(cls, id, nested_id=...): ... + @classmethod + def external_accounts_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_external_account(cls, id, **params): ... + @classmethod + def retrieve_external_account(cls, id, nested_id, **params): ... + @classmethod + def modify_external_account(cls, id, nested_id, **params): ... + @classmethod + def delete_external_account(cls, id, nested_id, **params): ... + @classmethod + def list_external_accounts(cls, id, **params): ... + @classmethod + def login_links_url(cls, id, nested_id=...): ... + @classmethod + def login_links_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_login_link(cls, id, **params): ... 
+ @classmethod + def persons_url(cls, id, nested_id=...): ... + @classmethod + def persons_request(cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params): ... + @classmethod + def create_person(cls, id, **params): ... + @classmethod + def retrieve_person(cls, id, nested_id, **params): ... + @classmethod + def modify_person(cls, id, nested_id, **params): ... + @classmethod + def delete_person(cls, id, nested_id, **params): ... + @classmethod + def list_persons(cls, id, **params): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/customer.pyi b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/customer.pyi index c8694d0ff21c..4010c7a5c2be 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/customer.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/customer.pyi @@ -11,3 +11,43 @@ from stripe.api_resources.abstract import ( class Customer(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): OBJECT_NAME: str def delete_discount(self, **params) -> None: ... + @classmethod + def balance_transactions_url(cls, id, nested_id=...): ... + @classmethod + def balance_transactions_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_balance_transaction(cls, id, **params): ... + @classmethod + def retrieve_balance_transaction(cls, id, nested_id, **params): ... + @classmethod + def modify_balance_transaction(cls, id, nested_id, **params): ... + @classmethod + def list_balance_transactions(cls, id, **params): ... + @classmethod + def sources_url(cls, id, nested_id=...): ... + @classmethod + def sources_request(cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params): ... 
+ @classmethod + def create_source(cls, id, **params): ... + @classmethod + def retrieve_source(cls, id, nested_id, **params): ... + @classmethod + def modify_source(cls, id, nested_id, **params): ... + @classmethod + def delete_source(cls, id, nested_id, **params): ... + @classmethod + def list_sources(cls, id, **params): ... + @classmethod + def tax_ids_url(cls, id, nested_id=...): ... + @classmethod + def tax_ids_request(cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params): ... + @classmethod + def create_tax_id(cls, id, **params): ... + @classmethod + def retrieve_tax_id(cls, id, nested_id, **params): ... + @classmethod + def delete_tax_id(cls, id, nested_id, **params): ... + @classmethod + def list_tax_ids(cls, id, **params): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/subscription_item.pyi b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/subscription_item.pyi index b107e7eeef47..df891e925e34 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/subscription_item.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/api_resources/subscription_item.pyi @@ -9,3 +9,19 @@ from stripe.api_resources.abstract import ( class SubscriptionItem(CreateableAPIResource, DeletableAPIResource, ListableAPIResource, UpdateableAPIResource): OBJECT_NAME: str def usage_record_summaries(self, **params): ... + @classmethod + def usage_records_url(cls, id, nested_id=...): ... + @classmethod + def usage_records_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def create_usage_record(cls, id, **params): ... + @classmethod + def usage_record_summarys_url(cls, id, nested_id=...): ... 
+ @classmethod + def usage_record_summarys_request( + cls, method, url, api_key=..., idempotency_key=..., stripe_version=..., stripe_account=..., **params + ): ... + @classmethod + def list_usage_record_summaries(cls, id, **params): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/stripe_object.pyi b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/stripe_object.pyi index 25ed72a9de70..f9de1deeb15f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/stripe_object.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/stripe_object.pyi @@ -1,11 +1,12 @@ import json -from typing import Any, Dict +from typing import Any from stripe import api_requestor as api_requestor -class StripeObject(Dict[Any, Any]): +class StripeObject(dict[Any, Any]): class ReprJSONEncoder(json.JSONEncoder): def default(self, obj): ... + def __init__( self, id: Any | None = ..., @@ -50,5 +51,5 @@ class StripeObject(Dict[Any, Any]): @property def stripe_id(self): ... def serialize(self, previous): ... - def __copy__(self): ... - def __deepcopy__(self, memo): ... + def __copy__(self) -> StripeObject: ... + def __deepcopy__(self, memo: Any) -> StripeObject: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/util.pyi b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/util.pyi index 67f93be7560b..e3590f18867a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/util.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/stripe/stripe/util.pyi @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, overload def utf8(value): ... def log_debug(message, **params) -> None: ... @@ -12,3 +12,8 @@ class class_method_variant: method: Any def __call__(self, method): ... def __get__(self, obj, objtype: Any | None = ...): ... + +@overload +def populate_headers(idempotency_key: None) -> None: ... 
+@overload +def populate_headers(idempotency_key: str) -> dict[str, str]: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tabulate/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/tabulate/METADATA.toml index 59d3db90a226..78fa4d13f8bb 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/tabulate/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/tabulate/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.8" +version = "0.8.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tabulate/tabulate.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tabulate/tabulate.pyi index 8b5efde5bf08..1d0daa981c6c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/tabulate/tabulate.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/tabulate/tabulate.pyi @@ -1,4 +1,4 @@ -from typing import Any, Callable, Container, Iterable, List, Mapping, NamedTuple, Sequence, Union +from typing import Any, Callable, Container, Iterable, Mapping, NamedTuple, Sequence, Union LATEX_ESCAPE_RULES: dict[str, str] MIN_PADDING: int @@ -18,8 +18,8 @@ class DataRow(NamedTuple): sep: str end: str -_TableFormatLine = Union[None, Line, Callable[[List[int], List[str]], str]] -_TableFormatRow = Union[None, DataRow, Callable[[List[Any], List[int], List[str]], str]] +_TableFormatLine = Union[None, Line, Callable[[list[int], list[str]], str]] +_TableFormatRow = Union[None, DataRow, Callable[[list[Any], list[int], list[str]], str]] class TableFormat(NamedTuple): lineabove: _TableFormatLine diff --git a/packages/pyright-internal/typeshed-fallback/stubs/termcolor/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/termcolor/METADATA.toml index cf311fdb6550..c0a0050d4a68 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/termcolor/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/termcolor/METADATA.toml @@ -1,2 +1,2 @@ -version = "1.1" +version = "1.1.*" python2 = 
true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/toml/METADATA.toml index b4e82d6f7834..343f2c4782a6 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/toml/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/toml/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.10" +version = "0.10.*" python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml.pyi deleted file mode 100644 index 3f8580b33376..000000000000 --- a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml.pyi +++ /dev/null @@ -1,19 +0,0 @@ -import sys -from _typeshed import StrPath, SupportsWrite -from typing import IO, Any, Mapping, MutableMapping, Text, Type, Union - -if sys.version_info >= (3, 6): - _PathLike = StrPath -elif sys.version_info >= (3, 4): - import pathlib - - _PathLike = Union[StrPath, pathlib.PurePath] -else: - _PathLike = StrPath - -class TomlDecodeError(Exception): ... - -def load(f: _PathLike | list[Text] | IO[str], _dict: Type[MutableMapping[str, Any]] = ...) -> MutableMapping[str, Any]: ... -def loads(s: Text, _dict: Type[MutableMapping[str, Any]] = ...) -> MutableMapping[str, Any]: ... -def dump(o: Mapping[str, Any], f: SupportsWrite[str]) -> str: ... -def dumps(o: Mapping[str, Any]) -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/__init__.pyi new file mode 100644 index 000000000000..61bf348596a1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/__init__.pyi @@ -0,0 +1,18 @@ +from . 
import decoder as decoder, encoder as encoder +from .decoder import ( + TomlDecodeError as TomlDecodeError, + TomlDecoder as TomlDecoder, + TomlPreserveCommentDecoder as TomlPreserveCommentDecoder, + load as load, + loads as loads, +) +from .encoder import ( + TomlArraySeparatorEncoder as TomlArraySeparatorEncoder, + TomlEncoder as TomlEncoder, + TomlNumpyEncoder as TomlNumpyEncoder, + TomlPathlibEncoder as TomlPathlibEncoder, + TomlPreserveCommentEncoder as TomlPreserveCommentEncoder, + TomlPreserveInlineDictEncoder as TomlPreserveInlineDictEncoder, + dump as dump, + dumps as dumps, +) diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/decoder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/decoder.pyi new file mode 100644 index 000000000000..6996ba5f75a5 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/decoder.pyi @@ -0,0 +1,74 @@ +import sys +from _typeshed import SupportsRead +from typing import Any, Callable, Generic, MutableMapping, Pattern, Text, TypeVar, overload + +_MutableMappingT = TypeVar("_MutableMappingT", bound=MutableMapping[str, Any]) + +if sys.version_info >= (3, 0): + from pathlib import PurePath + + FNFError = FileNotFoundError + _PathLike = str | bytes | PurePath +else: + FNFError = IOError + _PathLike = Text + +TIME_RE: Pattern[str] + +class TomlDecodeError(ValueError): + msg: str + doc: str + pos: int + lineno: int + colno: int + def __init__(self, msg: str, doc: str, pos: int) -> None: ... + +class CommentValue: + val: Any + comment: str + def __init__(self, val: Any, comment: str, beginline: bool, _dict: type[MutableMapping[str, Any]]) -> None: ... + def __getitem__(self, key: Any) -> Any: ... + def __setitem__(self, key: Any, value: Any) -> None: ... + def dump(self, dump_value_func: Callable[[Any], str]) -> str: ... 
+ +@overload +def load( + f: _PathLike | list[Any] | SupportsRead[Text], # list[_PathLike] is invariance + _dict: type[_MutableMappingT], + decoder: TomlDecoder[_MutableMappingT] | None = ..., +) -> _MutableMappingT: ... +@overload +def load( + f: _PathLike | list[Any] | SupportsRead[Text], # list[_PathLike] is invariance + _dict: type[dict[str, Any]] = ..., + decoder: TomlDecoder[dict[str, Any]] | None = ..., +) -> dict[str, Any]: ... +@overload +def loads(s: Text, _dict: type[_MutableMappingT], decoder: TomlDecoder[_MutableMappingT] | None = ...) -> _MutableMappingT: ... +@overload +def loads(s: Text, _dict: type[dict[str, Any]] = ..., decoder: TomlDecoder[dict[str, Any]] | None = ...) -> dict[str, Any]: ... + +class InlineTableDict: ... + +class TomlDecoder(Generic[_MutableMappingT]): + _dict: type[_MutableMappingT] + @overload + def __init__(self, _dict: type[_MutableMappingT]) -> None: ... + @overload + def __init__(self: TomlDecoder[dict[str, Any]], _dict: type[dict[str, Any]] = ...) -> None: ... + def get_empty_table(self) -> _MutableMappingT: ... + def get_empty_inline_table(self) -> InlineTableDict: ... # incomplete python/typing#213 + def load_inline_object( + self, line: str, currentlevel: _MutableMappingT, multikey: bool = ..., multibackslash: bool = ... + ) -> None: ... + def load_line( + self, line: str, currentlevel: _MutableMappingT, multikey: bool | None, multibackslash: bool + ) -> tuple[bool | None, str, bool] | None: ... + def load_value(self, v: str, strictly_valid: bool = ...) -> tuple[Any, str]: ... + def bounded_string(self, s: str) -> bool: ... + def load_array(self, a: str) -> list[Any]: ... + def preserve_comment(self, line_no: int, key: str, comment: str, beginline: bool) -> None: ... + def embed_comments(self, idx: int, currentlevel: _MutableMappingT) -> None: ... 
+ +class TomlPreserveCommentDecoder(TomlDecoder[_MutableMappingT]): + saved_comments: dict[int, tuple[str, str, bool]] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/encoder.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/encoder.pyi new file mode 100644 index 000000000000..0fdd7145b948 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/encoder.pyi @@ -0,0 +1,44 @@ +from _typeshed import SupportsWrite +from typing import Any, Callable, Generic, Iterable, Mapping, MutableMapping, TypeVar, overload + +_MappingT = TypeVar("_MappingT", bound=Mapping[str, Any]) + +def dump(o: _MappingT, f: SupportsWrite[str], encoder: TomlEncoder[_MappingT] | None = ...) -> str: ... +def dumps(o: _MappingT, encoder: TomlEncoder[_MappingT] | None = ...) -> str: ... + +class TomlEncoder(Generic[_MappingT]): + _dict: type[_MappingT] + preserve: bool + dump_funcs: MutableMapping[type[Any], Callable[[Any], str]] + @overload + def __init__(self, _dict: type[_MappingT], preserve: bool = ...) -> None: ... + @overload + def __init__(self: TomlEncoder[dict[str, Any]], _dict: type[dict[str, Any]] = ..., preserve: bool = ...) -> None: ... + def get_empty_table(self) -> _MappingT: ... + def dump_list(self, v: Iterable[object]) -> str: ... + def dump_inline_table(self, section: dict[str, Any] | Any) -> str: ... + def dump_value(self, v: Any) -> str: ... + def dump_sections(self, o: _MappingT, sup: str) -> tuple[str, _MappingT]: ... + +class TomlPreserveInlineDictEncoder(TomlEncoder[_MappingT]): + @overload + def __init__(self, _dict: type[_MappingT]) -> None: ... + @overload + def __init__(self: TomlPreserveInlineDictEncoder[dict[str, Any]], _dict: type[dict[str, Any]] = ...) -> None: ... + +class TomlArraySeparatorEncoder(TomlEncoder[_MappingT]): + separator: str + @overload + def __init__(self, _dict: type[_MappingT], preserve: bool = ..., separator: str = ...) -> None: ... 
+ @overload + def __init__( + self: TomlArraySeparatorEncoder[dict[str, Any]], + _dict: type[dict[str, Any]] = ..., + preserve: bool = ..., + separator: str = ..., + ) -> None: ... + def dump_list(self, v: Iterable[Any]) -> str: ... + +class TomlNumpyEncoder(TomlEncoder[_MappingT]): ... +class TomlPreserveCommentEncoder(TomlEncoder[_MappingT]): ... +class TomlPathlibEncoder(TomlEncoder[_MappingT]): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/ordered.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/ordered.pyi new file mode 100644 index 000000000000..1c2f223b1757 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/ordered.pyi @@ -0,0 +1,11 @@ +from collections import OrderedDict +from typing import Any + +from .decoder import TomlDecoder +from .encoder import TomlEncoder + +class TomlOrderedDecoder(TomlDecoder[OrderedDict[str, Any]]): + def __init__(self) -> None: ... + +class TomlOrderedEncoder(TomlEncoder[OrderedDict[str, Any]]): + def __init__(self) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/tz.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/tz.pyi new file mode 100644 index 000000000000..398a3e5cd777 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/toml/toml/tz.pyi @@ -0,0 +1,10 @@ +from _typeshed import Self +from datetime import datetime, timedelta, tzinfo +from typing import Any + +class TomlTz(tzinfo): + def __init__(self, toml_offset: str) -> None: ... + def __deepcopy__(self: Self, memo: Any) -> Self: ... + def tzname(self, dt: datetime | None) -> str: ... + def utcoffset(self, dt: datetime | None) -> timedelta: ... + def dst(self, dt: datetime | None) -> timedelta: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toposort/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/toposort/METADATA.toml index 83bb457dfdfe..ccb25250bba1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/toposort/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/toposort/METADATA.toml @@ -1 +1 @@ -version = "1.6" +version = "1.7" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/toposort/toposort.pyi b/packages/pyright-internal/typeshed-fallback/stubs/toposort/toposort.pyi index 9410d82799fc..ccde29046763 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/toposort/toposort.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/toposort/toposort.pyi @@ -1,10 +1,16 @@ -from typing import Any, Iterator, TypeVar +from _typeshed import SupportsItems +from typing import Any, Iterable, Iterator, Protocol, TypeVar +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) _T = TypeVar("_T") +class _SupportsItemsAndLen(SupportsItems[_KT_co, _VT_co], Protocol[_KT_co, _VT_co]): + def __len__(self) -> int: ... + class CircularDependencyError(ValueError): data: dict[Any, set[Any]] def __init__(self, data: dict[Any, set[Any]]) -> None: ... -def toposort(data: dict[_T, set[_T]]) -> Iterator[set[_T]]: ... -def toposort_flatten(data: dict[_T, set[_T]], sort: bool = ...) -> list[_T]: ... +def toposort(data: _SupportsItemsAndLen[_T, Iterable[_T]]) -> Iterator[set[_T]]: ... +def toposort_flatten(data: _SupportsItemsAndLen[_T, Iterable[_T]], sort: bool = ...) -> list[_T]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/METADATA.toml index ffc5a1c5e98b..38c94680a9a8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/METADATA.toml @@ -1 +1 @@ -version = "3.2" +version = "3.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_style.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_style.pyi index 9566fb3f3352..e70eb8a481a5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_style.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_style.pyi @@ -8,5 +8,5 @@ class ThemedStyle(ttk.Style, ThemedWidget): self, master: tkinter.Misc | None = ..., *, theme: str | None = ..., gif_override: bool | None = ..., **kwargs ) -> None: ... # theme_use() can't return None (differs from ttk.Style) - def theme_use(self, theme_name: str | None = ...) -> str: ... # type: ignore - def theme_names(self) -> list[str]: ... # type: ignore + def theme_use(self, theme_name: str | None = ...) -> str: ... # type: ignore[override] + def theme_names(self) -> list[str]: ... # type: ignore[override] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_tk.pyi b/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_tk.pyi index 3a2c9d748be1..1cf27bf3a00e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_tk.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/ttkthemes/ttkthemes/themed_tk.pyi @@ -23,8 +23,8 @@ class ThemedTk(tkinter.Tk, ThemedWidget): ) -> None: ... def set_theme(self, theme_name, toplevel: bool | None = ..., themebg: bool | None = ...) -> None: ... 
# TODO: currently no good way to say "use the same big list of kwargs as parent class but also add these" - def config(self, kw: Any | None = ..., **kwargs): ... # type: ignore + def config(self, kw: Any | None = ..., **kwargs): ... # type: ignore[override] def cget(self, k): ... - def configure(self, kw: Any | None = ..., **kwargs): ... # type: ignore + def configure(self, kw: Any | None = ..., **kwargs): ... # type: ignore[override] def __getitem__(self, k): ... def __setitem__(self, k, v): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/METADATA.toml index 46f5153ea243..97ceca8aa9f5 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/METADATA.toml @@ -1 +1 @@ -version = "1.4" +version = "1.5.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast27.pyi b/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast27.pyi index 93dbae12ee4d..f7758e881506 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast27.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast27.pyi @@ -1,4 +1,3 @@ -import typing from typing import Any, Iterator class NodeVisitor: @@ -15,7 +14,7 @@ def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> bytes | None: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... -def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... def literal_eval(node_or_string: str | AST) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... @@ -26,8 +25,8 @@ PyCF_ONLY_AST: int identifier = str class AST: - _attributes: typing.Tuple[str, ...] - _fields: typing.Tuple[str, ...] 
+ _attributes: tuple[str, ...] + _fields: tuple[str, ...] def __init__(self, *args: Any, **kwargs: Any) -> None: ... class mod(AST): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast3.pyi b/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast3.pyi index 8d0a830541a9..996b07904e50 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast3.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/typed-ast/typed_ast/ast3.pyi @@ -1,4 +1,3 @@ -import typing from typing import Any, Iterator class NodeVisitor: @@ -15,7 +14,7 @@ def fix_missing_locations(node: AST) -> AST: ... def get_docstring(node: AST, clean: bool = ...) -> str | None: ... def increment_lineno(node: AST, n: int = ...) -> AST: ... def iter_child_nodes(node: AST) -> Iterator[AST]: ... -def iter_fields(node: AST) -> Iterator[typing.Tuple[str, Any]]: ... +def iter_fields(node: AST) -> Iterator[tuple[str, Any]]: ... def literal_eval(node_or_string: str | AST) -> Any: ... def walk(node: AST) -> Iterator[AST]: ... @@ -26,8 +25,8 @@ PyCF_ONLY_AST: int identifier = str class AST: - _attributes: typing.Tuple[str, ...] - _fields: typing.Tuple[str, ...] + _attributes: tuple[str, ...] + _fields: tuple[str, ...] def __init__(self, *args: Any, **kwargs: Any) -> None: ... class mod(AST): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/METADATA.toml index 43a3246aeece..d029dc444cfc 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/METADATA.toml @@ -1,3 +1,2 @@ -version = "0.1" -python2 = true +version = "4.1" requires = ["types-pytz"] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/tzlocal/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/tzlocal/__init__.pyi index 7df53ad0ade1..81c2351ad2e7 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/tzlocal/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/tzlocal/tzlocal/__init__.pyi @@ -2,3 +2,4 @@ from pytz import BaseTzInfo def reload_localzone() -> None: ... def get_localzone() -> BaseTzInfo: ... +def get_localzone_name() -> str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/ujson/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/ujson/METADATA.toml index 31f638bf33df..cb7498d03056 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/ujson/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/ujson/METADATA.toml @@ -1,2 +1 @@ -version = "0.1" -python2 = true +version = "4.2.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/METADATA.toml new file mode 100644 index 000000000000..aae196c41ab1 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/METADATA.toml @@ -0,0 +1,2 @@ +version = "1.26.*" +python2 = true diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/__init__.pyi similarity index 97% rename from 
packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/__init__.pyi index f63b8fa12da6..c24a98865d32 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/__init__.pyi @@ -5,6 +5,7 @@ from . import connectionpool, filepost, poolmanager, response from .util import request as _request, retry, timeout, url __license__: Any +__version__: str HTTPConnectionPool = connectionpool.HTTPConnectionPool HTTPSConnectionPool = connectionpool.HTTPSConnectionPool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/_collections.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/_collections.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/_collections.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/_collections.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/connection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/connection.pyi similarity index 95% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/connection.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/connection.pyi index 39d0f6abbc93..9493332d7027 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/connection.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/connection.pyi @@ -1,6 +1,6 @@ import ssl import sys -from typing import Any +from typing import IO, Any, Iterable from . 
import exceptions, util from .packages import ssl_match_hostname @@ -11,8 +11,11 @@ if sys.version_info >= (3, 0): from http.client import HTTPConnection as _HTTPConnection, HTTPException as HTTPException else: from httplib import HTTPConnection as _HTTPConnection, HTTPException as HTTPException + class ConnectionError(Exception): ... +_TYPE_BODY = bytes | IO[Any] | Iterable[bytes] | str + class DummyConnection: ... BaseSSLError = ssl.SSLError diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/connectionpool.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/connectionpool.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/connectionpool.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/connectionpool.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/contrib/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/contrib/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/exceptions.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/exceptions.pyi new file mode 100644 index 000000000000..8c2090613a8e --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/exceptions.pyi @@ -0,0 +1,87 @@ +from email.errors import MessageDefect +from http.client import IncompleteRead as httplib_IncompleteRead +from typing import Any + +from urllib3.connectionpool import ConnectionPool, HTTPResponse +from urllib3.util.retry import Retry + +class HTTPError(Exception): ... +class HTTPWarning(Warning): ... + +class PoolError(HTTPError): + pool: ConnectionPool + def __init__(self, pool: ConnectionPool, message: str) -> None: ... 
+ +class RequestError(PoolError): + url: str + def __init__(self, pool: ConnectionPool, url: str, message: str) -> None: ... + +class SSLError(HTTPError): ... + +class ProxyError(HTTPError): + original_error: Exception + def __init__(self, message: str, error: Exception, *args: Any) -> None: ... + +class DecodeError(HTTPError): ... +class ProtocolError(HTTPError): ... + +ConnectionError = ProtocolError + +class MaxRetryError(RequestError): + reason: Exception | None + def __init__(self, pool: ConnectionPool, url: str, reason: Exception | None = ...) -> None: ... + +class HostChangedError(RequestError): + retries: Retry | int + def __init__(self, pool: ConnectionPool, url: str, retries: Retry | int = ...) -> None: ... + +class TimeoutStateError(HTTPError): ... +class TimeoutError(HTTPError): ... +class ReadTimeoutError(TimeoutError, RequestError): ... +class ConnectTimeoutError(TimeoutError): ... +class NewConnectionError(ConnectTimeoutError, HTTPError): ... +class EmptyPoolError(PoolError): ... +class ClosedPoolError(PoolError): ... +class LocationValueError(ValueError, HTTPError): ... + +class LocationParseError(LocationValueError): + location: str + def __init__(self, location: str) -> None: ... + +class URLSchemeUnknown(LocationValueError): + scheme: str + def __init__(self, scheme: str) -> None: ... + +class ResponseError(HTTPError): + GENERIC_ERROR: str + SPECIFIC_ERROR: str + +class SecurityWarning(HTTPWarning): ... +class SubjectAltNameWarning(SecurityWarning): ... +class InsecureRequestWarning(SecurityWarning): ... +class SystemTimeWarning(SecurityWarning): ... +class InsecurePlatformWarning(SecurityWarning): ... +class SNIMissingWarning(HTTPWarning): ... +class DependencyWarning(HTTPWarning): ... +class ResponseNotChunked(ProtocolError, ValueError): ... +class BodyNotHttplibCompatible(HTTPError): ... + +class IncompleteRead(HTTPError, httplib_IncompleteRead): + def __init__(self, partial: bytes, expected: int | None) -> None: ... 
+ +class InvalidChunkLength(HTTPError, httplib_IncompleteRead): + response: HTTPResponse + length: bytes + def __init__(self, response: HTTPResponse, length: bytes) -> None: ... + +class InvalidHeader(HTTPError): ... + +class ProxySchemeUnknown(AssertionError, URLSchemeUnknown): + def __init__(self, scheme: str | None) -> None: ... + +class ProxySchemeUnsupported(ValueError): ... + +class HeaderParsingError(HTTPError): + def __init__(self, defects: list[MessageDefect], unparsed_data: str | bytes | None) -> None: ... + +class UnrewindableBodyError(HTTPError): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/fields.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/fields.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/fields.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/fields.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/filepost.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/filepost.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/filepost.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/filepost.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/ssl_match_hostname/__init__.pyi similarity index 100% rename from 
packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/ssl_match_hostname/__init__.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/ssl_match_hostname/_implementation.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/ssl_match_hostname/_implementation.pyi new file mode 100644 index 000000000000..c21998013cf2 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/packages/ssl_match_hostname/_implementation.pyi @@ -0,0 +1,3 @@ +class CertificateError(ValueError): ... + +def match_hostname(cert, hostname): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/poolmanager.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/poolmanager.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/poolmanager.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/poolmanager.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/request.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/request.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/request.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/request.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/response.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/response.pyi new file mode 100644 index 000000000000..ff63225cc018 --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/response.pyi @@ -0,0 +1,100 @@ +import io +from _typeshed import Self +from 
http.client import HTTPMessage as _HttplibHTTPMessage, HTTPResponse as _HttplibHTTPResponse +from typing import Any, Iterator, Mapping +from typing_extensions import Literal + +from urllib3.connectionpool import HTTPConnection + +from . import HTTPConnectionPool, Retry +from ._collections import HTTPHeaderDict +from .connection import _TYPE_BODY + +class DeflateDecoder: + def __init__(self) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def decompress(self, data: bytes) -> bytes: ... + +class GzipDecoderState: + FIRST_MEMBER: Literal[0] + OTHER_MEMBERS: Literal[1] + SWALLOW_DATA: Literal[2] + +class GzipDecoder: + def __init__(self) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def decompress(self, data: bytes) -> bytes: ... + +# This class is only available if +# `brotli` is available for import. +class BrotliDecoder: + def __init__(self) -> None: ... + def flush(self) -> bytes: ... + +class MultiDecoder: + def __init__(self, modes: str) -> None: ... + def flush(self) -> bytes: ... + def decompress(self, data: bytes) -> bytes: ... 
+ +class HTTPResponse(io.IOBase): + CONTENT_DECODERS: list[str] + REDIRECT_STATUSES: list[int] + headers: HTTPHeaderDict + status: int + version: int + reason: str | None + strict: int + decode_content: bool + retries: Retry | None + enforce_content_length: bool + auto_close: bool + msg: _HttplibHTTPMessage | None + chunked: bool + chunk_left: int | None + length_remaining: int | None + def __init__( + self, + body: _TYPE_BODY = ..., + headers: Mapping[str, str] | Mapping[bytes, bytes] | None = ..., + status: int = ..., + version: int = ..., + reason: str | None = ..., + strict: int = ..., + preload_content: bool = ..., + decode_content: bool = ..., + original_response: _HttplibHTTPResponse | None = ..., + pool: HTTPConnectionPool | None = ..., + connection: HTTPConnection | None = ..., + msg: _HttplibHTTPMessage | None = ..., + retries: Retry | None = ..., + enforce_content_length: bool = ..., + request_method: str | None = ..., + request_url: str | None = ..., + auto_close: bool = ..., + ) -> None: ... + def get_redirect_location(self) -> Literal[False] | str | None: ... + def release_conn(self) -> None: ... + def drain_conn(self) -> None: ... + @property + def data(self) -> bytes | Any: ... + @property + def connection(self) -> HTTPConnection | Any: ... + def isclosed(self) -> bool: ... + def tell(self) -> int: ... + def read(self, amt: int | None = ..., decode_content: bool | None = ..., cache_content: bool = ...) -> bytes: ... + def stream(self, amt: int | None = ..., decode_content: bool | None = ...) -> Iterator[bytes]: ... + @classmethod + def from_httplib(cls: type[Self], r: _HttplibHTTPResponse, **response_kw: Any) -> Self: ... + def getheaders(self) -> HTTPHeaderDict: ... + def getheader(self, name, default=...) -> str | None: ... + def info(self) -> HTTPHeaderDict: ... + def close(self) -> None: ... + @property + def closed(self) -> bool: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def readable(self) -> bool: ... 
+ def readinto(self, b: bytearray) -> int: ... + def supports_chunked_reads(self) -> bool: ... + def read_chunked(self, amt: int | None = ..., decode_content: bool | None = ...) -> Iterator[bytes]: ... + def geturl(self) -> bool | str: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/__init__.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/__init__.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/__init__.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/connection.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/connection.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/connection.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/connection.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/request.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/request.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/request.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/request.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/response.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/response.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/response.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/response.pyi diff --git 
a/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/retry.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/retry.pyi new file mode 100644 index 000000000000..9a6883ed9deb --- /dev/null +++ b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/retry.pyi @@ -0,0 +1,84 @@ +import logging +from _typeshed import Self +from types import TracebackType +from typing import Any, ClassVar, Collection, NamedTuple +from typing_extensions import Literal + +from .. import exceptions +from ..connectionpool import ConnectionPool +from ..response import HTTPResponse + +ConnectTimeoutError = exceptions.ConnectTimeoutError +MaxRetryError = exceptions.MaxRetryError +ProtocolError = exceptions.ProtocolError +ReadTimeoutError = exceptions.ReadTimeoutError +ResponseError = exceptions.ResponseError + +log: logging.Logger + +class RequestHistory(NamedTuple): + method: str | None + url: str | None + error: Exception | None + status: int | None + redirect_location: str | None + +class Retry: + DEFAULT_ALLOWED_METHODS: ClassVar[frozenset[str]] + RETRY_AFTER_STATUS_CODES: ClassVar[frozenset[int]] + DEFAULT_REMOVE_HEADERS_ON_REDIRECT: ClassVar[frozenset[str]] + DEFAULT_BACKOFF_MAX: ClassVar[int] + + total: bool | int | None + connect: int | None + read: int | None + redirect: Literal[True] | int | None + status: int | None + other: int | None + allowed_methods: Collection[str] | Literal[False] | None + status_forcelist: Collection[int] + backoff_factor: float + raise_on_redirect: bool + raise_on_status: bool + history: tuple[RequestHistory, ...] 
+ respect_retry_after_header: bool + remove_headers_on_redirect: frozenset[str] + def __init__( + self, + total: bool | int | None = ..., + connect: int | None = ..., + read: int | None = ..., + redirect: bool | int | None = ..., + status: int | None = ..., + other: int | None = ..., + allowed_methods: Collection[str] | Literal[False] | None = ..., + status_forcelist: Collection[int] | None = ..., + backoff_factor: float = ..., + raise_on_redirect: bool = ..., + raise_on_status: bool = ..., + history: tuple[RequestHistory, ...] | None = ..., + respect_retry_after_header: bool = ..., + remove_headers_on_redirect: Collection[str] = ..., + method_whitelist: Collection[str] | None = ..., + ) -> None: ... + def new(self: Self, **kw: Any) -> Self: ... + @classmethod + def from_int( + cls, retries: Retry | bool | int | None, redirect: bool | int | None = ..., default: Retry | bool | int | None = ... + ) -> Retry: ... + def get_backoff_time(self) -> float: ... + def parse_retry_after(self, retry_after: str) -> float: ... + def get_retry_after(self, response: HTTPResponse) -> float | None: ... + def sleep_for_retry(self, response: HTTPResponse | None = ...) -> bool: ... + def sleep(self, response: HTTPResponse | None = ...) -> None: ... + def is_retry(self, method: str, status_code: int, has_retry_after: bool = ...) -> bool: ... + def is_exhausted(self) -> bool: ... + def increment( + self, + method: str | None = ..., + url: str | None = ..., + response: HTTPResponse | None = ..., + error: Exception | None = ..., + _pool: ConnectionPool | None = ..., + _stacktrace: TracebackType | None = ..., + ) -> Retry: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/ssl_.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/ssl_.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/ssl_.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/ssl_.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/timeout.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/timeout.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/timeout.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/timeout.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/url.pyi b/packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/url.pyi similarity index 100% rename from packages/pyright-internal/typeshed-fallback/stubs/requests/requests/packages/urllib3/util/url.pyi rename to packages/pyright-internal/typeshed-fallback/stubs/urllib3/urllib3/util/url.pyi diff --git a/packages/pyright-internal/typeshed-fallback/stubs/vobject/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/vobject/METADATA.toml index 16fd217d5d8e..51e869b47983 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/vobject/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/vobject/METADATA.toml @@ -1 +1 @@ -version = "0.9" +version = "0.9.*" diff --git a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/base.pyi b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/base.pyi index 213b3d2d696b..98407fc433c1 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/base.pyi +++ 
b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/base.pyi @@ -1,6 +1,6 @@ from _typeshed import SupportsWrite from collections.abc import Iterable -from typing import Any, TypeVar, overload +from typing import Any, Iterator, TypeVar, overload from typing_extensions import Literal DEBUG: bool @@ -143,7 +143,7 @@ class Stack: def readComponents( streamOrString, validate: bool = ..., transform: bool = ..., ignoreUnreadable: bool = ..., allowQP: bool = ... -) -> None: ... +) -> Iterator[Component]: ... def readOne(stream, validate: bool = ..., transform: bool = ..., ignoreUnreadable: bool = ..., allowQP: bool = ...): ... def registerBehavior(behavior, name: Any | None = ..., default: bool = ..., id: Any | None = ...) -> None: ... def getBehavior(name, id: Any | None = ...): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/icalendar.pyi b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/icalendar.pyi index 72f9ae5f2609..6a9728e0ae9c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/icalendar.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/icalendar.pyi @@ -1,15 +1,15 @@ from datetime import timedelta -from typing import Any, Tuple +from typing import Any from .base import Component from .behavior import Behavior -DATENAMES: Tuple[str, ...] -RULENAMES: Tuple[str, ...] -DATESANDRULES: Tuple[str, ...] +DATENAMES: tuple[str, ...] +RULENAMES: tuple[str, ...] +DATESANDRULES: tuple[str, ...] PRODID: str -WEEKDAYS: Tuple[str, ...] -FREQUENCIES: Tuple[str, ...] +WEEKDAYS: tuple[str, ...] +FREQUENCIES: tuple[str, ...] zeroDelta: timedelta twoHours: timedelta @@ -36,7 +36,7 @@ class TimezoneComponent(Component): normal_attributes: Any @staticmethod def pickTzid(tzinfo, allowUTC: bool = ...): ... - def prettyPrint(self, level, tabwidth) -> None: ... # type: ignore + def prettyPrint(self, level, tabwidth) -> None: ... 
# type: ignore[override] class RecurringComponent(Component): isNative: bool diff --git a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/vcard.pyi b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/vcard.pyi index 08d6836b9f34..8dfd0bec392a 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/vcard.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/vcard.pyi @@ -78,7 +78,7 @@ class Photo(VCardTextBehavior): @classmethod def valueRepr(cls, line): ... @classmethod - def serialize(cls, obj, buf, lineLength, validate) -> None: ... # type: ignore + def serialize(cls, obj, buf, lineLength, validate) -> None: ... # type: ignore[override] def toListOrString(string): ... def splitFields(string): ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/win32tz.pyi b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/win32tz.pyi index a36ceb3812c9..32936fbe519d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/win32tz.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/vobject/vobject/win32tz.pyi @@ -9,13 +9,16 @@ if sys.platform == "win32": localkey: Any WEEKS: Any def list_timezones(): ... + class win32tz(datetime.tzinfo): data: Any def __init__(self, name) -> None: ... def utcoffset(self, dt): ... def dst(self, dt): ... def tzname(self, dt): ... + def pickNthWeekday(year, month, dayofweek, hour, minute, whichweek): ... + class win32tz_data: display: Any dstname: Any @@ -33,4 +36,5 @@ if sys.platform == "win32": dsthour: Any dstminute: Any def __init__(self, path) -> None: ... + def valuesToDict(key): ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/waitress/METADATA.toml index 40f3c61c10ae..a035bd0a01ba 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/METADATA.toml @@ -1,2 +1,2 @@ -version = "0.1" +version = "2.0.*" requires = [] diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/__init__.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/__init__.pyi index 2abd726d3e38..f3aef6fbab97 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/__init__.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/__init__.pyi @@ -1,7 +1,7 @@ -from typing import Any, Tuple +from typing import Any from waitress.server import create_server as create_server def serve(app: Any, **kw: Any) -> None: ... def serve_paste(app: Any, global_conf: Any, **kw: Any) -> int: ... -def profile(cmd: Any, globals: Any, locals: Any, sort_order: Tuple[str, ...], callers: bool) -> None: ... +def profile(cmd: Any, globals: Any, locals: Any, sort_order: tuple[str, ...], callers: bool) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/adjustments.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/adjustments.pyi index 1374444d0fcb..9a62dd85d48f 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/adjustments.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/adjustments.pyi @@ -1,17 +1,17 @@ from socket import socket -from typing import Any, FrozenSet, Iterable, Sequence, Set, Tuple +from typing import Any, Iterable, Sequence from .compat import HAS_IPV6 as HAS_IPV6, PY2 as PY2, WIN as WIN, string_types as string_types from .proxy_headers import PROXY_HEADERS as PROXY_HEADERS -truthy: FrozenSet[Any] -KNOWN_PROXY_HEADERS: FrozenSet[Any] +truthy: frozenset[Any] +KNOWN_PROXY_HEADERS: frozenset[Any] def asbool(s: bool | str | int | None) -> bool: ... def asoctal(s: str) -> int: ... def aslist_cronly(value: str) -> list[str]: ... def aslist(value: str) -> list[str]: ... -def asset(value: str | None) -> Set[str]: ... +def asset(value: str | None) -> set[str]: ... def slash_fixed_str(s: str | None) -> str: ... def str_iftruthy(s: str | None) -> str | None: ... def as_socket_list(sockets: Sequence[object]) -> list[socket]: ... @@ -27,7 +27,7 @@ class Adjustments: threads: int = ... trusted_proxy: str | None = ... trusted_proxy_count: int | None = ... - trusted_proxy_headers: Set[str] = ... + trusted_proxy_headers: set[str] = ... log_untrusted_proxy_headers: bool = ... clear_untrusted_proxy_headers: _bool_marker | bool = ... url_scheme: str = ... @@ -48,7 +48,7 @@ class Adjustments: expose_tracebacks: bool = ... unix_socket: str | None = ... unix_socket_perms: int = ... - socket_options: list[Tuple[int, int, int]] = ... + socket_options: list[tuple[int, int, int]] = ... asyncore_loop_timeout: int = ... asyncore_use_poll: bool = ... ipv4: bool = ... @@ -56,6 +56,6 @@ class Adjustments: sockets: list[socket] = ... 
def __init__(self, **kw: Any) -> None: ... @classmethod - def parse_args(cls, argv: str) -> Tuple[dict[str, Any], Any]: ... + def parse_args(cls, argv: str) -> tuple[dict[str, Any], Any]: ... @classmethod def check_sockets(cls, sockets: Iterable[socket]) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/channel.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/channel.pyi index 1e3b84cec185..eee8c317cf8e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/channel.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/channel.pyi @@ -1,6 +1,6 @@ from socket import socket from threading import Condition, Lock -from typing import Mapping, Sequence, Tuple +from typing import Mapping, Sequence from waitress.adjustments import Adjustments from waitress.buffers import OverflowableBuffer @@ -31,7 +31,7 @@ class HTTPChannel(wasyncore.dispatcher): sendbuf_len: int = ... task_lock: Lock = ... outbuf_lock: Condition = ... - addr: Tuple[str, int] = ... + addr: tuple[str, int] = ... def __init__( self, server: BaseWSGIServer, sock: socket, addr: str, adj: Adjustments, map: Mapping[int, socket] | None = ... ) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/compat.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/compat.pyi index 94bbb9eb18ea..07757c9fe04d 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/compat.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/compat.pyi @@ -1,12 +1,12 @@ from io import TextIOWrapper -from typing import Any, Tuple +from typing import Any PY2: bool PY3: bool WIN: bool -string_types: Tuple[str] -integer_types: Tuple[int] -class_types: Tuple[type] +string_types: tuple[str] +integer_types: tuple[int] +class_types: tuple[type] text_type = str binary_type = bytes long = int diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/parser.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/parser.pyi index 8b7a09121f66..193c4a97347e 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/parser.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/parser.pyi @@ -1,5 +1,5 @@ from io import BytesIO -from typing import Any, Mapping, Pattern, Sequence, Tuple +from typing import Any, Mapping, Pattern, Sequence from waitress.adjustments import Adjustments from waitress.receiver import ChunkedReceiver, FixedStreamReceiver @@ -35,9 +35,9 @@ class HTTPRequestParser: def get_body_stream(self) -> BytesIO: ... def close(self) -> None: ... -def split_uri(uri: bytes) -> Tuple[str, str, bytes, str, str]: ... +def split_uri(uri: bytes) -> tuple[str, str, bytes, str, str]: ... def get_header_lines(header: bytes) -> Sequence[bytes]: ... first_line_re: Pattern[Any] -def crack_first_line(line: str) -> Tuple[bytes, bytes, bytes]: ... +def crack_first_line(line: str) -> tuple[bytes, bytes, bytes]: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/proxy_headers.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/proxy_headers.pyi index b13a2e98b129..3d3c6e1d8ea8 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/proxy_headers.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/proxy_headers.pyi @@ -1,5 +1,5 @@ from logging import Logger -from typing import Any, Callable, Mapping, NamedTuple, Sequence, Set +from typing import Any, Callable, Mapping, NamedTuple, Sequence from .utilities import BadRequest as BadRequest @@ -21,14 +21,14 @@ def proxy_headers_middleware( app: Any, trusted_proxy: str | None = ..., trusted_proxy_count: int = ..., - trusted_proxy_headers: Set[str] | None = ..., + trusted_proxy_headers: set[str] | None = ..., clear_untrusted: bool = ..., log_untrusted: bool = ..., logger: Logger = ..., ) -> Callable[..., Any]: ... def parse_proxy_headers( - environ: Mapping[str, str], trusted_proxy_count: int, trusted_proxy_headers: Set[str], logger: Logger = ... -) -> Set[str]: ... + environ: Mapping[str, str], trusted_proxy_count: int, trusted_proxy_headers: set[str], logger: Logger = ... +) -> set[str]: ... def strip_brackets(addr: str) -> str: ... def clear_untrusted_headers( environ: Mapping[str, str], untrusted_headers: Sequence[str], log_warning: bool = ..., logger: Logger = ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/runner.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/runner.pyi index ad7e9fd53a2a..9ce1156c3864 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/runner.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/runner.pyi @@ -1,10 +1,10 @@ from io import TextIOWrapper -from typing import Any, Callable, Pattern, Sequence, Tuple +from typing import Any, Callable, Pattern, Sequence HELP: str RUNNER_PATTERN: Pattern[Any] -def match(obj_name: str) -> Tuple[str, str]: ... +def match(obj_name: str) -> tuple[str, str]: ... def resolve(module_name: str, object_name: str) -> Any: ... def show_help(stream: TextIOWrapper, name: str, error: str | None = ...) -> None: ... def show_exception(stream: TextIOWrapper) -> None: ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/server.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/server.pyi index 269a390757d2..8179b6743eaa 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/server.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/server.pyi @@ -1,5 +1,5 @@ from socket import socket -from typing import Any, Sequence, Tuple +from typing import Any, Sequence from waitress.adjustments import Adjustments from waitress.channel import HTTPChannel @@ -20,13 +20,13 @@ class MultiSocketServer: asyncore: Any = ... adj: Adjustments = ... map: Any = ... - effective_listen: Sequence[Tuple[str, int]] = ... + effective_listen: Sequence[tuple[str, int]] = ... task_dispatcher: ThreadedTaskDispatcher = ... def __init__( self, map: Any | None = ..., adj: Adjustments | None = ..., - effective_listen: Sequence[Tuple[str, int]] | None = ..., + effective_listen: Sequence[tuple[str, int]] | None = ..., dispatcher: ThreadedTaskDispatcher | None = ..., ) -> None: ... 
def print_listen(self, format_str: str) -> None: ... @@ -38,7 +38,7 @@ class BaseWSGIServer(wasyncore.dispatcher): next_channel_cleanup: int = ... socketmod: socket = ... asyncore: Any = ... - sockinfo: Tuple[int, int, int, Tuple[str, int]] = ... + sockinfo: tuple[int, int, int, tuple[str, int]] = ... family: int = ... socktype: int = ... application: Any = ... @@ -80,7 +80,7 @@ class BaseWSGIServer(wasyncore.dispatcher): class TcpWSGIServer(BaseWSGIServer): def bind_server_socket(self) -> None: ... - def getsockname(self) -> Tuple[str, Tuple[str, int]]: ... + def getsockname(self) -> tuple[str, tuple[str, int]]: ... def set_socket_options(self, conn: socket) -> None: ... class UnixWSGIServer(BaseWSGIServer): @@ -96,8 +96,8 @@ class UnixWSGIServer(BaseWSGIServer): **kw: Any, ) -> None: ... def bind_server_socket(self) -> None: ... - def getsockname(self) -> Tuple[str, Tuple[str, int]]: ... - def fix_addr(self, addr: Any) -> Tuple[str, None]: ... + def getsockname(self) -> tuple[str, tuple[str, int]]: ... + def fix_addr(self, addr: Any) -> tuple[str, None]: ... def get_server_name(self, ip: Any) -> str: ... WSGIServer: TcpWSGIServer diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/task.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/task.pyi index c0dea9f5889a..d7a17d6bb39c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/task.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/task.pyi @@ -1,6 +1,7 @@ +from collections import deque from logging import Logger from threading import Condition, Lock -from typing import Any, Deque, Mapping, Sequence, Tuple +from typing import Any, Mapping, Sequence from .channel import HTTPChannel from .utilities import Error @@ -14,7 +15,7 @@ class ThreadedTaskDispatcher: logger: Logger = ... queue_logger: Logger = ... threads: set[Any] = ... - queue: Deque[Task] = ... + queue: deque[Task] = ... lock: Lock = ... 
queue_cv: Condition = ... thread_exit_cv: Condition = ... @@ -39,7 +40,7 @@ class Task: logger: Logger = ... channel: HTTPChannel = ... request: Error = ... - response_headers: Sequence[Tuple[str, str]] = ... + response_headers: Sequence[tuple[str, str]] = ... version: str = ... def __init__(self, channel: HTTPChannel, request: Error) -> None: ... def service(self) -> None: ... @@ -60,7 +61,7 @@ class ErrorTask(Task): class WSGITask(Task): environ: Any | None = ... - response_headers: Sequence[Tuple[str, str]] = ... + response_headers: Sequence[tuple[str, str]] = ... complete: bool = ... status: str = ... content_length: int = ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/utilities.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/utilities.pyi index a7b7b3c27865..9f7482b51399 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/utilities.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/utilities.pyi @@ -1,5 +1,5 @@ from logging import Logger -from typing import Any, Callable, Mapping, Match, Pattern, Sequence, Tuple +from typing import Any, Callable, Mapping, Match, Pattern, Sequence from .rfc7230 import OBS_TEXT as OBS_TEXT, VCHAR as VCHAR @@ -23,12 +23,12 @@ months_reg: str rfc822_date: str rfc822_reg: Pattern[Any] -def unpack_rfc822(m: Match[Any]) -> Tuple[int, int, int, int, int, int, int, int, int]: ... +def unpack_rfc822(m: Match[Any]) -> tuple[int, int, int, int, int, int, int, int, int]: ... rfc850_date: str rfc850_reg: Pattern[Any] -def unpack_rfc850(m: Match[Any]) -> Tuple[int, int, int, int, int, int, int, int, int]: ... +def unpack_rfc850(m: Match[Any]) -> tuple[int, int, int, int, int, int, int, int, int]: ... weekdayname: Sequence[str] monthname: Sequence[str] @@ -52,8 +52,8 @@ class Error: reason: str = ... body: str = ... def __init__(self, body: str) -> None: ... 
- def to_response(self) -> Tuple[str, Sequence[Tuple[str, str]], str]: ... - def wsgi_response(self, environ: Any, start_response: Callable[[str, Sequence[Tuple[str, str]]], None]) -> str: ... + def to_response(self) -> tuple[str, Sequence[tuple[str, str]], str]: ... + def wsgi_response(self, environ: Any, start_response: Callable[[str, Sequence[tuple[str, str]]], None]) -> str: ... class BadRequest(Error): code: int = ... diff --git a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/wasyncore.pyi b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/wasyncore.pyi index 003c17e67a44..b7bb0b5d864c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/wasyncore.pyi +++ b/packages/pyright-internal/typeshed-fallback/stubs/waitress/waitress/wasyncore.pyi @@ -1,7 +1,7 @@ from io import BytesIO from logging import Logger from socket import socket -from typing import Any, Callable, Mapping, Tuple +from typing import Any, Callable, Mapping from . import compat as compat, utilities as utilities @@ -21,7 +21,7 @@ def poll2(timeout: float = ..., map: Mapping[int, socket] | None = ...) -> None: poll3 = poll2 def loop(timeout: float = ..., use_poll: bool = ..., map: Mapping[int, socket] | None = ..., count: int | None = ...) -> None: ... -def compact_traceback() -> Tuple[Tuple[str, str, str], BaseException, BaseException, str]: ... +def compact_traceback() -> tuple[tuple[str, str, str], BaseException, BaseException, str]: ... class dispatcher: debug: bool = ... @@ -29,24 +29,24 @@ class dispatcher: accepting: bool = ... connecting: bool = ... closing: bool = ... - addr: Tuple[str, int] | None = ... + addr: tuple[str, int] | None = ... ignore_log_types: frozenset[Any] logger: Logger = ... - compact_traceback: Callable[[], Tuple[Tuple[str, str, str], BaseException, BaseException, str]] = ... + compact_traceback: Callable[[], tuple[tuple[str, str, str], BaseException, BaseException, str]] = ... 
socket: _socket | None = ... def __init__(self, sock: _socket | None = ..., map: Mapping[int, _socket] | None = ...) -> None: ... def add_channel(self, map: Mapping[int, _socket] | None = ...) -> None: ... def del_channel(self, map: Mapping[int, _socket] | None = ...) -> None: ... - family_and_type: Tuple[int, int] = ... + family_and_type: tuple[int, int] = ... def create_socket(self, family: int = ..., type: int = ...) -> None: ... def set_socket(self, sock: _socket, map: Mapping[int, _socket] | None = ...) -> None: ... def set_reuse_addr(self) -> None: ... def readable(self) -> bool: ... def writable(self) -> bool: ... def listen(self, num: int) -> None: ... - def bind(self, addr: Tuple[str, int]) -> None: ... - def connect(self, address: Tuple[str, int]) -> None: ... - def accept(self) -> Tuple[_socket, Tuple[str, int]] | None: ... + def bind(self, addr: tuple[str, int]) -> None: ... + def connect(self, address: tuple[str, int]) -> None: ... + def accept(self) -> tuple[_socket, tuple[str, int]] | None: ... def send(self, data: bytes) -> int: ... def recv(self, buffer_size: int) -> bytes: ... def close(self) -> None: ... @@ -71,7 +71,7 @@ class dispatcher_with_send(dispatcher): def initiate_send(self) -> None: ... handle_write: Callable[[], None] = ... def writable(self) -> bool: ... - def send(self, data: bytes) -> None: ... # type: ignore + def send(self, data: bytes) -> None: ... # type: ignore[override] def close_all(map: Mapping[int, socket] | None = ..., ignore_all: bool = ...) -> None: ... 
diff --git a/packages/pyright-internal/typeshed-fallback/stubs/xxhash/METADATA.toml b/packages/pyright-internal/typeshed-fallback/stubs/xxhash/METADATA.toml index 0f01cc4b1d64..72294377102c 100644 --- a/packages/pyright-internal/typeshed-fallback/stubs/xxhash/METADATA.toml +++ b/packages/pyright-internal/typeshed-fallback/stubs/xxhash/METADATA.toml @@ -1,2 +1,2 @@ -version = "2.0" +version = "2.0.*" python2 = true diff --git a/packages/pyright/package-lock.json b/packages/pyright/package-lock.json index 3620a6fa1e4e..6c9e05c9ea5c 100644 --- a/packages/pyright/package-lock.json +++ b/packages/pyright/package-lock.json @@ -1,13 +1,13 @@ { "name": "pyright", - "version": "1.1.170", + "version": "1.1.225", "lockfileVersion": 1, "requires": true, "dependencies": { "@discoveryjs/json-ext": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.3.tgz", - "integrity": "sha512-Fxt+AfXgjMoin2maPIYzFZnQjAXjAL0PHscM5pRTtatFqB+vZxAM9tLp2Optnuw3QOQC40jTNeGYFOMvyf7v9g==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.6.tgz", + "integrity": "sha512-ws57AidsDvREKrZKYffXddNkyaF14iHNHm8VQnZH6t99E8gczjNN0GpvcGny0imC80yQ0tHz1xVUKk/KFQSUyA==", "dev": true }, "@nodelib/fs.scandir": { @@ -37,20 +37,18 @@ } }, "@types/copy-webpack-plugin": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@types/copy-webpack-plugin/-/copy-webpack-plugin-8.0.1.tgz", - "integrity": "sha512-TwEeGse0/wq+t3SFW0DEwroMS/cDkwVZT+vj7tMAYTp7llt/yz6NuW2n04X2M5P/kSfBQOORhrHAN2mqZdmybg==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/@types/copy-webpack-plugin/-/copy-webpack-plugin-10.1.0.tgz", + "integrity": "sha512-Dk0NUW3X6hVQdkH2n9R7NejjPNCocZBiv8XF8Ac5su2d6EKzCcG/yWDwnWGrEsAWvogoADJyUKULwncx0G9Jkg==", "dev": true, "requires": { - "@types/node": "*", - "tapable": "^2.0.0", - "webpack": "^5.1.0" + "copy-webpack-plugin": "*" } }, "@types/eslint": { - "version": "7.28.0", - 
"resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-7.28.0.tgz", - "integrity": "sha512-07XlgzX0YJUn4iG1ocY4IX9DzKSmMGUs6ESKlxWhZRaa0fatIWaHWUVapcuGa8r5HFnTqzj+4OCjd5f7EZ/i/A==", + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", + "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==", "dev": true, "requires": { "@types/estree": "*", @@ -58,9 +56,9 @@ } }, "@types/eslint-scope": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.1.tgz", - "integrity": "sha512-SCFeogqiptms4Fg29WpOTk5nHIzfpKCemSN63ksBQYKTcXoJEmJagV+DhVmbapZzY4/5YaOV1nZwrsU79fFm1g==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", "dev": true, "requires": { "@types/eslint": "*", @@ -80,9 +78,9 @@ "dev": true }, "@types/node": { - "version": "12.20.24", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.24.tgz", - "integrity": "sha512-yxDeaQIAJlMav7fH5AQqPH1u8YIuhYJXYBzxaQ4PifsU0GDO38MSdmEDeRlIxrKbC6NbEaaEHDanWb+y30U8SQ==", + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", "dev": true }, "@webassemblyjs/ast": { @@ -232,24 +230,24 @@ } }, "@webpack-cli/configtest": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.0.4.tgz", - "integrity": "sha512-cs3XLy+UcxiP6bj0A6u7MLLuwdXJ1c3Dtc0RkKg+wiI1g/Ti1om8+/2hc2A2B60NbBNAbMgyBMHvyymWm/j4wQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.1.tgz", + "integrity": 
"sha512-1FBc1f9G4P/AxMqIgfZgeOTuRnwZMten8E7zap5zgpPInnCrP8D4Q81+4CWIch8i/Nf7nXjP0v6CjjbHOrXhKg==", "dev": true }, "@webpack-cli/info": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.3.0.tgz", - "integrity": "sha512-ASiVB3t9LOKHs5DyVUcxpraBXDOKubYu/ihHhU+t1UPpxsivg6Od2E2qU4gJCekfEddzRBzHhzA/Acyw/mlK/w==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.1.tgz", + "integrity": "sha512-PKVGmazEq3oAo46Q63tpMr4HipI3OPfP7LiNOEJg963RMgT0rqheag28NCML0o3GIzA3DmxP1ZIAv9oTX1CUIA==", "dev": true, "requires": { "envinfo": "^7.7.3" } }, "@webpack-cli/serve": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.5.2.tgz", - "integrity": "sha512-vgJ5OLWadI8aKjDlOH3rb+dYyPd2GTZuQC/Tihjct6F9GpXGZINo3Y/IVuZVTM1eDQB+/AOsjPUWH/WySDaXvw==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.1.tgz", + "integrity": "sha512-gNGTiTrjEVQ0OcVnzsRSqTxaBSr+dmTfm+qJsCDluky8uhdLWep7Gcr62QsAKHTMxjCS/8nEITsmFAhfIx+QSw==", "dev": true }, "@xtuc/ieee754": { @@ -265,34 +263,46 @@ "dev": true }, "acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", "dev": true }, "acorn-import-assertions": { - "version": "1.7.6", - "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.7.6.tgz", - "integrity": "sha512-FlVvVFA1TX6l3lp8VjDnYYq7R1nyW6x3svAt4nDgrWQ9SBaSh9CnbwgSUTasgfNfOG5HlM1ehugCvM+hjo56LA==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": 
"sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", "dev": true }, "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.10.0.tgz", + "integrity": "sha512-bzqAEZOjkrUMl2afH8dknrq5KEk2SrwdBROR+vH1EKVQTqaUbJVPdc/gEdggTMM0Se+s+Ja4ju4TlNcStKl2Hw==", "dev": true, "requires": { "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, + "ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dev": true, + "requires": { + "ajv": "^8.0.0" + } + }, "ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "dev": true + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.3" + } }, "ansi-styles": { "version": "4.3.0", @@ -304,9 +314,9 @@ } }, "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz", + "integrity": 
"sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==", "dev": true }, "balanced-match": { @@ -335,16 +345,16 @@ } }, "browserslist": { - "version": "4.17.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.17.0.tgz", - "integrity": "sha512-g2BJ2a0nEYvEFQC208q8mVAhfNwpZ5Mu8BwgtCdZKO3qx98HChmeg448fPdUzld8aFmfLgVh7yymqV+q1lJZ5g==", + "version": "4.19.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30001254", - "colorette": "^1.3.0", - "electron-to-chromium": "^1.3.830", + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", "escalade": "^3.1.1", - "node-releases": "^1.1.75" + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" } }, "buffer-from": { @@ -354,9 +364,9 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001255", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001255.tgz", - "integrity": "sha512-F+A3N9jTZL882f/fg/WWVnKSu6IOo3ueLz4zwaOPbPYHNmM/ZaDUyzyJwS1mZhX7Ex5jqTyW599Gdelh5PDYLQ==", + "version": "1.0.30001307", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001307.tgz", + "integrity": "sha512-+MXEMczJ4FuxJAUp0jvAl6Df0NI/OfW1RWEE61eSmzS7hw6lz4IKutbhbXendwq8BljfFuHtu26VWsg4afQ7Ng==", "dev": true }, "chalk": { @@ -367,17 +377,6 @@ "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" - }, - "dependencies": { - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } } }, "chrome-trace-event": { @@ -413,9 +412,9 @@ "dev": true }, "colorette": { - "version": 
"1.4.0", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", - "integrity": "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==", + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", "dev": true }, "commander": { @@ -431,17 +430,16 @@ "dev": true }, "copy-webpack-plugin": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-9.0.1.tgz", - "integrity": "sha512-14gHKKdYIxF84jCEgPgYXCPpldbwpxxLbCmA7LReY7gvbaT555DgeBWBgBZM116tv/fO6RRJrsivBqRyRlukhw==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-10.2.4.tgz", + "integrity": "sha512-xFVltahqlsRcyyJqQbDY6EYTtyQZF9rf+JPjwHObLdPFMEISqkFkr7mFoVOC6BfYS/dNThyoQKvziugm+OnwBg==", "dev": true, "requires": { - "fast-glob": "^3.2.5", - "glob-parent": "^6.0.0", - "globby": "^11.0.3", + "fast-glob": "^3.2.7", + "glob-parent": "^6.0.1", + "globby": "^12.0.2", "normalize-path": "^3.0.0", - "p-limit": "^3.1.0", - "schema-utils": "^3.0.0", + "schema-utils": "^4.0.0", "serialize-javascript": "^6.0.0" } }, @@ -466,15 +464,15 @@ } }, "electron-to-chromium": { - "version": "1.3.833", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.833.tgz", - "integrity": "sha512-h+9aVaUHjyunLqtCjJF2jrJ73tYcJqo2cCGKtVAXH9WmnBsb8hiChRQ0P1uXjdxR6Wcfxibephy41c1YlZA/pA==", + "version": "1.4.64", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.64.tgz", + "integrity": "sha512-8mec/99xgLUZCIZZq3wt61Tpxg55jnOSpxGYapE/1Ma9MpFEYYaz4QNYm0CM1rrnCo7i3FRHhbaWjeCLsveGjQ==", "dev": true }, "enhanced-resolve": { - "version": "5.8.2", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz", - "integrity": 
"sha512-F27oB3WuHDzvR2DOGNTaYy0D5o0cnrv8TeI482VM4kYgQd/FT9lUQwuNsJ0oOHtBUq7eiW5ytqzp7nBFknL+GA==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz", + "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==", "dev": true, "requires": { "graceful-fs": "^4.2.4", @@ -488,9 +486,9 @@ "dev": true }, "es-module-lexer": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.7.1.tgz", - "integrity": "sha512-MgtWFl5No+4S3TmhDmCz2ObFGm6lEpTnzbQi+Dd+pw4mlTIZTmM2iAs5gRlmx5zS9luzobCSBSI90JM/1/JgOw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==", "dev": true }, "escalade": { @@ -519,9 +517,9 @@ }, "dependencies": { "estraverse": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", - "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true } } @@ -562,9 +560,9 @@ "dev": true }, "fast-glob": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.7.tgz", - "integrity": "sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==", + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", "dev": true, "requires": { "@nodelib/fs.stat": "^2.0.2", @@ -598,9 +596,9 @@ "dev": true }, "fastq": { - 
"version": "1.12.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.12.0.tgz", - "integrity": "sha512-VNX0QkHK3RsXVKr9KrlUv/FoTa0NdbYoHHl7uXHv2rzyHSlxjdNAKug2twd9luJxpcyNeAgf5iPPMutJO67Dfg==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "dev": true, "requires": { "reusify": "^1.0.4" @@ -644,9 +642,9 @@ "dev": true }, "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -658,12 +656,12 @@ } }, "glob-parent": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.1.tgz", - "integrity": "sha512-kEVjS71mQazDBHKcsq4E9u/vUzaLcw1A8EtUeydawvIWQCJM0qQ08G1H7/XTjFUulla6XQiDOG6MXSaG0HDKog==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, "requires": { - "is-glob": "^4.0.1" + "is-glob": "^4.0.3" } }, "glob-to-regexp": { @@ -673,23 +671,23 @@ "dev": true }, "globby": { - "version": "11.0.4", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.4.tgz", - "integrity": "sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg==", + "version": "12.2.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-12.2.0.tgz", + "integrity": "sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA==", 
"dev": true, "requires": { - "array-union": "^2.1.0", + "array-union": "^3.0.1", "dir-glob": "^3.0.1", - "fast-glob": "^3.1.1", - "ignore": "^5.1.4", - "merge2": "^1.3.0", - "slash": "^3.0.0" + "fast-glob": "^3.2.7", + "ignore": "^5.1.9", + "merge2": "^1.4.1", + "slash": "^4.0.0" } }, "graceful-fs": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", - "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", + "version": "4.2.9", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", "dev": true }, "has": { @@ -714,15 +712,15 @@ "dev": true }, "ignore": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", - "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", "dev": true }, "import-local": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.0.2.tgz", - "integrity": "sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, "requires": { "pkg-dir": "^4.2.0", @@ -752,9 +750,9 @@ "dev": true }, "is-core-module": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.6.0.tgz", - "integrity": 
"sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", "dev": true, "requires": { "has": "^1.0.3" @@ -767,9 +765,9 @@ "dev": true }, "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "requires": { "is-extglob": "^2.1.1" @@ -809,14 +807,25 @@ "dev": true }, "jest-worker": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.1.1.tgz", - "integrity": "sha512-XJKCL7tu+362IUYTWvw8+3S75U7qMiYiRU6u5yqscB48bTvzwN6i8L/7wVTXiFLwkRsxARNM7TISnTvcgv9hxA==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.4.6.tgz", + "integrity": "sha512-gHWJF/6Xi5CTG5QCvROr6GcmpIqNYpDJyc8A1h/DyXqH1tD6SnRCM0d3U5msV31D2LB/U+E0M+W4oyvKV44oNw==", "dev": true, "requires": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" + }, + "dependencies": { + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "json-parse-better-errors": { @@ -826,9 +835,9 @@ "dev": true }, "json-schema-traverse": { - "version": "0.4.1", - "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "dev": true }, "kind-of": { @@ -884,18 +893,18 @@ } }, "mime-db": { - "version": "1.49.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", - "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", "dev": true }, "mime-types": { - "version": "2.1.32", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", - "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "dev": true, "requires": { - "mime-db": "1.49.0" + "mime-db": "1.51.0" } }, "mimic-fn": { @@ -926,9 +935,9 @@ "dev": true }, "node-releases": { - "version": "1.1.75", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.75.tgz", - "integrity": "sha512-Qe5OUajvqrqDSy6wrWFmMwfJ0jVgwiw4T3KqmbTcZ62qW0gQkheXYhcFM1+lOVcGUoRxcEcfyvFMAnDgaF1VWw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==", "dev": true }, 
"normalize-path": { @@ -965,12 +974,12 @@ } }, "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, "requires": { - "yocto-queue": "^0.1.0" + "p-try": "^2.0.0" } }, "p-locate": { @@ -980,17 +989,6 @@ "dev": true, "requires": { "p-limit": "^2.2.0" - }, - "dependencies": { - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - } } }, "p-try": { @@ -1029,10 +1027,16 @@ "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", "dev": true }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, "pkg-dir": { @@ -1074,14 +1078,21 @@ "resolve": "^1.1.6" } }, + "require-from-string": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", "dev": true, "requires": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -1121,14 +1132,15 @@ "dev": true }, "schema-utils": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", - "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + "integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dev": true, "requires": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" + "@types/json-schema": "^7.0.9", + "ajv": "^8.8.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.0.0" } }, "semver": { @@ -1174,9 +1186,9 @@ "dev": true }, "shelljs": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz", - "integrity": "sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==", + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": 
"sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "dev": true, "requires": { "glob": "^7.0.0", @@ -1185,25 +1197,25 @@ } }, "shx": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.3.tgz", - "integrity": "sha512-nZJ3HFWVoTSyyB+evEKjJ1STiixGztlqwKLTUNV5KqMWtGey9fTd4KU1gdZ1X9BV6215pswQ/Jew9NsuS/fNDA==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", "dev": true, "requires": { "minimist": "^1.2.3", - "shelljs": "^0.8.4" + "shelljs": "^0.8.5" } }, "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "dev": true }, "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", "dev": true }, "source-map": { @@ -1213,9 +1225,9 @@ "dev": true }, "source-map-support": { - "version": "0.5.20", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.20.tgz", - "integrity": "sha512-n1lZZ8Ve4ksRqizaBQgxXDgKwttHDhyfQjA6YZZn8+AroHbsIz+JjwxQDxbp+7y5OYCI8t1Yk7etjD9CRd2hIw==", + "version": "0.5.21", + "resolved": 
"https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "dev": true, "requires": { "buffer-from": "^1.0.0", @@ -1229,29 +1241,35 @@ "dev": true }, "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" } }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "tapable": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.0.tgz", - "integrity": "sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", "dev": true }, "terser": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.7.2.tgz", - "integrity": "sha512-0Omye+RD4X7X69O0eql3lC4Heh/5iLj3ggxR/B5ketZLOtLiOqukUgjw3q4PDnNQbsrkKr3UMypqStQG3XKRvw==", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", + "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", "dev": true, 
"requires": { "commander": "^2.20.0", "source-map": "~0.7.2", - "source-map-support": "~0.5.19" + "source-map-support": "~0.5.20" }, "dependencies": { "source-map": { @@ -1263,17 +1281,53 @@ } }, "terser-webpack-plugin": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.2.4.tgz", - "integrity": "sha512-E2CkNMN+1cho04YpdANyRrn8CyN4yMy+WdFKZIySFZrGXZxJwJP6PMNGGc/Mcr6qygQHUUqRxnAPmi0M9f00XA==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz", + "integrity": "sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g==", "dev": true, "requires": { - "jest-worker": "^27.0.6", - "p-limit": "^3.1.0", + "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", "source-map": "^0.6.1", "terser": "^5.7.2" + }, + "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": 
"sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + } } }, "to-regex-range": { @@ -1286,9 +1340,9 @@ } }, "ts-loader": { - "version": "9.2.5", - "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.2.5.tgz", - "integrity": "sha512-al/ATFEffybdRMUIr5zMEWQdVnCGMUA9d3fXJ8dBVvBlzytPvIszoG9kZoR+94k6/i293RnVOXwMaWbXhNy9pQ==", + "version": "9.2.6", + "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.2.6.tgz", + "integrity": "sha512-QMTC4UFzHmu9wU2VHZEmWWE9cUajjfcdcws+Gh7FhiO+Dy0RnR1bNz0YCHqhI0yRowCE9arVnNxYHqELOy9Hjw==", "dev": true, "requires": { "chalk": "^4.1.0", @@ -1298,9 +1352,9 @@ } }, "typescript": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.2.tgz", - "integrity": "sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ==", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.4.tgz", + "integrity": "sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==", "dev": true }, "uri-js": { @@ -1312,16 +1366,10 @@ "punycode": "^2.1.0" } }, - "v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", - "dev": true - }, "watchpack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.2.0.tgz", - "integrity": "sha512-up4YAn/XHgZHIxFBVCdlMiWDj6WaLKpwVeGQk2I5thdYxF/KmF0aaz6TfJZ/hfl1h/XlcDr7k1KH7ThDagpFaA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", + "integrity": 
"sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", "dev": true, "requires": { "glob-to-regexp": "^0.4.1", @@ -1329,9 +1377,9 @@ } }, "webpack": { - "version": "5.52.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.52.0.tgz", - "integrity": "sha512-yRZOat8jWGwBwHpco3uKQhVU7HYaNunZiJ4AkAVQkPCUGoZk/tiIXiwG+8HIy/F+qsiZvSOa+GLQOj3q5RKRYg==", + "version": "5.68.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.68.0.tgz", + "integrity": "sha512-zUcqaUO0772UuuW2bzaES2Zjlm/y3kRBQDVFVCge+s2Y8mwuUTdperGaAv65/NtRL/1zanpSJOq/MD8u61vo6g==", "dev": true, "requires": { "@types/eslint-scope": "^3.7.0", @@ -1343,12 +1391,12 @@ "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.8.0", - "es-module-lexer": "^0.7.1", + "enhanced-resolve": "^5.8.3", + "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.4", + "graceful-fs": "^4.2.9", "json-parse-better-errors": "^1.0.2", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", @@ -1356,28 +1404,64 @@ "schema-utils": "^3.1.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.1.3", - "watchpack": "^2.2.0", - "webpack-sources": "^3.2.0" + "watchpack": "^2.3.1", + "webpack-sources": "^3.2.3" + }, + "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", 
+ "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + } } }, "webpack-cli": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.8.0.tgz", - "integrity": "sha512-+iBSWsX16uVna5aAYN6/wjhJy1q/GKk4KjKvfg90/6hykCTSgozbfz5iRgDTSJt/LgSbYxdBX3KBHeobIs+ZEw==", + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.2.tgz", + "integrity": "sha512-m3/AACnBBzK/kMTcxWHcZFPrw/eQuY4Df1TxvIWfWM2x7mRqBQCqKEd96oCUa9jkapLBaFfRce33eGDb4Pr7YQ==", "dev": true, "requires": { "@discoveryjs/json-ext": "^0.5.0", - "@webpack-cli/configtest": "^1.0.4", - "@webpack-cli/info": "^1.3.0", - "@webpack-cli/serve": "^1.5.2", - "colorette": "^1.2.1", + "@webpack-cli/configtest": "^1.1.1", + "@webpack-cli/info": "^1.4.1", + "@webpack-cli/serve": "^1.6.1", + "colorette": "^2.0.14", "commander": "^7.0.0", "execa": "^5.0.0", "fastest-levenshtein": "^1.0.12", "import-local": "^3.0.2", "interpret": "^2.2.0", "rechoir": "^0.7.0", - "v8-compile-cache": "^2.2.0", "webpack-merge": "^5.7.3" }, "dependencies": { @@ -1415,9 +1499,9 @@ } }, "webpack-sources": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.0.tgz", - "integrity": "sha512-fahN08Et7P9trej8xz/Z7eRu8ltyiygEo/hnRi9KqBUs80KeDcnf96ZJo++ewWd84fEf3xSX9bp4ZS9hbw0OBw==", + "version": "3.2.3", + "resolved": 
"https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", "dev": true }, "which": { @@ -1446,12 +1530,6 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", "dev": true - }, - "yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true } } } diff --git a/packages/pyright/package.json b/packages/pyright/package.json index d6655d004d12..009d53b1e385 100644 --- a/packages/pyright/package.json +++ b/packages/pyright/package.json @@ -2,7 +2,7 @@ "name": "pyright", "displayName": "Pyright", "description": "Type checker for the Python language", - "version": "1.1.170", + "version": "1.1.225", "license": "MIT", "author": { "name": "Microsoft Corporation" @@ -23,14 +23,14 @@ "webpack": "webpack --mode development --progress" }, "devDependencies": { - "@types/copy-webpack-plugin": "^8.0.1", - "@types/node": "^12.20.24", - "copy-webpack-plugin": "^9.0.1", - "shx": "^0.3.3", - "ts-loader": "^9.2.5", - "typescript": "~4.4.2", - "webpack": "^5.52.0", - "webpack-cli": "^4.8.0" + "@types/copy-webpack-plugin": "^10.1.0", + "@types/node": "^17.0.14", + "copy-webpack-plugin": "^10.2.4", + "shx": "^0.3.4", + "ts-loader": "^9.2.6", + "typescript": "~4.4.4", + "webpack": "^5.68.0", + "webpack-cli": "^4.9.2" }, "files": [ "/dist", diff --git a/packages/vscode-pyright/LICENSE.txt b/packages/vscode-pyright/LICENSE.txt new file mode 100644 index 000000000000..cb01ff54620c --- /dev/null +++ b/packages/vscode-pyright/LICENSE.txt @@ -0,0 +1,22 @@ +MIT License + +Pyright - A static type checker for the Python language +Copyright (c) Microsoft 
Corporation. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE diff --git a/packages/vscode-pyright/README.md b/packages/vscode-pyright/README.md index 83662c115cd2..ecbcb8002232 100644 --- a/packages/vscode-pyright/README.md +++ b/packages/vscode-pyright/README.md @@ -26,10 +26,12 @@ Pyright supports [configuration files](/docs/configuration.md) that provide gran * [PEP 612](https://www.python.org/dev/peps/pep-0612/) parameter specification variables * [PEP 613](https://www.python.org/dev/peps/pep-0613/) explicit type aliases * [PEP 635](https://www.python.org/dev/peps/pep-0635/) structural pattern matching -* [PEP 637](https://www.python.org/dev/peps/pep-0637/) indexing with keyword arguments * [PEP 646](https://www.python.org/dev/peps/pep-0646/) variadic generics * [PEP 647](https://www.python.org/dev/peps/pep-0647/) user-defined type guards * [PEP 655](https://www.python.org/dev/peps/pep-0655/) required typed dictionary items +* [PEP 
673](https://www.python.org/dev/peps/pep-0673/) Self type +* [PEP 675](https://www.python.org/dev/peps/pep-0675/) arbitrary literal strings +* [PEP 681](https://www.python.org/dev/peps/pep-0681/) dataclass transform * Type inference for function return values, instance variables, class variables, and globals * Type guards that understand conditional code flow constructs like if/else statements diff --git a/packages/vscode-pyright/package-lock.json b/packages/vscode-pyright/package-lock.json index fa0f09c00307..33f77124eb64 100644 --- a/packages/vscode-pyright/package-lock.json +++ b/packages/vscode-pyright/package-lock.json @@ -1,13 +1,13 @@ { "name": "vscode-pyright", - "version": "1.1.170", + "version": "1.1.225", "lockfileVersion": 1, "requires": true, "dependencies": { "@discoveryjs/json-ext": { - "version": "0.5.3", - "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.3.tgz", - "integrity": "sha512-Fxt+AfXgjMoin2maPIYzFZnQjAXjAL0PHscM5pRTtatFqB+vZxAM9tLp2Optnuw3QOQC40jTNeGYFOMvyf7v9g==", + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.6.tgz", + "integrity": "sha512-ws57AidsDvREKrZKYffXddNkyaF14iHNHm8VQnZH6t99E8gczjNN0GpvcGny0imC80yQ0tHz1xVUKk/KFQSUyA==", "dev": true }, "@nodelib/fs.scandir": { @@ -37,20 +37,18 @@ } }, "@types/copy-webpack-plugin": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@types/copy-webpack-plugin/-/copy-webpack-plugin-8.0.1.tgz", - "integrity": "sha512-TwEeGse0/wq+t3SFW0DEwroMS/cDkwVZT+vj7tMAYTp7llt/yz6NuW2n04X2M5P/kSfBQOORhrHAN2mqZdmybg==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/@types/copy-webpack-plugin/-/copy-webpack-plugin-10.1.0.tgz", + "integrity": "sha512-Dk0NUW3X6hVQdkH2n9R7NejjPNCocZBiv8XF8Ac5su2d6EKzCcG/yWDwnWGrEsAWvogoADJyUKULwncx0G9Jkg==", "dev": true, "requires": { - "@types/node": "*", - "tapable": "^2.0.0", - "webpack": "^5.1.0" + "copy-webpack-plugin": "*" } }, "@types/eslint": { - "version": 
"7.28.0", - "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-7.28.0.tgz", - "integrity": "sha512-07XlgzX0YJUn4iG1ocY4IX9DzKSmMGUs6ESKlxWhZRaa0fatIWaHWUVapcuGa8r5HFnTqzj+4OCjd5f7EZ/i/A==", + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.1.tgz", + "integrity": "sha512-GE44+DNEyxxh2Kc6ro/VkIj+9ma0pO0bwv9+uHSyBrikYOHr8zYcdPvnBOp1aw8s+CjRvuSx7CyWqRrNFQ59mA==", "dev": true, "requires": { "@types/estree": "*", @@ -58,9 +56,9 @@ } }, "@types/eslint-scope": { - "version": "3.7.1", - "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.1.tgz", - "integrity": "sha512-SCFeogqiptms4Fg29WpOTk5nHIzfpKCemSN63ksBQYKTcXoJEmJagV+DhVmbapZzY4/5YaOV1nZwrsU79fFm1g==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", "dev": true, "requires": { "@types/eslint": "*", @@ -80,15 +78,15 @@ "dev": true }, "@types/node": { - "version": "12.20.24", - "resolved": "https://registry.npmjs.org/@types/node/-/node-12.20.24.tgz", - "integrity": "sha512-yxDeaQIAJlMav7fH5AQqPH1u8YIuhYJXYBzxaQ4PifsU0GDO38MSdmEDeRlIxrKbC6NbEaaEHDanWb+y30U8SQ==", + "version": "17.0.14", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.14.tgz", + "integrity": "sha512-SbjLmERksKOGzWzPNuW7fJM7fk3YXVTFiZWB/Hs99gwhk+/dnrQRPBQjPW9aO+fi1tAffi9PrwFvsmOKmDTyng==", "dev": true }, "@types/vscode": { - "version": "1.57.1", - "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.57.1.tgz", - "integrity": "sha512-I+NlKdnDnUZZ5HYu3F99ye3ERORnoqdyPer6nXVC7ToU/4WEjrCQOlLosmLyVoi75+UbKCJMFqTgeZuID+8yoA==", + "version": "1.63.2", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.63.2.tgz", + "integrity": "sha512-awvdx4vX7SkMKyvWIlRjycjb4blYRSQI3Bav0YMn+lJLGN6gJgb20urN/dQCv/2ejDu5S6ADEBt6O15DOpIAkg==", "dev": true }, "@webassemblyjs/ast": { @@ 
-238,24 +236,24 @@ } }, "@webpack-cli/configtest": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.0.4.tgz", - "integrity": "sha512-cs3XLy+UcxiP6bj0A6u7MLLuwdXJ1c3Dtc0RkKg+wiI1g/Ti1om8+/2hc2A2B60NbBNAbMgyBMHvyymWm/j4wQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.1.tgz", + "integrity": "sha512-1FBc1f9G4P/AxMqIgfZgeOTuRnwZMten8E7zap5zgpPInnCrP8D4Q81+4CWIch8i/Nf7nXjP0v6CjjbHOrXhKg==", "dev": true }, "@webpack-cli/info": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.3.0.tgz", - "integrity": "sha512-ASiVB3t9LOKHs5DyVUcxpraBXDOKubYu/ihHhU+t1UPpxsivg6Od2E2qU4gJCekfEddzRBzHhzA/Acyw/mlK/w==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.1.tgz", + "integrity": "sha512-PKVGmazEq3oAo46Q63tpMr4HipI3OPfP7LiNOEJg963RMgT0rqheag28NCML0o3GIzA3DmxP1ZIAv9oTX1CUIA==", "dev": true, "requires": { "envinfo": "^7.7.3" } }, "@webpack-cli/serve": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.5.2.tgz", - "integrity": "sha512-vgJ5OLWadI8aKjDlOH3rb+dYyPd2GTZuQC/Tihjct6F9GpXGZINo3Y/IVuZVTM1eDQB+/AOsjPUWH/WySDaXvw==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.1.tgz", + "integrity": "sha512-gNGTiTrjEVQ0OcVnzsRSqTxaBSr+dmTfm+qJsCDluky8uhdLWep7Gcr62QsAKHTMxjCS/8nEITsmFAhfIx+QSw==", "dev": true }, "@xtuc/ieee754": { @@ -271,33 +269,51 @@ "dev": true }, "acorn": { - "version": "8.5.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz", - "integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==", + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.0.tgz", + "integrity": "sha512-V/LGr1APy+PXIwKebEWrkZPwoeoF+w1jiOBUmuxuiUIaOHtob8Qc9BTrYo7VuI5fR8tqsy+buA2WFooR5olqvQ==", "dev": true }, 
"acorn-import-assertions": { - "version": "1.7.6", - "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.7.6.tgz", - "integrity": "sha512-FlVvVFA1TX6l3lp8VjDnYYq7R1nyW6x3svAt4nDgrWQ9SBaSh9CnbwgSUTasgfNfOG5HlM1ehugCvM+hjo56LA==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", "dev": true }, "ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.10.0.tgz", + "integrity": "sha512-bzqAEZOjkrUMl2afH8dknrq5KEk2SrwdBROR+vH1EKVQTqaUbJVPdc/gEdggTMM0Se+s+Ja4ju4TlNcStKl2Hw==", "dev": true, "requires": { "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", "uri-js": "^4.2.2" } }, + "ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dev": true, + "requires": { + "ajv": "^8.0.0" + } + }, "ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.3" + } + }, + "ansi-regex": 
{ + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", "dev": true }, "ansi-styles": { @@ -309,25 +325,38 @@ "color-convert": "^2.0.1" } }, - "argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "aproba": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-1.2.0.tgz", + "integrity": "sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==", + "dev": true + }, + "are-we-there-yet": { + "version": "1.1.7", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-1.1.7.tgz", + "integrity": "sha512-nxwy40TuMiUGqMyRHgCSWZ9FM4VAoRP4xUYSTv5ImRog+h9yISPbVH7H8fASCIzYn9wlEv4zvFL7uKDMCFQm3g==", "dev": true, "requires": { - "sprintf-js": "~1.0.2" + "delegates": "^1.0.0", + "readable-stream": "^2.0.6" } }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, "array-union": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", - "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-3.0.1.tgz", + "integrity": "sha512-1OvF9IbWwaeiM9VhzYXVQacMibxpXOMYVNIvMtKRyX9SImBXpKcFr8XvFDeEslCyuH/t6KRt7HEO94AlP8Iatw==", "dev": true }, "azure-devops-node-api": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-11.0.1.tgz", - "integrity": 
"sha512-YMdjAw9l5p/6leiyIloxj3k7VIvYThKjvqgiQn88r3nhT93ENwsoDS3A83CyJ4uTWzCZ5f5jCi6c27rTU5Pz+A==", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/azure-devops-node-api/-/azure-devops-node-api-11.1.0.tgz", + "integrity": "sha512-6/2YZuf+lJzJLrjXNYEA5RXAkMCb8j/4VcHD0qJQRsgG/KsRMYo0HgDh0by1FGHyZkQWY5LmQyJqCwRVUB3Y7Q==", "dev": true, "requires": { "tunnel": "0.0.6", @@ -339,6 +368,36 @@ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "dev": true + }, + "bl": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, + "requires": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, "boolbase": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", @@ -364,16 +423,26 @@ } }, "browserslist": { - "version": "4.17.0", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.17.0.tgz", - "integrity": "sha512-g2BJ2a0nEYvEFQC208q8mVAhfNwpZ5Mu8BwgtCdZKO3qx98HChmeg448fPdUzld8aFmfLgVh7yymqV+q1lJZ5g==", + "version": "4.19.1", + "resolved": 
"https://registry.npmjs.org/browserslist/-/browserslist-4.19.1.tgz", + "integrity": "sha512-u2tbbG5PdKRTUoctO3NBD8FQ5HdPh1ZXPHzp1rwaa5jTc+RV9/+RlWiAIKmjRPQF+xbGM9Kklj5bZQFa2s/38A==", "dev": true, "requires": { - "caniuse-lite": "^1.0.30001254", - "colorette": "^1.3.0", - "electron-to-chromium": "^1.3.830", + "caniuse-lite": "^1.0.30001286", + "electron-to-chromium": "^1.4.17", "escalade": "^3.1.1", - "node-releases": "^1.1.75" + "node-releases": "^2.0.1", + "picocolors": "^1.0.0" + } + }, + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" } }, "buffer-crc32": { @@ -399,9 +468,9 @@ } }, "caniuse-lite": { - "version": "1.0.30001255", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001255.tgz", - "integrity": "sha512-F+A3N9jTZL882f/fg/WWVnKSu6IOo3ueLz4zwaOPbPYHNmM/ZaDUyzyJwS1mZhX7Ex5jqTyW599Gdelh5PDYLQ==", + "version": "1.0.30001307", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001307.tgz", + "integrity": "sha512-+MXEMczJ4FuxJAUp0jvAl6Df0NI/OfW1RWEE61eSmzS7hw6lz4IKutbhbXendwq8BljfFuHtu26VWsg4afQ7Ng==", "dev": true }, "chalk": { @@ -412,17 +481,6 @@ "requires": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" - }, - "dependencies": { - "supports-color": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", - "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", - "dev": true, - "requires": { - "has-flag": "^4.0.0" - } - } } }, "cheerio": { @@ -453,6 +511,12 @@ "domutils": "^2.7.0" } }, + "chownr": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz", + "integrity": 
"sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==", + "dev": true + }, "chrome-trace-event": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", @@ -470,6 +534,12 @@ "shallow-clone": "^3.0.0" } }, + "code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true + }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -486,15 +556,15 @@ "dev": true }, "colorette": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/colorette/-/colorette-1.4.0.tgz", - "integrity": "sha512-Y2oEozpomLn7Q3HFP7dpww7AtMJplbM9lGZP6RDfHqmbeRjiwRg4n6VM6j4KLmRke85uWEI7JqF17f3pqdRA0g==", + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", "dev": true }, "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", + "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", "dev": true }, "concat-map": { @@ -502,21 +572,32 @@ "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" }, + "console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=", + "dev": true + }, "copy-webpack-plugin": { - 
"version": "9.0.1", - "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-9.0.1.tgz", - "integrity": "sha512-14gHKKdYIxF84jCEgPgYXCPpldbwpxxLbCmA7LReY7gvbaT555DgeBWBgBZM116tv/fO6RRJrsivBqRyRlukhw==", + "version": "10.2.4", + "resolved": "https://registry.npmjs.org/copy-webpack-plugin/-/copy-webpack-plugin-10.2.4.tgz", + "integrity": "sha512-xFVltahqlsRcyyJqQbDY6EYTtyQZF9rf+JPjwHObLdPFMEISqkFkr7mFoVOC6BfYS/dNThyoQKvziugm+OnwBg==", "dev": true, "requires": { - "fast-glob": "^3.2.5", - "glob-parent": "^6.0.0", - "globby": "^11.0.3", + "fast-glob": "^3.2.7", + "glob-parent": "^6.0.1", + "globby": "^12.0.2", "normalize-path": "^3.0.0", - "p-limit": "^3.1.0", - "schema-utils": "^3.0.0", + "schema-utils": "^4.0.0", "serialize-javascript": "^6.0.0" } }, + "core-util-is": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "dev": true + }, "cross-spawn": { "version": "7.0.3", "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", @@ -529,28 +610,43 @@ } }, "css-select": { - "version": "4.1.3", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.1.3.tgz", - "integrity": "sha512-gT3wBNd9Nj49rAbmtFHj1cljIAOLYSX1nZ8CB7TBO3INYckygm5B7LISU/szY//YmdiSLbJvDLOx9VnMVpMBxA==", + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-4.2.1.tgz", + "integrity": "sha512-/aUslKhzkTNCQUB2qTX84lVmfia9NyjP3WpDGtj/WxhwBzWBYUV3DgUpurHTme8UTPcPlAD1DJ+b0nN/t50zDQ==", "dev": true, "requires": { "boolbase": "^1.0.0", - "css-what": "^5.0.0", - "domhandler": "^4.2.0", - "domutils": "^2.6.0", - "nth-check": "^2.0.0" + "css-what": "^5.1.0", + "domhandler": "^4.3.0", + "domutils": "^2.8.0", + "nth-check": "^2.0.1" } }, "css-what": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-5.0.1.tgz", - 
"integrity": "sha512-FYDTSHb/7KXsWICVsxdmiExPjCfRC4qRFBdVwv7Ax9hMnvMmEjP9RfxTEZ3qPZGmADDn2vAKSo9UcN1jKVYscg==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-5.1.0.tgz", + "integrity": "sha512-arSMRWIIFY0hV8pIxZMEfmMI47Wj3R/aWpZDDxWYCPEiOMv6tfOrnpDtgxBYPEQD4V0Y/958+1TdC3iWTFcUPw==", "dev": true }, - "denodeify": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/denodeify/-/denodeify-1.2.1.tgz", - "integrity": "sha1-OjYof1A05pnnV3kBBSwubJQlFjE=", + "decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "dev": true, + "requires": { + "mimic-response": "^3.1.0" + } + }, + "deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true + }, + "delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=", "dev": true }, "detect-indent": { @@ -559,6 +655,12 @@ "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==", "dev": true }, + "detect-libc": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.0.tgz", + "integrity": "sha512-S55LzUl8HUav8l9E2PBTlC5PAJrHK7tkM+XXFGD+fbsbkTzhCpG6K05LxJcUOEWzMa4v6ptcMZ9s3fOdJDu0Zw==", + "dev": true + }, "dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -586,9 +688,9 @@ "dev": true }, "domhandler": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.2.2.tgz", - "integrity": 
"sha512-PzE9aBMsdZO8TK4BnuJwH0QT41wgMbRzuZrHUcpYncEjmQazq8QEaBWgLG7ZyC/DAZKEgglpIA6j4Qn/HmxS3w==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-4.3.0.tgz", + "integrity": "sha512-fC0aXNQXqKSFTr2wDNZDhsEYjCiYsDWl3D01kwt25hm1YIPyDGHvvi3rw+PLqHAl/m71MaiF7d5zvBr0p5UB2g==", "dev": true, "requires": { "domelementtype": "^2.2.0" @@ -606,15 +708,24 @@ } }, "electron-to-chromium": { - "version": "1.3.833", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.3.833.tgz", - "integrity": "sha512-h+9aVaUHjyunLqtCjJF2jrJ73tYcJqo2cCGKtVAXH9WmnBsb8hiChRQ0P1uXjdxR6Wcfxibephy41c1YlZA/pA==", + "version": "1.4.64", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.64.tgz", + "integrity": "sha512-8mec/99xgLUZCIZZq3wt61Tpxg55jnOSpxGYapE/1Ma9MpFEYYaz4QNYm0CM1rrnCo7i3FRHhbaWjeCLsveGjQ==", "dev": true }, + "end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "requires": { + "once": "^1.4.0" + } + }, "enhanced-resolve": { - "version": "5.8.2", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.2.tgz", - "integrity": "sha512-F27oB3WuHDzvR2DOGNTaYy0D5o0cnrv8TeI482VM4kYgQd/FT9lUQwuNsJ0oOHtBUq7eiW5ytqzp7nBFknL+GA==", + "version": "5.8.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.8.3.tgz", + "integrity": "sha512-EGAbGvH7j7Xt2nc0E7D99La1OiEs8LnyimkRgwExpUMScN6O+3x9tIWs7PLQZVNx4YD+00skHXPXi1yQHpAmZA==", "dev": true, "requires": { "graceful-fs": "^4.2.4", @@ -634,9 +745,9 @@ "dev": true }, "es-module-lexer": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.7.1.tgz", - "integrity": 
"sha512-MgtWFl5No+4S3TmhDmCz2ObFGm6lEpTnzbQi+Dd+pw4mlTIZTmM2iAs5gRlmx5zS9luzobCSBSI90JM/1/JgOw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==", "dev": true }, "escalade": { @@ -671,9 +782,9 @@ }, "dependencies": { "estraverse": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.2.0.tgz", - "integrity": "sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ==", + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true } } @@ -707,6 +818,12 @@ "strip-final-newline": "^2.0.0" } }, + "expand-template": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/expand-template/-/expand-template-2.0.3.tgz", + "integrity": "sha512-XYfuKMvj4O35f/pOXLObndIRvyQ+/+6AhODh+OKWj9S9498pHHn/IMszH+gt0fBCRWMNfk1ZSp5x3AifmnI2vg==", + "dev": true + }, "fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -714,9 +831,9 @@ "dev": true }, "fast-glob": { - "version": "3.2.7", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.7.tgz", - "integrity": "sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q==", + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", "dev": true, "requires": { "@nodelib/fs.stat": "^2.0.2", @@ -750,9 +867,9 @@ "dev": true }, "fastq": { - "version": "1.12.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.12.0.tgz", - "integrity": 
"sha512-VNX0QkHK3RsXVKr9KrlUv/FoTa0NdbYoHHl7uXHv2rzyHSlxjdNAKug2twd9luJxpcyNeAgf5iPPMutJO67Dfg==", + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.13.0.tgz", + "integrity": "sha512-YpkpUnK8od0o1hmeSc7UUs/eB/vIPWJYjKck2QKIzAf71Vm1AAQ3EbuZB3g2JIy+pg+ERD0vqI79KyZiB2e2Nw==", "dev": true, "requires": { "reusify": "^1.0.4" @@ -786,6 +903,12 @@ "path-exists": "^4.0.0" } }, + "fs-constants": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", + "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==", + "dev": true + }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -798,6 +921,22 @@ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", "dev": true }, + "gauge": { + "version": "2.7.4", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz", + "integrity": "sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=", + "dev": true, + "requires": { + "aproba": "^1.0.3", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.0", + "object-assign": "^4.1.0", + "signal-exit": "^3.0.0", + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wide-align": "^1.1.0" + } + }, "get-intrinsic": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", @@ -815,10 +954,16 @@ "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true }, + "github-from-package": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/github-from-package/-/github-from-package-0.0.0.tgz", + "integrity": "sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4=", + "dev": true + }, "glob": { - "version": "7.1.7", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", - "integrity": 
"sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-lmLf6gtyrPq8tTjSmrO94wBeQbFR3HbLHbuyD69wuyQkImp2hWqMGB47OX65FBkPffO641IP9jWa1z4ivqG26Q==", "dev": true, "requires": { "fs.realpath": "^1.0.0", @@ -830,12 +975,12 @@ } }, "glob-parent": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.1.tgz", - "integrity": "sha512-kEVjS71mQazDBHKcsq4E9u/vUzaLcw1A8EtUeydawvIWQCJM0qQ08G1H7/XTjFUulla6XQiDOG6MXSaG0HDKog==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, "requires": { - "is-glob": "^4.0.1" + "is-glob": "^4.0.3" } }, "glob-to-regexp": { @@ -845,23 +990,23 @@ "dev": true }, "globby": { - "version": "11.0.4", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.4.tgz", - "integrity": "sha512-9O4MVG9ioZJ08ffbcyVYyLOJLk5JQ688pJ4eMGLpdWLHq/Wr1D9BlriLQyL0E+jbkuePVZXYFj47QM/v093wHg==", + "version": "12.2.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-12.2.0.tgz", + "integrity": "sha512-wiSuFQLZ+urS9x2gGPl1H5drc5twabmm4m2gTR27XDFyjUHJUNsS8o/2aKyIF6IoBaR630atdher0XJ5g6OMmA==", "dev": true, "requires": { - "array-union": "^2.1.0", + "array-union": "^3.0.1", "dir-glob": "^3.0.1", - "fast-glob": "^3.1.1", - "ignore": "^5.1.4", - "merge2": "^1.3.0", - "slash": "^3.0.0" + "fast-glob": "^3.2.7", + "ignore": "^5.1.9", + "merge2": "^1.4.1", + "slash": "^4.0.0" } }, "graceful-fs": { - "version": "4.2.8", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.8.tgz", - "integrity": "sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==", + "version": "4.2.9", + "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz", + "integrity": "sha512-NtNxqUcXgpW2iMrfqSfR73Glt39K+BLwWsPs94yR63v45T0Wbej7eRmL5cWfwEgqXnmjQp3zaJTshdRW/qC2ZQ==", "dev": true }, "has": { @@ -885,6 +1030,21 @@ "integrity": "sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw==", "dev": true }, + "has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=", + "dev": true + }, + "hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "requires": { + "lru-cache": "^6.0.0" + } + }, "htmlparser2": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-6.1.0.tgz", @@ -903,16 +1063,22 @@ "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true }, + "ieee754": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", + "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", + "dev": true + }, "ignore": { - "version": "5.1.8", - "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", - "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", "dev": true }, "import-local": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.0.2.tgz", - "integrity": 
"sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", "dev": true, "requires": { "pkg-dir": "^4.2.0", @@ -935,6 +1101,12 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, + "ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", + "dev": true + }, "interpret": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", @@ -942,9 +1114,9 @@ "dev": true }, "is-core-module": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.6.0.tgz", - "integrity": "sha512-wShG8vs60jKfPWpF2KZRaAtvt3a20OAn7+IJ6hLPECpSABLcKtFKTTI4ZtH5QcBruBHlq+WsdHWyz0BCZW7svQ==", + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", "dev": true, "requires": { "has": "^1.0.3" @@ -956,10 +1128,19 @@ "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", "dev": true }, + "is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "requires": { + "number-is-nan": "^1.0.0" + } + }, "is-glob": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", - "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "version": 
"4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, "requires": { "is-extglob": "^2.1.1" @@ -986,6 +1167,12 @@ "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true }, + "isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=", + "dev": true + }, "isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -999,14 +1186,25 @@ "dev": true }, "jest-worker": { - "version": "27.1.1", - "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.1.1.tgz", - "integrity": "sha512-XJKCL7tu+362IUYTWvw8+3S75U7qMiYiRU6u5yqscB48bTvzwN6i8L/7wVTXiFLwkRsxARNM7TISnTvcgv9hxA==", + "version": "27.4.6", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.4.6.tgz", + "integrity": "sha512-gHWJF/6Xi5CTG5QCvROr6GcmpIqNYpDJyc8A1h/DyXqH1tD6SnRCM0d3U5msV31D2LB/U+E0M+W4oyvKV44oNw==", "dev": true, "requires": { "@types/node": "*", "merge-stream": "^2.0.0", "supports-color": "^8.0.0" + }, + "dependencies": { + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } } }, "json-parse-better-errors": { @@ -1016,11 +1214,21 @@ "dev": true }, "json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "version": "1.0.0", + "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "dev": true }, + "keytar": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/keytar/-/keytar-7.8.0.tgz", + "integrity": "sha512-mR+BqtAOIW8j+T5FtLVyckCbvROWQD+4FzPeFMuk5njEZkXLpVPCGF26Y3mTyxMAAL1XCfswR7S6kIf+THSRFA==", + "dev": true, + "requires": { + "node-addon-api": "^4.3.0", + "prebuild-install": "^7.0.1" + } + }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -1034,9 +1242,9 @@ "dev": true }, "linkify-it": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-2.2.0.tgz", - "integrity": "sha512-GnAl/knGn+i1U/wjBz3akz2stz+HrHLsxMwHQGofCDfPvlf+gDKN58UtfmUquTY4/MXeE2x7k19KQmeoZi94Iw==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-3.0.3.tgz", + "integrity": "sha512-ynTsyrFSdE5oZ/O9GEf00kPngmOfVwazR5GKDq6EYfhlpFug3J2zybX56a2PRRpc9P+FuSoGNAwjlbDs9jJBPQ==", "dev": true, "requires": { "uc.micro": "^1.0.1" @@ -1057,12 +1265,6 @@ "p-locate": "^4.1.0" } }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, "lru-cache": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", @@ -1072,22 +1274,22 @@ } }, "markdown-it": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-10.0.0.tgz", - "integrity": "sha512-YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg==", + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-12.3.2.tgz", + "integrity": 
"sha512-TchMembfxfNVpHkbtriWltGWc+m3xszaRD0CZup7GFFhzIgQqxIfn3eGj1yZpfuflzPvfkt611B2Q/Bsk1YnGg==", "dev": true, "requires": { - "argparse": "^1.0.7", - "entities": "~2.0.0", - "linkify-it": "^2.0.0", + "argparse": "^2.0.1", + "entities": "~2.1.0", + "linkify-it": "^3.0.1", "mdurl": "^1.0.1", "uc.micro": "^1.0.5" }, "dependencies": { "entities": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/entities/-/entities-2.0.3.tgz", - "integrity": "sha512-MyoZ0jgnLvB2X3Lg5HqpFmn1kybDiIfEQmKzTb5apr51Rb+T3KdmMiqa70T+bhGnyv7bQ6WMj2QMHpGMmlrUYQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-2.1.0.tgz", + "integrity": "sha512-hCx1oky9PFrJ611mf0ifBLBRW8lUUVRlFolb5gWRfIELabBlbp9xZvrqZLZAs+NxFnbfQoeGd8wDkygjg7U85w==", "dev": true } } @@ -1127,18 +1329,18 @@ "dev": true }, "mime-db": { - "version": "1.49.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.49.0.tgz", - "integrity": "sha512-CIc8j9URtOVApSFCQIF+VBkX1RwXp/oMMOrqdyXSBXq5RWNEsRfyj1kiRnQgmNXmHxPoFIxOroKA3zcU9P+nAA==", + "version": "1.51.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.51.0.tgz", + "integrity": "sha512-5y8A56jg7XVQx2mbv1lu49NR4dokRnhZYTtL+KGfaa27uq4pSTXkwQkFJl4pkRMyNFz/EtYDSkiiEHx3F7UN6g==", "dev": true }, "mime-types": { - "version": "2.1.32", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.32.tgz", - "integrity": "sha512-hJGaVS4G4c9TSMYh2n6SQAGrC4RnfU+daP8G7cSCmaqNjiOoUY0VHCMS42pxnQmVF1GWwFhbHWn3RIxCqTmZ9A==", + "version": "2.1.34", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.34.tgz", + "integrity": "sha512-6cP692WwGIs9XXdOO4++N+7qjqv0rqxxVvJ3VHPh/Sc9mVZcQP+ZGhkKiTvWMQRr2tbHkJP/Yn7Y0npb3ZBs4A==", "dev": true, "requires": { - "mime-db": "1.49.0" + "mime-db": "1.51.0" } }, "mimic-fn": { @@ -1147,6 +1349,12 @@ "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true }, + "mimic-response": { + "version": 
"3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "dev": true + }, "minimatch": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", @@ -1161,22 +1369,49 @@ "integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==", "dev": true }, + "mkdirp-classic": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz", + "integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==", + "dev": true + }, "mute-stream": { "version": "0.0.8", "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", "dev": true }, + "napi-build-utils": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/napi-build-utils/-/napi-build-utils-1.0.2.tgz", + "integrity": "sha512-ONmRUqK7zj7DWX0D9ADe03wbwOBZxNAfF20PlGfCWQcD3+/MakShIHrMqx9YwPTfxDdF1zLeL+RGZiR9kGMLdg==", + "dev": true + }, "neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", "dev": true }, + "node-abi": { + "version": "3.8.0", + "resolved": "https://registry.npmjs.org/node-abi/-/node-abi-3.8.0.tgz", + "integrity": "sha512-tzua9qWWi7iW4I42vUPKM+SfaF0vQSLAm4yO5J83mSwB7GeoWrDKC/K+8YCnYNwqP5duwazbw2X9l4m8SC2cUw==", + "dev": true, + "requires": { + "semver": "^7.3.5" + } + }, + "node-addon-api": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-4.3.0.tgz", + "integrity": 
"sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ==", + "dev": true + }, "node-releases": { - "version": "1.1.75", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-1.1.75.tgz", - "integrity": "sha512-Qe5OUajvqrqDSy6wrWFmMwfJ0jVgwiw4T3KqmbTcZ62qW0gQkheXYhcFM1+lOVcGUoRxcEcfyvFMAnDgaF1VWw==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.1.tgz", + "integrity": "sha512-CqyzN6z7Q6aMeF/ktcMVTzhAHCEpf8SOarwpzpf8pNBY2k5/oM34UHldUwp8VKI7uxct2HxSRdJjBaZeESzcxA==", "dev": true }, "normalize-path": { @@ -1194,19 +1429,43 @@ "path-key": "^3.0.0" } }, + "npmlog": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-4.1.2.tgz", + "integrity": "sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==", + "dev": true, + "requires": { + "are-we-there-yet": "~1.1.2", + "console-control-strings": "~1.1.0", + "gauge": "~2.7.3", + "set-blocking": "~2.0.0" + } + }, "nth-check": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.0.tgz", - "integrity": "sha512-i4sc/Kj8htBrAiH1viZ0TgU8Y5XqCaV/FziYK6TBczxmeKm3AEFWqqF3195yKudrarqy7Zu80Ra5dobFjn9X/Q==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.0.1.tgz", + "integrity": "sha512-it1vE95zF6dTT9lBsYbxvqh0Soy4SPowchj0UBGj/V6cTPnXXtQOPUbhZ6CmGzAD/rW22LQK6E96pcdJXk4A4w==", "dev": true, "requires": { "boolbase": "^1.0.0" } }, + "number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true + }, + "object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "dev": true + }, "object-inspect": { - "version": "1.11.0", - "resolved": 
"https://registry.npmjs.org/object-inspect/-/object-inspect-1.11.0.tgz", - "integrity": "sha512-jp7ikS6Sd3GxQfZJPyH3cjcbJF6GZPClgdV+EFygjFLQ5FmW/dRUnTd9PQ9k0JhoNDabWFbpF1yCdSWCC6gexg==", + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz", + "integrity": "sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g==", "dev": true }, "once": { @@ -1227,35 +1486,13 @@ "mimic-fn": "^2.1.0" } }, - "os-homedir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-homedir/-/os-homedir-1.0.2.tgz", - "integrity": "sha1-/7xJiDNuDoM94MFox+8VISGqf7M=", - "dev": true - }, - "os-tmpdir": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", - "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", - "dev": true - }, - "osenv": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/osenv/-/osenv-0.1.5.tgz", - "integrity": "sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==", - "dev": true, - "requires": { - "os-homedir": "^1.0.0", - "os-tmpdir": "^1.0.0" - } - }, "p-limit": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", - "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", "dev": true, "requires": { - "yocto-queue": "^0.1.0" + "p-try": "^2.0.0" } }, "p-locate": { @@ -1265,17 +1502,6 @@ "dev": true, "requires": { "p-limit": "^2.2.0" - }, - "dependencies": { - "p-limit": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", - "integrity": 
"sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", - "dev": true, - "requires": { - "p-try": "^2.0.0" - } - } } }, "p-try": { @@ -1352,10 +1578,16 @@ "integrity": "sha1-elfrVQpng/kRUzH89GY9XI4AelA=", "dev": true }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true + }, "picomatch": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.0.tgz", - "integrity": "sha512-lY1Q/PiJGC2zOv/z391WOTD+Z02bCgsFfvxoXXf6h7kv9o+WmsmzYqrAwY63sNgOxE4xEdq0WyUnXfKeBrSvYw==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true }, "pkg-dir": { @@ -1367,6 +1599,43 @@ "find-up": "^4.0.0" } }, + "prebuild-install": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/prebuild-install/-/prebuild-install-7.0.1.tgz", + "integrity": "sha512-QBSab31WqkyxpnMWQxubYAHR5S9B2+r81ucocew34Fkl98FhvKIF50jIJnNOBmAZfyNV7vE5T6gd3hTVWgY6tg==", + "dev": true, + "requires": { + "detect-libc": "^2.0.0", + "expand-template": "^2.0.3", + "github-from-package": "0.0.0", + "minimist": "^1.2.3", + "mkdirp-classic": "^0.5.3", + "napi-build-utils": "^1.0.1", + "node-abi": "^3.3.0", + "npmlog": "^4.0.1", + "pump": "^3.0.0", + "rc": "^1.2.7", + "simple-get": "^4.0.0", + "tar-fs": "^2.0.0", + "tunnel-agent": "^0.6.0" + } + }, + "process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true + }, + "pump": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "requires": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, "punycode": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", @@ -1374,9 +1643,9 @@ "dev": true }, "qs": { - "version": "6.10.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.1.tgz", - "integrity": "sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg==", + "version": "6.10.3", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.3.tgz", + "integrity": "sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ==", "dev": true, "requires": { "side-channel": "^1.0.4" @@ -1397,6 +1666,18 @@ "safe-buffer": "^5.1.0" } }, + "rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dev": true, + "requires": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + } + }, "read": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", @@ -1406,6 +1687,29 @@ "mute-stream": "~0.0.4" } }, + "readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + } + } + }, "rechoir": { "version": "0.6.2", "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", @@ -1415,14 +1719,21 @@ "resolve": "^1.1.6" } }, + "require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "dev": true + }, "resolve": { - "version": "1.20.0", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.20.0.tgz", - "integrity": "sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==", + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", "dev": true, "requires": { - "is-core-module": "^2.2.0", - "path-parse": "^1.0.6" + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" } }, "resolve-cwd": { @@ -1470,15 +1781,22 @@ "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", "dev": true }, + "sax": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", + "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==", + "dev": true + }, "schema-utils": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", - "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-4.0.0.tgz", + 
"integrity": "sha512-1edyXKgh6XnJsJSQ8mKWXnN/BVaIbFMLpouRUrXgVq7WYne5kw3MW7UPhO44uRXQSIpTSXoJbmrR2X0w9kUTyg==", "dev": true, "requires": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" + "@types/json-schema": "^7.0.9", + "ajv": "^8.8.0", + "ajv-formats": "^2.1.1", + "ajv-keywords": "^5.0.0" } }, "semver": { @@ -1498,6 +1816,12 @@ "randombytes": "^2.1.0" } }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "dev": true + }, "shallow-clone": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", @@ -1523,9 +1847,9 @@ "dev": true }, "shelljs": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.4.tgz", - "integrity": "sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==", + "version": "0.8.5", + "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", + "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", "dev": true, "requires": { "glob": "^7.0.0", @@ -1534,13 +1858,13 @@ } }, "shx": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.3.tgz", - "integrity": "sha512-nZJ3HFWVoTSyyB+evEKjJ1STiixGztlqwKLTUNV5KqMWtGey9fTd4KU1gdZ1X9BV6215pswQ/Jew9NsuS/fNDA==", + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", + "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", "dev": true, "requires": { "minimist": "^1.2.3", - "shelljs": "^0.8.4" + "shelljs": "^0.8.5" } }, "side-channel": { @@ -1555,15 +1879,32 @@ } }, "signal-exit": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", - "integrity": 
"sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", "dev": true }, + "simple-concat": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/simple-concat/-/simple-concat-1.0.1.tgz", + "integrity": "sha512-cSFtAPtRhljv69IK0hTVZQ+OfE9nePi/rtJmw5UjHeVyVroEqJXP1sFztKUy1qU+xvz3u/sfYJLa947b7nAN2Q==", + "dev": true + }, + "simple-get": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/simple-get/-/simple-get-4.0.1.tgz", + "integrity": "sha512-brv7p5WgH0jmQJr1ZDDfKDOSeWWg+OVypG99A/5vYGPqJ6pxiaHLy8nxtFjBA7oMa01ebA9gfh1uMCFqOuXxvA==", + "dev": true, + "requires": { + "decompress-response": "^6.0.0", + "once": "^1.3.1", + "simple-concat": "^1.0.0" + } + }, "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-4.0.0.tgz", + "integrity": "sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==", "dev": true }, "source-map": { @@ -1573,20 +1914,51 @@ "dev": true }, "source-map-support": { - "version": "0.5.20", - "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.20.tgz", - "integrity": "sha512-n1lZZ8Ve4ksRqizaBQgxXDgKwttHDhyfQjA6YZZn8+AroHbsIz+JjwxQDxbp+7y5OYCI8t1Yk7etjD9CRd2hIw==", + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", "dev": true, "requires": { "buffer-from": "^1.0.0", "source-map": "^0.6.0" } 
}, - "sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", - "dev": true + "string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "requires": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + } + }, + "string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "requires": { + "safe-buffer": "~5.1.0" + }, + "dependencies": { + "safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + } + } + }, + "strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "requires": { + "ansi-regex": "^2.0.0" + } }, "strip-final-newline": { "version": "2.0.0", @@ -1594,32 +1966,88 @@ "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "dev": true }, + "strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "dev": true + }, "supports-color": { - "version": "8.1.1", - "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", - "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "version": "7.2.0", + 
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", "dev": true, "requires": { "has-flag": "^4.0.0" } }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, "tapable": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.0.tgz", - "integrity": "sha512-FBk4IesMV1rBxX2tfiK8RAmogtWn53puLOQlvO8XuwlgxcYbP4mVPS9Ph4aeamSyyVjOl24aYWAuc8U5kCVwMw==", + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", "dev": true }, + "tar-fs": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.1.1.tgz", + "integrity": "sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng==", + "dev": true, + "requires": { + "chownr": "^1.1.1", + "mkdirp-classic": "^0.5.2", + "pump": "^3.0.0", + "tar-stream": "^2.1.4" + } + }, + "tar-stream": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz", + "integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==", + "dev": true, + "requires": { + "bl": "^4.0.3", + "end-of-stream": "^1.4.1", + "fs-constants": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1" + }, + "dependencies": { + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": 
"sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } + } + }, "terser": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.7.2.tgz", - "integrity": "sha512-0Omye+RD4X7X69O0eql3lC4Heh/5iLj3ggxR/B5ketZLOtLiOqukUgjw3q4PDnNQbsrkKr3UMypqStQG3XKRvw==", + "version": "5.10.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.10.0.tgz", + "integrity": "sha512-AMmF99DMfEDiRJfxfY5jj5wNH/bYO09cniSqhfoyxc8sFoYIgkJy86G04UoZU5VjlpnplVu0K6Tx6E9b5+DlHA==", "dev": true, "requires": { "commander": "^2.20.0", "source-map": "~0.7.2", - "source-map-support": "~0.5.19" + "source-map-support": "~0.5.20" }, "dependencies": { + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true + }, "source-map": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", @@ -1629,17 +2057,53 @@ } }, "terser-webpack-plugin": { - "version": "5.2.4", - "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.2.4.tgz", - "integrity": "sha512-E2CkNMN+1cho04YpdANyRrn8CyN4yMy+WdFKZIySFZrGXZxJwJP6PMNGGc/Mcr6qygQHUUqRxnAPmi0M9f00XA==", + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz", + "integrity": "sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g==", "dev": true, "requires": { - "jest-worker": "^27.0.6", - "p-limit": "^3.1.0", + "jest-worker": "^27.4.5", "schema-utils": "^3.1.1", "serialize-javascript": "^6.0.0", "source-map": "^0.6.1", "terser": "^5.7.2" + }, + "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + } } }, "tmp": { @@ -1661,9 +2125,9 @@ } }, "ts-loader": { - "version": "9.2.5", - "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.2.5.tgz", - "integrity": "sha512-al/ATFEffybdRMUIr5zMEWQdVnCGMUA9d3fXJ8dBVvBlzytPvIszoG9kZoR+94k6/i293RnVOXwMaWbXhNy9pQ==", + "version": "9.2.6", + "resolved": "https://registry.npmjs.org/ts-loader/-/ts-loader-9.2.6.tgz", + "integrity": "sha512-QMTC4UFzHmu9wU2VHZEmWWE9cUajjfcdcws+Gh7FhiO+Dy0RnR1bNz0YCHqhI0yRowCE9arVnNxYHqELOy9Hjw==", "dev": true, "requires": { "chalk": "^4.1.0", @@ -1684,6 +2148,15 @@ "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", "dev": true }, + "tunnel-agent": { + "version": "0.6.0", + "resolved": 
"https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", + "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", + "dev": true, + "requires": { + "safe-buffer": "^5.0.1" + } + }, "typed-rest-client": { "version": "1.8.6", "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.8.6.tgz", @@ -1696,9 +2169,9 @@ } }, "typescript": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.2.tgz", - "integrity": "sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ==", + "version": "4.4.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.4.tgz", + "integrity": "sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==", "dev": true }, "uc.micro": { @@ -1708,9 +2181,9 @@ "dev": true }, "underscore": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.1.tgz", - "integrity": "sha512-hzSoAVtJF+3ZtiFX0VgfFPHEDRm7Y/QPjGyNo4TVdnDTdft3tr8hEkD25a1jC+TjTuE7tkHGKkhwCgs9dgBB2g==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.2.tgz", + "integrity": "sha512-ekY1NhRzq0B08g4bGuX4wd2jZx5GnKz6mKSqFL4nqBlfyMGiG10gDFhDTMEfYmDL6Jy0FUIZp7wiRB+0BP7J2g==", "dev": true }, "uri-js": { @@ -1723,41 +2196,41 @@ } }, "url-join": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/url-join/-/url-join-1.1.0.tgz", - "integrity": "sha1-dBxsL0WWxIMNZxhGCSDQySIC3Hg=", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/url-join/-/url-join-4.0.1.tgz", + "integrity": "sha512-jk1+QP6ZJqyOiuEI9AEWQfju/nB2Pw466kbA0LEZljHwKeMgd9WrAEgEGxjPDD2+TNbbb37rTyhEfrCXfuKXnA==", "dev": true }, - "v8-compile-cache": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", - "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", + 
"util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=", "dev": true }, "vsce": { - "version": "1.97.0", - "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.97.0.tgz", - "integrity": "sha512-5Rxj6qO0dN4FnzVS9G94osstx8R3r1OQP39G7WYERpoO9X+OSodVVkRhFDapPNjekfUNo+d5Qn7W1EtNQVoLCg==", + "version": "2.6.7", + "resolved": "https://registry.npmjs.org/vsce/-/vsce-2.6.7.tgz", + "integrity": "sha512-5dEtdi/yzWQbOU7JDUSOs8lmSzzkewBR5P122BUkmXE6A/DEdFsKNsg2773NGXJTwwF1MfsOgUR6QVF3cLLJNQ==", "dev": true, "requires": { "azure-devops-node-api": "^11.0.1", "chalk": "^2.4.2", "cheerio": "^1.0.0-rc.9", "commander": "^6.1.0", - "denodeify": "^1.2.1", "glob": "^7.0.6", + "hosted-git-info": "^4.0.2", + "keytar": "^7.7.0", "leven": "^3.1.0", - "lodash": "^4.17.15", - "markdown-it": "^10.0.0", + "markdown-it": "^12.3.2", "mime": "^1.3.4", "minimatch": "^3.0.3", - "osenv": "^0.1.3", "parse-semver": "^1.1.1", "read": "^1.0.7", "semver": "^5.1.0", "tmp": "^0.2.1", "typed-rest-client": "^1.8.4", - "url-join": "^1.1.0", + "url-join": "^4.0.1", + "xml2js": "^0.4.23", "yauzl": "^2.3.1", "yazl": "^2.2.2" }, @@ -1797,12 +2270,6 @@ "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", "dev": true }, - "commander": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz", - "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==", - "dev": true - }, "has-flag": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", @@ -1864,9 +2331,9 @@ "integrity": "sha512-k8luDIWJWyenLc5ToFQQMaSrqCHiLwyKPHKPQZ5zz21vM+vIVUSvsRpcbiECH4WR88K2XZqc4ScRcZ7nk/jbeA==" }, "watchpack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.2.0.tgz", - "integrity": 
"sha512-up4YAn/XHgZHIxFBVCdlMiWDj6WaLKpwVeGQk2I5thdYxF/KmF0aaz6TfJZ/hfl1h/XlcDr7k1KH7ThDagpFaA==", + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", + "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", "dev": true, "requires": { "glob-to-regexp": "^0.4.1", @@ -1874,9 +2341,9 @@ } }, "webpack": { - "version": "5.52.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.52.0.tgz", - "integrity": "sha512-yRZOat8jWGwBwHpco3uKQhVU7HYaNunZiJ4AkAVQkPCUGoZk/tiIXiwG+8HIy/F+qsiZvSOa+GLQOj3q5RKRYg==", + "version": "5.68.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.68.0.tgz", + "integrity": "sha512-zUcqaUO0772UuuW2bzaES2Zjlm/y3kRBQDVFVCge+s2Y8mwuUTdperGaAv65/NtRL/1zanpSJOq/MD8u61vo6g==", "dev": true, "requires": { "@types/eslint-scope": "^3.7.0", @@ -1888,12 +2355,12 @@ "acorn-import-assertions": "^1.7.6", "browserslist": "^4.14.5", "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.8.0", - "es-module-lexer": "^0.7.1", + "enhanced-resolve": "^5.8.3", + "es-module-lexer": "^0.9.0", "eslint-scope": "5.1.1", "events": "^3.2.0", "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.4", + "graceful-fs": "^4.2.9", "json-parse-better-errors": "^1.0.2", "loader-runner": "^4.2.0", "mime-types": "^2.1.27", @@ -1901,28 +2368,64 @@ "schema-utils": "^3.1.0", "tapable": "^2.1.1", "terser-webpack-plugin": "^5.1.3", - "watchpack": "^2.2.0", - "webpack-sources": "^3.2.0" + "watchpack": "^2.3.1", + "webpack-sources": "^3.2.3" + }, + "dependencies": { + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + 
"version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + } } }, "webpack-cli": { - "version": "4.8.0", - "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.8.0.tgz", - "integrity": "sha512-+iBSWsX16uVna5aAYN6/wjhJy1q/GKk4KjKvfg90/6hykCTSgozbfz5iRgDTSJt/LgSbYxdBX3KBHeobIs+ZEw==", + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.2.tgz", + "integrity": "sha512-m3/AACnBBzK/kMTcxWHcZFPrw/eQuY4Df1TxvIWfWM2x7mRqBQCqKEd96oCUa9jkapLBaFfRce33eGDb4Pr7YQ==", "dev": true, "requires": { "@discoveryjs/json-ext": "^0.5.0", - "@webpack-cli/configtest": "^1.0.4", - "@webpack-cli/info": "^1.3.0", - "@webpack-cli/serve": "^1.5.2", - "colorette": "^1.2.1", + "@webpack-cli/configtest": "^1.1.1", + "@webpack-cli/info": "^1.4.1", + "@webpack-cli/serve": "^1.6.1", + "colorette": "^2.0.14", "commander": "^7.0.0", "execa": "^5.0.0", "fastest-levenshtein": "^1.0.12", "import-local": "^3.0.2", "interpret": "^2.2.0", "rechoir": "^0.7.0", - "v8-compile-cache": "^2.2.0", "webpack-merge": "^5.7.3" }, "dependencies": { @@ -1960,9 +2463,9 @@ } }, "webpack-sources": { - "version": "3.2.0", - "resolved": 
"https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.0.tgz", - "integrity": "sha512-fahN08Et7P9trej8xz/Z7eRu8ltyiygEo/hnRi9KqBUs80KeDcnf96ZJo++ewWd84fEf3xSX9bp4ZS9hbw0OBw==", + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", "dev": true }, "which": { @@ -1974,6 +2477,15 @@ "isexe": "^2.0.0" } }, + "wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "dev": true, + "requires": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, "wildcard": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", @@ -1986,6 +2498,22 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, + "xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dev": true, + "requires": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + } + }, + "xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "dev": true + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -2009,12 +2537,6 @@ "requires": { "buffer-crc32": "~0.2.3" } - }, - "yocto-queue": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", - "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", - "dev": true } } } diff --git 
a/packages/vscode-pyright/package.json b/packages/vscode-pyright/package.json index 19566b09d5b9..82807fccab9c 100644 --- a/packages/vscode-pyright/package.json +++ b/packages/vscode-pyright/package.json @@ -2,7 +2,7 @@ "name": "vscode-pyright", "displayName": "Pyright", "description": "VS Code static type checking for Python", - "version": "1.1.170", + "version": "1.1.225", "private": true, "license": "MIT", "author": { @@ -14,7 +14,7 @@ "url": "https://github.com/Microsoft/pyright" }, "engines": { - "vscode": "^1.57.0" + "vscode": "^1.63.1" }, "keywords": [ "python" @@ -127,7 +127,7 @@ "reportPropertyTypeMismatch": { "type": "string", "description": "Diagnostics for property whose setter and getter have mismatched types.", - "default": "error", + "default": "none", "enum": [ "none", "information", @@ -432,6 +432,17 @@ "error" ] }, + "reportInconsistentConstructor": { + "type": "string", + "description": "Diagnostics for __init__ and __new__ methods whose signatures are inconsistent.", + "default": "none", + "enum": [ + "none", + "information", + "warning", + "error" + ] + }, "reportOverlappingOverload": { "type": "string", "description": "Diagnostics for function overloads that overlap in signature and obscure each other or have incompatible return types.", @@ -443,6 +454,17 @@ "error" ] }, + "reportMissingSuperCall": { + "type": "string", + "description": "Diagnostics for missing call to parent class for inherited `__init__` methods.", + "default": "none", + "enum": [ + "none", + "information", + "warning", + "error" + ] + }, "reportUninitializedInstanceVariable": { "type": "string", "description": "Diagnostics for instance variables that are not declared or initialized within class body or `__init__` method.", @@ -520,6 +542,17 @@ "error" ] }, + "reportMissingParameterType": { + "type": "string", + "description": "Diagnostics for parameters that are missing a type annotation.", + "default": "none", + "enum": [ + "none", + "information", + "warning", + "error" + 
] + }, "reportMissingTypeArgument": { "type": "string", "description": "Diagnostics for generic class reference with missing type arguments.", @@ -663,6 +696,17 @@ "error" ] }, + "reportUnsupportedDunderAll": { + "type": "string", + "description": "Diagnostics for unsupported operations performed on __all__.", + "default": "warning", + "enum": [ + "none", + "information", + "warning", + "error" + ] + }, "reportUnusedCallResult": { "type": "string", "description": "Diagnostics for call expressions whose results are not consumed and are not None.", @@ -685,10 +729,21 @@ "error" ] }, - "reportUnsupportedDunderAll": { + "reportUnnecessaryTypeIgnoreComment": { "type": "string", - "description": "Diagnostics for unsupported operations performed on __all__.", - "default": "warning", + "description": "Diagnostics for '# type: ignore' comments that have no effect.", + "default": "none", + "enum": [ + "none", + "information", + "warning", + "error" + ] + }, + "reportMatchNotExhaustive": { + "type": "string", + "description": "Diagnostics for 'match' statements that do not exhaustively match all possible values.", + "default": "none", "enum": [ "none", "information", @@ -784,16 +839,16 @@ "vscode-languageserver-protocol": "3.16.0" }, "devDependencies": { - "@types/copy-webpack-plugin": "^8.0.1", - "@types/node": "^12.20.24", - "@types/vscode": "~1.57.0", - "copy-webpack-plugin": "^9.0.1", + "@types/copy-webpack-plugin": "^10.1.0", + "@types/node": "^17.0.14", + "@types/vscode": "~1.63.1", + "copy-webpack-plugin": "^10.2.4", "detect-indent": "^6.1.0", - "shx": "^0.3.3", - "ts-loader": "^9.2.5", - "typescript": "~4.4.2", - "vsce": "^1.97.0", - "webpack": "^5.52.0", - "webpack-cli": "^4.8.0" + "shx": "^0.3.4", + "ts-loader": "^9.2.6", + "typescript": "~4.4.4", + "vsce": "^2.6.4", + "webpack": "^5.68.0", + "webpack-cli": "^4.9.2" } } diff --git a/packages/vscode-pyright/schemas/pyrightconfig.schema.json b/packages/vscode-pyright/schemas/pyrightconfig.schema.json index 
26224b9d139c..6e1a61bbbd85 100644 --- a/packages/vscode-pyright/schemas/pyrightconfig.schema.json +++ b/packages/vscode-pyright/schemas/pyrightconfig.schema.json @@ -123,7 +123,7 @@ "$id": "#/properties/strictParameterNoneValue", "type": "boolean", "title": "Allow implicit Optional when default parameter value is None", - "default": false + "default": true }, "enableTypeIgnoreComments": { "$id": "#/properties/enableTypeIgnoreComments", @@ -141,7 +141,7 @@ "$id": "#/properties/reportPropertyTypeMismatch", "$ref": "#/definitions/diagnostic", "title": "Controls reporting of property getter/setter type mismatches", - "default": "error" + "default": "none" }, "reportFunctionMemberAccess": { "$id": "#/properties/reportFunctionMemberAccess", @@ -305,12 +305,24 @@ "title": "Controls reporting of overrides in subclasses that redefine a variable in an incompatible way", "default": "none" }, + "reportInconsistentConstructor": { + "$id": "#/properties/reportInconsistentConstructor", + "$ref": "#/definitions/diagnostic", + "title": "Controls reporting of __init__ and __new__ methods whose signatures are inconsistent", + "default": "none" + }, "reportOverlappingOverload": { "$id": "#/properties/reportOverlappingOverload", "$ref": "#/definitions/diagnostic", "title": "Controls reporting of function overloads that overlap in signature and obscure each other or do not agree on return type", "default": "none" }, + "reportMissingSuperCall": { + "$id": "#/properties/reportMissingSuperCall", + "$ref": "#/definitions/diagnostic", + "title": "Controls reporting of missing call to parent class for inherited `__init__` methods", + "default": "none" + }, "reportUninitializedInstanceVariable": { "$id": "#/properties/reportUninitializedInstanceVariable", "$ref": "#/definitions/diagnostic", @@ -353,6 +365,12 @@ "title": "Controls reporting class and instance variables whose types are unknown", "default": "none" }, + "reportMissingParameterType": { + "$id": 
"#/properties/reportMissingParameterType", + "$ref": "#/definitions/diagnostic", + "title": "Controls reporting input parameters that are missing a type annotation", + "default": "none" + }, "reportMissingTypeArgument": { "$id": "#/properties/reportMissingTypeArgument", "$ref": "#/definitions/diagnostic", @@ -449,12 +467,24 @@ "title": "Controls reporting of call expressions that returns Coroutine whose results are not consumed", "default": "error" }, + "reportUnnecessaryTypeIgnoreComment": { + "$id": "#/properties/reportUnnecessaryTypeIgnoreComment", + "$ref": "#/definitions/diagnostic", + "title": "Controls reporting of '# type: ignore' comments that have no effect'", + "default": "none" + }, + "reportMatchNotExhaustive": { + "$id": "#/properties/reportMatchNotExhaustive", + "$ref": "#/definitions/diagnostic", + "title": "Controls reporting of 'match' statements that do not exhaustively match all possible values", + "default": "none" + }, "extraPaths": { - "$id": "#/properties/executionEnvironments/items/properties/extraPaths", + "$id": "#/properties/extraPaths", "type": "array", "title": "Additional import search resolution paths", "items": { - "$id": "#/properties/executionEnvironments/items/properties/extraPaths/items", + "$id": "#/properties/extraPaths/items", "type": "string", "title": "Additional import search resolution path", "default": "", @@ -536,7 +566,7 @@ } }, "pythonVersion": { - "$id": "#/properties/pythonVersion", + "$id": "#/properties/executionEnvironments/items/properties/pythonVersion", "type": "string", "title": "Python version to assume during type analysis", "default": "", @@ -546,7 +576,7 @@ "pattern": "^3\\.[0-9]+$" }, "pythonPlatform": { - "$id": "#/properties/pythonPlatform", + "$id": "#/properties/executionEnvironments/items/properties/pythonPlatform", "type": "string", "title": "Python platform to assume during type analysis", "default": "", diff --git a/packages/vscode-pyright/src/extension.ts 
b/packages/vscode-pyright/src/extension.ts index 86a3aeb65c6b..52320a5936f9 100644 --- a/packages/vscode-pyright/src/extension.ts +++ b/packages/vscode-pyright/src/extension.ts @@ -45,6 +45,9 @@ let cancellationStrategy: FileBasedCancellationStrategy | undefined; const pythonPathChangedListenerMap = new Map(); +// Request a heap size of 3GB. This is reasonable for modern systems. +const defaultHeapSize = 3072; + export function activate(context: ExtensionContext) { // See if Pylance is installed. If so, don't activate the Pyright extension. // Doing so will generate "command already registered" errors and redundant @@ -63,11 +66,17 @@ export function activate(context: ExtensionContext) { cancellationStrategy = new FileBasedCancellationStrategy(); const bundlePath = context.asAbsolutePath(path.join('dist', 'server.js')); - const debugOptions = { execArgv: ['--nolazy', '--inspect=6600'] }; + const runOptions = { execArgv: [`--max-old-space-size=${defaultHeapSize}`] }; + const debugOptions = { execArgv: ['--nolazy', '--inspect=6600', `--max-old-space-size=${defaultHeapSize}`] }; // If the extension is launched in debug mode, then the debug server options are used. const serverOptions: ServerOptions = { - run: { module: bundlePath, transport: TransportKind.ipc, args: cancellationStrategy.getCommandLineArguments() }, + run: { + module: bundlePath, + transport: TransportKind.ipc, + args: cancellationStrategy.getCommandLineArguments(), + options: runOptions, + }, // In debug mode, use the non-bundled code if it's present. The production // build includes only the bundled package, so we don't want to crash if // someone starts the production extension in debug mode. 
diff --git a/packages/vscode-pyright/src/server.ts b/packages/vscode-pyright/src/server.ts index 2be652ea7355..a424b2a718f8 100644 --- a/packages/vscode-pyright/src/server.ts +++ b/packages/vscode-pyright/src/server.ts @@ -1,3 +1,5 @@ import { main } from 'pyright-internal/nodeMain'; +Error.stackTraceLimit = 256; + main(); diff --git a/specs/dataclass_transforms.md b/specs/dataclass_transforms.md index 60d21f4e62a6..4340f192c275 100644 --- a/specs/dataclass_transforms.md +++ b/specs/dataclass_transforms.md @@ -136,10 +136,19 @@ defined in PEP 557. `frozen` is a parameter supported in the stdlib dataclass, and its meaning is defined in PEP 557. -`kw_only` is a parameter supported by some dataclass-like libraries -(for example, attrs and pydantic) that controls whether the synthesized -`__init__` method uses keyword-only parameters or whether parameters -are positional. +`init` is a parameter supported in the stdlib dataclass, and its meaning is +defined in PEP 557. + +`unsafe_hash` is a parameter supported in the stdlib dataclass, and its meaning is +defined in PEP 557. + +`hash` is an alias for the `unsafe_hash` parameter. + +`kw_only` is a parameter supported in the stdlib dataclass, first introduced +in Python 3.10. + +`slots` is a parameter supported in the stdlib dataclass, first introduced +in Python 3.10. Parameters to `dataclass_transform` allow for some basic customization of @@ -281,10 +290,10 @@ Literal[True]). `default` is an optional parameter that provides the default value for the field. -`default_factory` is an optional parameter that provides a runtime callback -that returns the default value for the field. If `default` and `default_value` -are both unspecified, the field is assumed to have no default value and must be -provided a value when the class is instantiated. +`default_factory` or `factory` is an optional parameter that provides a runtime +callback that returns the default value for the field. 
If `default` and +`default_value` are both unspecified, the field is assumed to have no default +value and must be provided a value when the class is instantiated. `alias` is an optional str parameter that provides an alternative name for the field. This alternative name is used in the synthesized `__init__` method. @@ -320,7 +329,7 @@ def create_model( # Code that imports this library: @create_model(init=False) class CustomerModel: - id: int = ModelField(resolver=lambda : 0) + id: int = model_field(resolver=lambda : 0) name: str ``` @@ -470,10 +479,6 @@ preserves the original order, but attrs defines a new order based on subclasses. Users of attrs who rely on this ordering will not see the correct order of parameters in the synthesized `__init__` method. -The attrs library also differs from stdlib dataclasses in that it uses the -parameter name `factory` rather than `default_factory` in its `attr.ib` and -`attr.field` functions. - Django ------