diff --git a/.gitignore b/.gitignore
index c2cfa59e0a..ec86f7ba6c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,4 @@ __pycache__
python-frontend/typeshed_serializer/serializer/proto_out
python-frontend/typeshed_serializer/output/*
+python-frontend/typeshed_serializer/.tox
diff --git a/python-frontend/pom.xml b/python-frontend/pom.xml
index aba69d3bd9..041cd59088 100644
--- a/python-frontend/pom.xml
+++ b/python-frontend/pom.xml
@@ -12,7 +12,7 @@
Python :: Frontend
- pom.xml,src/main/java,typeshed_serializer/serializer
+ pom.xml,src/main/java,typeshed_serializer/serializer,typeshed_serializer/runners
typeshed_serializer/cov.xml
3.9
@@ -144,16 +144,19 @@
org.codehaus.mojo
exec-maven-plugin
- 1.6.0
+ 3.1.0
typeshed_serializer
- tox
+ tox-runner
generate-resources
- tox
+ python
+
+ runners/tox_runner.py
+
exec
@@ -169,6 +172,7 @@
src/*/java/**/*.java
typeshed_serializer/serializer/**/*.py
+ typeshed_serializer/runners/**/*.py
typeshed_serializer/tests/**/*.py
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/docutils.parsers.null.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/docutils.parsers.null.protobuf
index fd03b8484d..07849d2fb3 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/docutils.parsers.null.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/docutils.parsers.null.protobuf
@@ -1,6 +1,6 @@
-docutils.parsers.null
-Parserdocutils.parsers.null.Parser"docutils.parsers.Parserj35j36j37j38j39j310r
+docutils.parsers.null
+Parserdocutils.parsers.null.Parser"builtins.objectj35j36j37j38j39j310r
config_section_dependencies8docutils.parsers.null.Parser.config_section_dependenciesL
builtins.tuple[builtins.str]
builtins.str"builtins.str"builtins.tuple*35*36*37*38*39*310
\ No newline at end of file
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/geoip2.models.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/geoip2.models.protobuf
index 1e6360589f..1f41571afb 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/geoip2.models.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/geoip2.models.protobuf
@@ -1,5 +1,5 @@
-
geoip2.models
+
geoip2.models
Countrygeoip2.models.Country"geoip2.mixins.SimpleEquality*
__init__geoip2.models.Country.__init__"
None*8
@@ -17,26 +17,26 @@ HTypeAlias[typing.Mapping[builtins.str,typing.Mapping[builtins.str,Any]]]
)Union[typing.Sequence[builtins.str],None]N
typing.Sequence[builtins.str]
builtins.str"builtins.str"typing.Sequence
-None"geoip2.models._Locales z35z36z37z38z39z310j35j36j37j38j39j310rN
- continentgeoip2.models.Country.continent
-Any*35*36*37*38*39*310rJ
-countrygeoip2.models.Country.country
-Any*35*36*37*38*39*310r`
-registered_country(geoip2.models.Country.registered_country
-Any*35*36*37*38*39*310rb
-represented_country)geoip2.models.Country.represented_country
-Any*35*36*37*38*39*310rJ
-maxmindgeoip2.models.Country.maxmind
-Any*35*36*37*38*39*310rH
-traitsgeoip2.models.Country.traits
-Any*35*36*37*38*39*310r
+None"geoip2.models._Locales z35z36z37z38z39z310j35j36j37j38j39j310r{
+ continentgeoip2.models.Country.continent4
+geoip2.records.Continent"geoip2.records.Continent*35*36*37*38*39*310rs
+countrygeoip2.models.Country.country0
+geoip2.records.Country"geoip2.records.Country*35*36*37*38*39*310r
+registered_country(geoip2.models.Country.registered_country0
+geoip2.records.Country"geoip2.records.Country*35*36*37*38*39*310r
+represented_country)geoip2.models.Country.represented_countryF
+!geoip2.records.RepresentedCountry"!geoip2.records.RepresentedCountry*35*36*37*38*39*310rs
+maxmindgeoip2.models.Country.maxmind0
+geoip2.records.MaxMind"geoip2.records.MaxMind*35*36*37*38*39*310ro
+traitsgeoip2.models.Country.traits.
+geoip2.records.Traits"geoip2.records.Traits*35*36*37*38*39*310r
rawgeoip2.models.Country.raw
HTypeAlias[typing.Mapping[builtins.str,typing.Mapping[builtins.str,Any]]]
=typing.Mapping[builtins.str,typing.Mapping[builtins.str,Any]]
builtins.str"builtins.strY
typing.Mapping[builtins.str,Any]
builtins.str"builtins.str
-Any"typing.Mapping"typing.Mapping"geoip2.models._RawResponse*35*36*37*38*39*310
+Any"typing.Mapping"typing.Mapping"geoip2.models._RawResponse*35*36*37*38*39*310
Citygeoip2.models.City"geoip2.models.Country*
__init__geoip2.models.City.__init__"
None*2
@@ -54,15 +54,16 @@ HTypeAlias[typing.Mapping[builtins.str,typing.Mapping[builtins.str,Any]]]
)Union[typing.Sequence[builtins.str],None]N
typing.Sequence[builtins.str]
builtins.str"builtins.str"typing.Sequence
-None"geoip2.models._Locales z35z36z37z38z39z310j35j36j37j38j39j310rA
-citygeoip2.models.City.city
-Any*35*36*37*38*39*310rI
-locationgeoip2.models.City.location
-Any*35*36*37*38*39*310rE
-postalgeoip2.models.City.postal
-Any*35*36*37*38*39*310rQ
-subdivisionsgeoip2.models.City.subdivisions
-Any*35*36*37*38*39*310O
+None"geoip2.models._Locales z35z36z37z38z39z310j35j36j37j38j39j310rd
+citygeoip2.models.City.city*
+geoip2.records.City"geoip2.records.City*35*36*37*38*39*310rt
+locationgeoip2.models.City.location2
+geoip2.records.Location"geoip2.records.Location*35*36*37*38*39*310rl
+postalgeoip2.models.City.postal.
+geoip2.records.Postal"geoip2.records.Postal*35*36*37*38*39*310r
+subdivisionsgeoip2.models.City.subdivisions_
+!Tuple[geoip2.records.Subdivision]8
+geoip2.records.Subdivision"geoip2.records.Subdivision*35*36*37*38*39*310O
Insightsgeoip2.models.Insights"geoip2.models.Cityj35j36j37j38j39j310S
Enterprisegeoip2.models.Enterprise"geoip2.models.Cityj35j36j37j38j39j310_
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.adapters.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.adapters.protobuf
index dc460cd32b..287653676d 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.adapters.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.adapters.protobuf
@@ -156,15 +156,19 @@ $Tuple[builtins.float,builtins.float]
CallableType[builtins.function]&
builtins.function"builtins.function*35*36*37*38*39*310*d
DEFAULT_CA_BUNDLE_PATH(requests.adapters.DEFAULT_CA_BUNDLE_PATH
-Any*35*36*37*38*39*310*j
-get_encoding_from_headers+requests.adapters.get_encoding_from_headers
-Any*35*36*37*38*39*310*h
-prepend_scheme_if_needed*requests.adapters.prepend_scheme_if_needed
-Any*35*36*37*38*39*310*Z
-get_auth_from_url#requests.adapters.get_auth_from_url
-Any*35*36*37*38*39*310*R
-
urldefragauthrequests.adapters.urldefragauth
-Any*35*36*37*38*39*310*
+Any*35*36*37*38*39*310*
+get_encoding_from_headers+requests.adapters.get_encoding_from_headersK
+CallableType[builtins.function]&
+builtins.function"builtins.function*35*36*37*38*39*310*
+prepend_scheme_if_needed*requests.adapters.prepend_scheme_if_neededK
+CallableType[builtins.function]&
+builtins.function"builtins.function*35*36*37*38*39*310*
+get_auth_from_url#requests.adapters.get_auth_from_urlK
+CallableType[builtins.function]&
+builtins.function"builtins.function*35*36*37*38*39*310*
+
urldefragauthrequests.adapters.urldefragauthK
+CallableType[builtins.function]&
+builtins.function"builtins.function*35*36*37*38*39*310*
extract_cookies_to_jar(requests.adapters.extract_cookies_to_jarK
CallableType[builtins.function]&
builtins.function"builtins.function*35*36*37*38*39*310*q
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.packages.urllib3.connectionpool.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.packages.urllib3.connectionpool.protobuf
index 74cf7b8180..447114f45e 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.packages.urllib3.connectionpool.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/requests.packages.urllib3.connectionpool.protobuf
@@ -164,6 +164,8 @@
kwz35z36z37z38z39z310*S
_connection)requests.packages.urllib3.util.connection *35*36*37*38*39*310*k
port_by_scheme7requests.packages.urllib3.connectionpool.port_by_scheme
+Any*35*36*37*38*39*310*g
+HTTPResponse5requests.packages.urllib3.connectionpool.HTTPResponse
Any*35*36*37*38*39*310*
is_connection_dropped>requests.packages.urllib3.connectionpool.is_connection_droppedK
CallableType[builtins.function]&
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.httpserver.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.httpserver.protobuf
index 4c21e547b8..8d4dd3da0f 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.httpserver.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.httpserver.protobuf
@@ -1,7 +1,7 @@
-tornado.httpserver
+tornado.httpserver
-HTTPServertornado.httpserver.HTTPServer"tornado.tcpserver.TCPServer"tornado.util.Configurable"-tornado.httputil.HTTPServerConnectionDelegate*
+HTTPServertornado.httpserver.HTTPServer"tornado.tcpserver.TCPServer"tornado.util.Configurable*
__init__&tornado.httpserver.HTTPServer.__init__"
None*H
self>
@@ -73,8 +73,8 @@ chunk_size *
address_family5tornado.httpserver._HTTPRequestContext.address_family
Any*27rJ
remote_ip0tornado.httpserver._HTTPRequestContext.remote_ip
-Any*27
-_ServerRequestAdapter(tornado.httpserver._ServerRequestAdapter"$tornado.httputil.HTTPMessageDelegate*
+Any*27
+_ServerRequestAdapter(tornado.httpserver._ServerRequestAdapter"builtins.object*
__init__1tornado.httpserver._ServerRequestAdapter.__init__"
None*^
selfT
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.web.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.web.protobuf
index 8aeb57d3ae..ad7b64d67c 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.web.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/tornado.web.protobuf
@@ -1,5 +1,5 @@
-tornado.web=
+tornado.web=
RequestHandlertornado.web.RequestHandler"builtins.object*
__init__#tornado.web.RequestHandler.__init__"
None*B
@@ -232,9 +232,9 @@ static_url%tornado.web.RequestHandler.static_url*
SUPPORTED_METHODS,tornado.web.RequestHandler.SUPPORTED_METHODS
Any*27rm
application&tornado.web.RequestHandler.application2
-tornado.web.Application"tornado.web.Application*27r{
-request"tornado.web.RequestHandler.requestH
-"tornado.httputil.HTTPServerRequest""tornado.httputil.HTTPServerRequest*27r
+tornado.web.Application"tornado.web.Application*27r:
+request"tornado.web.RequestHandler.request
+Any*27r
path_args$tornado.web.RequestHandler.path_argsJ
builtins.list[builtins.str]
builtins.str"builtins.str"
builtins.list*27r
@@ -275,9 +275,9 @@ initialize%tornado.web.RequestHandler.initializeK
options"tornado.web.RequestHandler.options
*TypeAlias[CallableType[builtins.function]]K
CallableType[builtins.function]&
-builtins.function"builtins.function"tornado.web._MethodType*27
+builtins.function"builtins.function"tornado.web._MethodType*27
-Applicationtornado.web.Application"-tornado.httputil.HTTPServerConnectionDelegate*
+Applicationtornado.web.Application"builtins.object*
__init__ tornado.web.Application.__init__"
None*<
self2
@@ -334,8 +334,8 @@ ui_modules"tornado.web.Application.ui_modules
Any*27r=
ui_methods"tornado.web.Application.ui_methods
-Any*27
-_RequestDispatchertornado.web._RequestDispatcher"$tornado.httputil.HTTPMessageDelegate*
+Any*27
+_RequestDispatchertornado.web._RequestDispatcher"builtins.object*
__init__'tornado.web._RequestDispatcher.__init__"
None*J
self@
diff --git a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/werkzeug.protobuf b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/werkzeug.protobuf
index fe37807c82..e6680bdeb1 100644
--- a/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/werkzeug.protobuf
+++ b/python-frontend/src/main/resources/org/sonar/python/types/third_party_protobuf/werkzeug.protobuf
@@ -31,61 +31,14 @@ useragentswerkzeug.useragents *35*36*37*38*39*310*2
run_simplewerkzeug.run_simple
Any*35*36*37*38*39*310*?
test_appwerkzeug.test_app
+Any*35*36*37*38*39*310*A
+ UserAgentwerkzeug.UserAgent
Any*35*36*37*38*39*310*
_eastereggwerkzeug._eastereggK
CallableType[builtins.function]&
builtins.function"builtins.function*35*36*37*38*39*310*U
DebuggedApplicationwerkzeug.DebuggedApplication
-Any*35*36*37*38*39*310*A
- MultiDictwerkzeug.MultiDict
-Any*35*36*37*38*39*310*Q
-CombinedMultiDictwerkzeug.CombinedMultiDict
-Any*35*36*37*38*39*310*=
-Headerswerkzeug.Headers
-Any*35*36*37*38*39*310*K
-EnvironHeaderswerkzeug.EnvironHeaders
-Any*35*36*37*38*39*310*I
-
ImmutableListwerkzeug.ImmutableList
-Any*35*36*37*38*39*310*I
-
ImmutableDictwerkzeug.ImmutableDict
-Any*35*36*37*38*39*310*S
-ImmutableMultiDictwerkzeug.ImmutableMultiDict
-Any*35*36*37*38*39*310*S
-TypeConversionDictwerkzeug.TypeConversionDict
-Any*35*36*37*38*39*310*e
-ImmutableTypeConversionDict$werkzeug.ImmutableTypeConversionDict
-Any*35*36*37*38*39*310*;
-Acceptwerkzeug.Accept
-Any*35*36*37*38*39*310*C
-
-MIMEAcceptwerkzeug.MIMEAccept
-Any*35*36*37*38*39*310*I
-
CharsetAcceptwerkzeug.CharsetAccept
-Any*35*36*37*38*39*310*K
-LanguageAcceptwerkzeug.LanguageAccept
-Any*35*36*37*38*39*310*U
-RequestCacheControlwerkzeug.RequestCacheControl
-Any*35*36*37*38*39*310*W
-ResponseCacheControlwerkzeug.ResponseCacheControl
-Any*35*36*37*38*39*310*9
-ETagswerkzeug.ETags
-Any*35*36*37*38*39*310*A
- HeaderSetwerkzeug.HeaderSet
-Any*35*36*37*38*39*310*M
-WWWAuthenticatewerkzeug.WWWAuthenticate
-Any*35*36*37*38*39*310*I
-
Authorizationwerkzeug.Authorization
-Any*35*36*37*38*39*310*I
-
FileMultiDictwerkzeug.FileMultiDict
-Any*35*36*37*38*39*310*G
-CallbackDictwerkzeug.CallbackDict
-Any*35*36*37*38*39*310*E
-FileStoragewerkzeug.FileStorage
-Any*35*36*37*38*39*310*O
-OrderedMultiDictwerkzeug.OrderedMultiDict
-Any*35*36*37*38*39*310*a
-ImmutableOrderedMultiDict"werkzeug.ImmutableOrderedMultiDict
Any*35*36*37*38*39*310*;
escapewerkzeug.escape
Any*35*36*37*38*39*310*O
@@ -150,19 +103,10 @@ MIMEAcceptwerkzeug.MIMEAccept
WWWAuthenticateMixinwerkzeug.WWWAuthenticateMixin
Any*35*36*37*38*39*310*i
CommonRequestDescriptorsMixin&werkzeug.CommonRequestDescriptorsMixin
-Any*35*36*37*38*39*310*9
-Localwerkzeug.Local
-Any*35*36*37*38*39*310*G
-LocalManagerwerkzeug.LocalManager
-Any*35*36*37*38*39*310*C
-
-LocalProxywerkzeug.LocalProxy
-Any*35*36*37*38*39*310*C
-
-LocalStackwerkzeug.LocalStack
-Any*35*36*37*38*39*310*I
-
release_localwerkzeug.release_local
-Any*35*36*37*38*39*310*
+Any*35*36*37*38*39*310*
+
release_localwerkzeug.release_localK
+CallableType[builtins.function]&
+builtins.function"builtins.function*35*36*37*38*39*310*
generate_password_hashwerkzeug.generate_password_hashK
CallableType[builtins.function]&
builtins.function"builtins.function*35*36*37*38*39*310*
diff --git a/python-frontend/typeshed_serializer/.coveragerc b/python-frontend/typeshed_serializer/.coveragerc
new file mode 100644
index 0000000000..5951051c8d
--- /dev/null
+++ b/python-frontend/typeshed_serializer/.coveragerc
@@ -0,0 +1,4 @@
+[report]
+
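+# Exclude the module entry-point guard (if __name__ == '__main__':) from coverage.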
+exclude_lines =
+ if __name__ == .__main__.:
\ No newline at end of file
diff --git a/python-frontend/typeshed_serializer/checksum b/python-frontend/typeshed_serializer/checksum
new file mode 100644
index 0000000000..6560d31184
--- /dev/null
+++ b/python-frontend/typeshed_serializer/checksum
@@ -0,0 +1,2 @@
+ec8fbfb2c613fcbee0e6ada09e4709d647365941dea12caa49199ac487ce1ff5
+6065c4301981148c0a78c6f2d7a832947fdd11a221adcc44c75693097446b329
\ No newline at end of file
diff --git a/python-frontend/typeshed_serializer/runners/__init__.py b/python-frontend/typeshed_serializer/runners/__init__.py
new file mode 100644
index 0000000000..04f62ffaad
--- /dev/null
+++ b/python-frontend/typeshed_serializer/runners/__init__.py
@@ -0,0 +1,20 @@
+#
+# SonarQube Python Plugin
+# Copyright (C) 2011-2023 SonarSource SA
+# mailto:info AT sonarsource DOT com
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
diff --git a/python-frontend/typeshed_serializer/runners/tox_runner.py b/python-frontend/typeshed_serializer/runners/tox_runner.py
new file mode 100644
index 0000000000..59dd87e556
--- /dev/null
+++ b/python-frontend/typeshed_serializer/runners/tox_runner.py
@@ -0,0 +1,147 @@
+#
+# SonarQube Python Plugin
+# Copyright (C) 2011-2023 SonarSource SA
+# mailto:info AT sonarsource DOT com
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
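+"""Checksum-gated tox runner for the typeshed serializer.
+
+Compares a checksum of the serializer sources (and of the generated protobuf
+binaries) against the values recorded in the ``checksum`` file, and only
+triggers the full tox serialization when the sources have changed.
+"""
+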
+import contextlib
+import os
+import sys
+from os.path import isfile, join
+import subprocess
+import hashlib
+from pathlib import Path
+from typing import Optional, Tuple
+from collections.abc import Callable
+import logging
+
+CURRENT_PATH = os.path.dirname(__file__)
+CHECKSUM_FILE = os.path.join(CURRENT_PATH, '../checksum')
+SERIALIZER_PATH = os.path.join(CURRENT_PATH, '../serializer')
+RESOURCES_FOLDER_PATH = os.path.join(CURRENT_PATH, '../resources')
+BINARY_FOLDER_PATH = os.path.join(CURRENT_PATH, '../../src/main/resources/org/sonar/python/types')
+PROTOBUF_EXTENSION = '.protobuf'
+PYTHON_STUB_EXTENSION = '.pyi'
+
+logger = logging.getLogger('tox_runner')
+handler = logging.StreamHandler(sys.stdout)
+log_formatter = logging.Formatter(fmt='%(name)s [%(levelname)s] --- %(message)s ---')
+logger.setLevel(logging.INFO)
+handler.setFormatter(log_formatter)
+logger.addHandler(handler)
+
+
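+# Collect the top-level *.py files of a folder (non-recursive).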
+def fetch_python_file_names(folder_path: str) -> list[str]:
+ result = list()
+ for file in os.listdir(folder_path):
+ if isfile(join(folder_path, file)) and file.endswith('.py'):
+ result.append(join(folder_path, file))
+ return result
+
+
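+# Recursively collect every file under folder_name that has the given extension.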
+def fetch_resource_file_names(folder_name: str, file_extension: str) -> list[str]:
+ result = list()
+ for root, _, files in os.walk(folder_name):
+ for file in files:
+ if file.endswith(file_extension):
+ result.append(join(root, file))
+ return result
+
+
+def fetch_config_file_names() -> list[str]:
+ return ['requirements.txt', 'tox.ini']
+
+
+def fetch_binary_file_names() -> list[str]:
+ return sorted(fetch_resource_file_names(BINARY_FOLDER_PATH, PROTOBUF_EXTENSION))
+
+
+def fetch_source_file_names(folder_path: str) -> list[str]:
+ filenames = fetch_python_file_names(folder_path)
+ resources = fetch_resource_file_names(RESOURCES_FOLDER_PATH, PYTHON_STUB_EXTENSION)
+ config_files = fetch_config_file_names()
+ return sorted([*filenames, *resources, *config_files])
+
+
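+# Normalize line endings to '\n' so that the source checksum stays stable
+# across Windows and Unix checkouts.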
+def normalize_text_files(file_name: str) -> bytes:
+ normalized_file = Path(file_name).read_text().strip().replace('\r\n', '\n').replace('\r', '\n')
+ return bytes(normalized_file, 'utf-8')
+
+
+def read_file(file_name: str) -> bytes:
+ return Path(file_name).read_bytes()
+
+
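+# Hash the contents of all given files into a single SHA-256 digest,
+# silently skipping any directory that ends up in the list.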
+def compute_checksum(file_names: list[str], get_file_bytes: Callable[[str], bytes]) -> str:
+ _hash = hashlib.sha256()
+ for fn in file_names:
+ with contextlib.suppress(IsADirectoryError):
+ _hash.update(get_file_bytes(fn))
+ return _hash.hexdigest()
+
+
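+# The checksum file holds two lines: the sources checksum first, then the
+# protobuf binaries checksum. A missing or empty line is returned as None.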
+def read_previous_checksum(checksum_file: str) -> Tuple[Optional[str], Optional[str]]:
+ def empty_str_to_none(s: str) -> Optional[str]:
+ if not s:
+ return None
+ return s
+
+ if not Path(checksum_file).is_file():
+ return None, None
+ with open(checksum_file, 'r') as file:
+ source_checksum = empty_str_to_none(file.readline().strip())
+ binaries_checksum = empty_str_to_none(file.readline().strip())
+ return source_checksum, binaries_checksum
+
+
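+# Recompute both checksums and rewrite the checksum file; called from the
+# tox 'serialize' environment (commands_post) after serialization.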
+def update_checksum():
+ with open(CHECKSUM_FILE, 'w') as file:
+ source_file_names = fetch_source_file_names(SERIALIZER_PATH)
+ source_checksum = compute_checksum(source_file_names, normalize_text_files)
+ binary_file_names = fetch_binary_file_names()
+ binary_checksum = compute_checksum(binary_file_names, read_file)
+ file.writelines([f"{source_checksum}\n", binary_checksum])
+
+
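+# Entry point invoked by the Maven build (exec-maven-plugin): re-run the full
+# tox serialization only when the serializer sources changed, otherwise verify
+# that the committed binaries still match their recorded checksum and only run
+# the tests.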
+def main():
+ source_files = fetch_source_file_names(SERIALIZER_PATH)
+ (previous_sources_checksum, previous_binaries_checksum) = read_previous_checksum(CHECKSUM_FILE)
+ current_sources_checksum = compute_checksum(source_files, normalize_text_files)
+ logger.info("STARTING TYPESHED SOURCE FILE CHECKSUM COMPUTATION")
+ logger.info(f"Previous checksum {previous_sources_checksum}")
+ logger.info(f"Current checksum {current_sources_checksum}")
+ logger.info(f"Checksum is computed over {len(source_files)} files")
+ if previous_sources_checksum != current_sources_checksum:
+ logger.info("STARTING TYPESHED SERIALIZATION")
+ subprocess.run(["tox"])
+ else:
+ binary_file_names = fetch_binary_file_names()
+ current_binaries_checksum = compute_checksum(binary_file_names, read_file)
+ logger.info("STARTING TYPESHED BINARY FILES CHECKSUM COMPUTATION")
+ logger.info(f"Previous binaries checksum {previous_binaries_checksum}")
+ logger.info(f"Current binaries checksum {current_binaries_checksum}")
+ logger.info(f"Checksum is computed over {len(binary_file_names)} files")
+ if previous_binaries_checksum != current_binaries_checksum:
+ raise RuntimeError('INCONSISTENT BINARY CHECKSUMS')
+ logger.info("SKIPPING TYPESHED SERIALIZATION")
+ # At the moment we need to run the tests in order to not break the quality gate.
+ # If the tests are skipped this could potentially result in missing coverage.
+ subprocess.run(['tox', '-e', 'py39'])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/python-frontend/typeshed_serializer/tests/runners/__init__.py b/python-frontend/typeshed_serializer/tests/runners/__init__.py
new file mode 100644
index 0000000000..04f62ffaad
--- /dev/null
+++ b/python-frontend/typeshed_serializer/tests/runners/__init__.py
@@ -0,0 +1,20 @@
+#
+# SonarQube Python Plugin
+# Copyright (C) 2011-2023 SonarSource SA
+# mailto:info AT sonarsource DOT com
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
diff --git a/python-frontend/typeshed_serializer/tests/runners/test_tox_runner.py b/python-frontend/typeshed_serializer/tests/runners/test_tox_runner.py
new file mode 100644
index 0000000000..9e1b91c018
--- /dev/null
+++ b/python-frontend/typeshed_serializer/tests/runners/test_tox_runner.py
@@ -0,0 +1,237 @@
+#
+# SonarQube Python Plugin
+# Copyright (C) 2011-2023 SonarSource SA
+# mailto:info AT sonarsource DOT com
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 3 of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+#
+
+import unittest
+from unittest import mock
+from unittest.mock import mock_open
+import os
+from six import PY2
+
+from runners import tox_runner
+from runners.tox_runner import CHECKSUM_FILE
+
+CURRENT_PATH = os.path.dirname(__file__)
+
+
+class ToxRunnerTest(unittest.TestCase):
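+    """Unit tests for runners.tox_runner; filesystem and subprocess access is
+    mocked, except for the checksum tests that read the fake stubs under
+    tests/resources."""
+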
+ MODULE_NAME = 'runners.tox_runner'
+ PATH_IS_FILE_FUNCTION = f'{MODULE_NAME}.Path.is_file'
+ COMPUTE_CHECKSUM_FUNCTION = f'{MODULE_NAME}.compute_checksum'
+ READ_PREVIOUS_CHECKSUM_FUNCTION = f'{MODULE_NAME}.read_previous_checksum'
+ SUBPROCESS_CALL = f'{MODULE_NAME}.subprocess'
+ BUILTIN_OPEN_FUNCTION = '__builtin__.open' if PY2 else 'builtins.open'
+
+ FILE_NAMES = ['a/test', 'b/file', 'requirements.txt']
+ FAKEMODULE_PATH = os.path.join(CURRENT_PATH, "../resources/fakemodule.pyi")
+ FAKEMODULE_IMPORTED_PATH = os.path.join(CURRENT_PATH, "../resources/fakemodule_imported.pyi")
+ TEST_RESOURCES_FILE_NAMES = [FAKEMODULE_PATH, FAKEMODULE_IMPORTED_PATH]
+ FILE_CONTENT = bytes("test\n end", 'utf-8')
+
+ def test_fetching_python_files(self):
+ folder = "test"
+ with mock.patch('os.listdir') as mocked_listdir, mock.patch(f'{self.MODULE_NAME}.isfile') as mocked_isfile:
+ mocked_listdir.return_value = ['folder1', 'folder2', 'file', 'file1.py', 'otherfile.cpp', 'file2.py']
+ mocked_isfile.side_effect = [False, False, True, True, True, True]
+ fns = tox_runner.fetch_python_file_names(folder)
+ expected = ['test/file1.py', 'test/file2.py']
+ self.assertListEqual(fns, expected)
+
+ def test_fetching_resources(self):
+ folder_name = 'test'
+ extension = '.test'
+ with mock.patch('os.walk') as mocked_walk:
+ mocked_walk.return_value = [('folder2', '', [f'__init__{extension}', f'file1{extension}']),
+ ('folder1', '', ['file', f'__init__{extension}']),
+ ('folder3', '', ['otherfile.cpp', 'file2.testother', 'filetest'])]
+ fns = tox_runner.fetch_resource_file_names(folder_name, extension)
+ expected = [f'folder2/__init__{extension}', f'folder2/file1{extension}', f'folder1/__init__{extension}']
+ mocked_walk.assert_called_once_with(folder_name)
+ self.assertListEqual(fns, expected)
+
+ def test_fetch_config_file_names(self):
+ fns = tox_runner.fetch_config_file_names()
+ expected = ['requirements.txt', 'tox.ini']
+ self.assertListEqual(fns, expected)
+
+ def test_fetch_source_file_names(self):
+ folder = "test"
+ with mock.patch(f'{self.MODULE_NAME}.fetch_python_file_names') as mock_fetch_python, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_resource_file_names') as mock_fetch_resource, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_config_file_names') as mock_fetch_config:
+ mock_fetch_resource.return_value = ['a/1', 'b/2', 'b/4']
+ mock_fetch_python.return_value = ['a/2', 'a/4', 'b/1', 'b/3']
+ mock_fetch_config.return_value = ['z', '_1']
+ fns = tox_runner.fetch_source_file_names(folder)
+ self.assertListEqual(fns, ['_1', 'a/1', 'a/2', 'a/4', 'b/1', 'b/2', 'b/3', 'b/4', 'z'])
+ mock_fetch_python.assert_called_with(folder)
+ mock_fetch_config.assert_called()
+ mock_fetch_resource.assert_called_with(tox_runner.RESOURCES_FOLDER_PATH, tox_runner.PYTHON_STUB_EXTENSION)
+
+    def test_fetch_binary_file_names(self):
+ with mock.patch(f'{self.MODULE_NAME}.fetch_resource_file_names') as mock_fetch_resource:
+ mock_fetch_resource.return_value = ['b/1', 'a/2', 'a/1']
+ fns = tox_runner.fetch_binary_file_names()
+ self.assertListEqual(fns, ['a/1', 'a/2', 'b/1'])
+ mock_fetch_resource.assert_called_with(tox_runner.BINARY_FOLDER_PATH, tox_runner.PROTOBUF_EXTENSION)
+
+    def test_read_previous_checksum_non_existent_file(self):
+        checksum_file = 'non_existent'
+ with mock.patch(self.PATH_IS_FILE_FUNCTION) as mocked_isfile:
+ mocked_isfile.return_value = False
+ assert tox_runner.read_previous_checksum(checksum_file) == (None, None)
+
+ def test_read_previous_checksum_file_exists(self):
+ source_checksum = '123'
+ binary_checksum = '456'
+ file_data = f"{source_checksum}\n{binary_checksum}"
+ checksum_file = 'test_checksum'
+ with mock.patch(self.PATH_IS_FILE_FUNCTION) as mocked_isfile, \
+ mock.patch(self.BUILTIN_OPEN_FUNCTION, mock_open(read_data=file_data)) as mocked_open:
+ mocked_isfile.return_value = True
+ assert tox_runner.read_previous_checksum(checksum_file) == (source_checksum, binary_checksum)
+ mocked_open.assert_called_with(checksum_file, 'r')
+
+ def test_read_previous_checksum_file_missing_line(self):
+ source_checksum = '123'
+ checksum_file = 'test_checksum'
+ with mock.patch(self.PATH_IS_FILE_FUNCTION) as mocked_isfile, \
+ mock.patch(self.BUILTIN_OPEN_FUNCTION, mock_open(read_data=source_checksum)) as mocked_open:
+ mocked_isfile.return_value = True
+ assert tox_runner.read_previous_checksum(checksum_file) == (source_checksum, None)
+ mocked_open.assert_called_with(checksum_file, 'r')
+
+ def test_update_checksum(self):
+ binary_file_names = ['test.protobuf', 'other.protobuf']
+ source_checksum = '123'
+ binaries_checksum = '456'
+ checksums = [source_checksum, binaries_checksum]
+
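+        # compute_checksum is called twice (sources first, then binaries);
+        # return the matching fake value for each call.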
+ def feed_checksum(_fn, _f):
+ return checksums.pop(0)
+
+ with mock.patch(self.BUILTIN_OPEN_FUNCTION, mock_open()) as mocked_open, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_binary_file_names') as mock_binary_files, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_source_file_names') as mock_files, \
+ mock.patch(self.COMPUTE_CHECKSUM_FUNCTION) as mocked_checksum:
+ mocked_checksum.side_effect = feed_checksum
+ mock_files.return_value = self.FILE_NAMES
+ mock_binary_files.return_value = binary_file_names
+ tox_runner.update_checksum()
+ mocked_file = mocked_open()
+ mocked_open.assert_any_call(CHECKSUM_FILE, 'w')
+ mocked_checksum.assert_any_call(self.FILE_NAMES, tox_runner.normalize_text_files)
+ mocked_checksum.assert_any_call(binary_file_names, tox_runner.read_file)
+ mocked_file.writelines.assert_any_call([f"{source_checksum}\n", binaries_checksum])
+
+ def test_normalized_text_files_rn(self):
+ with mock.patch(f'{self.MODULE_NAME}.Path.read_text') as mock_read_text:
+ mock_read_text.return_value = "\r\ntest\r\n end\r\n"
+ text = tox_runner.normalize_text_files("test")
+ assert text == self.FILE_CONTENT
+
+ def test_normalized_text_files_r(self):
+ with mock.patch(f'{self.MODULE_NAME}.Path.read_text') as mock_read_text:
+ mock_read_text.return_value = "\rtest\r end\r"
+ text = tox_runner.normalize_text_files("test")
+ assert text == self.FILE_CONTENT
+
+ def test_normalized_text_files(self):
+ with mock.patch(f'{self.MODULE_NAME}.Path.read_text') as mock_read_text:
+ mock_read_text.return_value = "\ntest\n end\n"
+ text = tox_runner.normalize_text_files("test")
+ assert text == self.FILE_CONTENT
+
+ def test_read_file(self):
+ file_bytes = bytes("\ntest end\n", 'utf-8')
+ with mock.patch(f'{self.MODULE_NAME}.Path.read_bytes') as mock_read_bytes:
+ mock_read_bytes.return_value = file_bytes
+ text = tox_runner.read_file("test")
+ assert text == file_bytes
+
+ def test_compute_checksum(self):
+ checksum1 = tox_runner.compute_checksum(self.TEST_RESOURCES_FILE_NAMES, tox_runner.normalize_text_files)
+ checksum2 = tox_runner.compute_checksum(self.TEST_RESOURCES_FILE_NAMES, tox_runner.normalize_text_files)
+ assert checksum1 == checksum2
+
+ def test_compute_different_checksum(self):
+ checksum1 = tox_runner.compute_checksum(self.TEST_RESOURCES_FILE_NAMES, tox_runner.normalize_text_files)
+ checksum2 = tox_runner.compute_checksum([self.FAKEMODULE_IMPORTED_PATH], tox_runner.normalize_text_files)
+ assert checksum1 != checksum2
+
+ def test_tox_runner_unchanged_checksums(self):
+ checksum = ('123', '456')
+ computed_checksum = ['123', '456']
+
+ def feed_checksum(_fn, _f):
+ return computed_checksum.pop(0)
+
+ with mock.patch(self.READ_PREVIOUS_CHECKSUM_FUNCTION) as mocked_previous_checksum, \
+ mock.patch(self.COMPUTE_CHECKSUM_FUNCTION) as mocked_checksum, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_source_file_names') as mock_files, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_binary_file_names') as mock_binary_files, \
+ mock.patch(self.SUBPROCESS_CALL) as mocked_subprocess:
+ mocked_previous_checksum.return_value = checksum
+ mock_binary_files.return_value = self.FILE_NAMES
+ mock_files.return_value = self.FILE_NAMES
+ mocked_checksum.side_effect = feed_checksum
+ tox_runner.main()
+ mock_files.assert_called_once()
+ mock_binary_files.assert_called_once()
+ mocked_previous_checksum.assert_any_call(tox_runner.CHECKSUM_FILE)
+ mocked_checksum.assert_any_call(self.FILE_NAMES, tox_runner.normalize_text_files)
+ mocked_checksum.assert_any_call(self.FILE_NAMES, tox_runner.read_file)
+ mocked_subprocess.run.assert_called_with(['tox', '-e', 'py39'])
+
+ def test_tox_runner_different_binary_checksums(self):
+ previous_checksum = '123'
+ binaries_checksum = '456'
+ checksums = [previous_checksum, binaries_checksum]
+ previous_checksums = (previous_checksum, None)
+
+ def feed_checksum(_fn, _f):
+ return checksums.pop(0)
+
+ with mock.patch(self.READ_PREVIOUS_CHECKSUM_FUNCTION) as mocked_previous_checksum, \
+ mock.patch(self.COMPUTE_CHECKSUM_FUNCTION) as mocked_checksum, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_source_file_names') as mock_files, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_binary_file_names') as mock_binary_files:
+ mocked_previous_checksum.return_value = previous_checksums
+ mock_binary_files.return_value = self.FILE_NAMES
+ mock_files.return_value = self.FILE_NAMES
+ mocked_checksum.side_effect = feed_checksum
+ self.assertRaises(RuntimeError, tox_runner.main)
+ mock_files.assert_called_once()
+ mock_binary_files.assert_called_once()
+ mocked_previous_checksum.assert_any_call(tox_runner.CHECKSUM_FILE)
+ mocked_checksum.assert_any_call(self.FILE_NAMES, tox_runner.normalize_text_files)
+ mocked_checksum.assert_any_call(self.FILE_NAMES, tox_runner.read_file)
+
+ def test_tox_runner_modified_checksum(self):
+ with mock.patch(self.READ_PREVIOUS_CHECKSUM_FUNCTION) as mocked_previous_checksum, \
+ mock.patch(self.COMPUTE_CHECKSUM_FUNCTION) as mocked_checksum, \
+ mock.patch(f'{self.MODULE_NAME}.fetch_source_file_names') as mock_files, \
+ mock.patch(self.SUBPROCESS_CALL) as mocked_subprocess:
+ mock_files.return_value = self.FILE_NAMES
+ mocked_previous_checksum.return_value = ('123', '456')
+ mocked_checksum.return_value = ('789', '456')
+ tox_runner.main()
+ mocked_previous_checksum.assert_called_with(tox_runner.CHECKSUM_FILE)
+ mocked_checksum.assert_called_with(self.FILE_NAMES, tox_runner.normalize_text_files)
+ mocked_subprocess.run.assert_called_with(['tox'])
diff --git a/python-frontend/typeshed_serializer/tox.ini b/python-frontend/typeshed_serializer/tox.ini
index f84e02fb6e..7696e17157 100644
--- a/python-frontend/typeshed_serializer/tox.ini
+++ b/python-frontend/typeshed_serializer/tox.ini
@@ -4,8 +4,10 @@ skipsdist = True
[testenv]
deps = -rrequirements.txt
-commands = python -m pytest --cov=serializer --cov-report xml:cov.xml --ignore=resources --cov-branch
+commands = python -m pytest --cov=serializer --cov=runners --cov-report xml:cov.xml --ignore=resources --cov-branch tests/
+
[testenv:serialize]
deps = -rrequirements.txt
commands = python -m serializer.typeshed_serializer
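+# Refresh the recorded source/binary checksums after the serializer has run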
+commands_post = python -c "from runners import tox_runner; tox_runner.update_checksum()"